diff --git "a/codeparrot-valid_1027.txt" "b/codeparrot-valid_1027.txt" new file mode 100644--- /dev/null +++ "b/codeparrot-valid_1027.txt" @@ -0,0 +1,10000 @@ + earliest = iat - _CLOCK_SKEW_SECS + + # Check expiration timestamp. + exp = parsed.get('exp') + if exp is None: + raise _AppIdentityError('No exp field in token') + if exp >= time_now + _MAX_TOKEN_LIFETIME_SECS: + raise _AppIdentityError('exp field too far in future') + latest = exp + _CLOCK_SKEW_SECS + + if time_now < earliest: + raise _AppIdentityError('Token used too early, %d < %d' % + (time_now, earliest)) + if time_now > latest: + raise _AppIdentityError('Token used too late, %d > %d' % + (time_now, latest)) + + return parsed + +#!/usr/bin/env python + +""" Generate the stepper delay lookup table for Marlin firmware. """ + +import argparse + +__author__ = "Ben Gamari " +__copyright__ = "Copyright 2012, Ben Gamari" +__license__ = "GPL" + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument('-f', '--cpu-freq', type=int, default=16, help='CPU clockrate in MHz (default=16)') +parser.add_argument('-d', '--divider', type=int, default=8, help='Timer/counter pre-scale divider (default=8)') +args = parser.parse_args() + +cpu_freq = args.cpu_freq * 1000000 +timer_freq = cpu_freq / args.divider + +print "#ifndef SPEED_LOOKUPTABLE_H" +print "#define SPEED_LOOKUPTABLE_H" +print +print '#include "Marlin.h"' +print + +print "const uint16_t speed_lookuptable_fast[256][2] PROGMEM = {" +a = [ timer_freq / ((i*256)+(args.cpu_freq*2)) for i in range(256) ] +b = [ a[i] - a[i+1] for i in range(255) ] +b.append(b[-1]) +for i in range(32): + print " ", + for j in range(8): + print "{%d, %d}," % (a[8*i+j], b[8*i+j]), + print +print "};" +print + +print "const uint16_t speed_lookuptable_slow[256][2] PROGMEM = {" +a = [ timer_freq / ((i*8)+(args.cpu_freq*2)) for i in range(256) ] +b = [ a[i] - a[i+1] for i in range(255) ] +b.append(b[-1]) +for i in range(32): + print " ", + for j in range(8): + print "{%d, %d}," % (a[8*i+j], b[8*i+j]), + print +print "};" +print + +print "#endif" + + +""" +kombu.transport.SQS +=================== + +Amazon SQS transport module for Kombu. This package implements an AMQP-like +interface on top of Amazons SQS service, with the goal of being optimized for +high performance and reliability. + +The default settings for this module are focused now on high performance in +task queue situations where tasks are small, idempotent and run very fast. + +SQS Features supported by this transport: + Long Polling: + http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ + sqs-long-polling.html + + Long polling is enabled by setting the `wait_time_seconds` transport + option to a number > 1. Amazon supports up to 20 seconds. This is + disabled for now, but will be enabled by default in the near future. + + Batch API Actions: + http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ + sqs-batch-api.html + + The default behavior of the SQS Channel.drain_events() method is to + request up to the 'prefetch_count' messages on every request to SQS. + These messages are stored locally in a deque object and passed back + to the Transport until the deque is empty, before triggering a new + API call to Amazon. + + This behavior dramatically speeds up the rate that you can pull tasks + from SQS when you have short-running tasks (or a large number of workers). 
+ + When a Celery worker has multiple queues to monitor, it will pull down + up to 'prefetch_count' messages from queueA and work on them all before + moving on to queueB. If queueB is empty, it will wait up until + 'polling_interval' expires before moving back and checking on queueA. +""" + +from __future__ import absolute_import + +import collections +import socket +import string + +from anyjson import loads, dumps + +import boto +from boto import exception +from boto import sdb as _sdb +from boto import sqs as _sqs +from boto.sdb.domain import Domain +from boto.sdb.connection import SDBConnection +from boto.sqs.connection import SQSConnection +from boto.sqs.message import Message + +from kombu.five import Empty, range, text_t +from kombu.log import get_logger +from kombu.utils import cached_property, uuid +from kombu.utils.encoding import bytes_to_str, safe_str +from kombu.transport.virtual import scheduling + +from . import virtual + +logger = get_logger(__name__) + +# dots are replaced by dash, all other punctuation +# replaced by underscore. +CHARS_REPLACE_TABLE = dict((ord(c), 0x5f) + for c in string.punctuation if c not in '-_.') +CHARS_REPLACE_TABLE[0x2e] = 0x2d # '.' -> '-' + + +def maybe_int(x): + try: + return int(x) + except ValueError: + return x +BOTO_VERSION = tuple(maybe_int(part) for part in boto.__version__.split('.')) +W_LONG_POLLING = BOTO_VERSION >= (2, 8) + +#: SQS bulk get supports a maximum of 10 messages at a time. +SQS_MAX_MESSAGES = 10 + + +class Table(Domain): + """Amazon SimpleDB domain describing the message routing table.""" + # caches queues already bound, so we don't have to declare them again. + _already_bound = set() + + def routes_for(self, exchange): + """Iterator giving all routes for an exchange.""" + return self.select("""WHERE exchange = '%s'""" % exchange) + + def get_queue(self, queue): + """Get binding for queue.""" + qid = self._get_queue_id(queue) + if qid: + return self.get_item(qid) + + def create_binding(self, queue): + """Get binding item for queue. + + Creates the item if it doesn't exist. + + """ + item = self.get_queue(queue) + if item: + return item, item['id'] + id = uuid() + return self.new_item(id), id + + def queue_bind(self, exchange, routing_key, pattern, queue): + if queue not in self._already_bound: + binding, id = self.create_binding(queue) + binding.update(exchange=exchange, + routing_key=routing_key or '', + pattern=pattern or '', + queue=queue or '', + id=id) + binding.save() + self._already_bound.add(queue) + + def queue_delete(self, queue): + """delete queue by name.""" + self._already_bound.discard(queue) + item = self._get_queue_item(queue) + if item: + self.delete_item(item) + + def exchange_delete(self, exchange): + """Delete all routes for `exchange`.""" + for item in self.routes_for(exchange): + self.delete_item(item['id']) + + def get_item(self, item_name): + """Uses `consistent_read` by default.""" + # Domain is an old-style class, can't use super(). 
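+        # Try an eventually consistent read first (cheaper/faster), then fall
+        # back to a consistent read if the item was not found yet.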
+ for consistent_read in (False, True): + item = Domain.get_item(self, item_name, consistent_read) + if item: + return item + + def select(self, query='', next_token=None, + consistent_read=True, max_items=None): + """Uses `consistent_read` by default.""" + query = """SELECT * FROM `%s` %s""" % (self.name, query) + return Domain.select(self, query, next_token, + consistent_read, max_items) + + def _try_first(self, query='', **kwargs): + for c in (False, True): + for item in self.select(query, consistent_read=c, **kwargs): + return item + + def get_exchanges(self): + return list(set(i['exchange'] for i in self.select())) + + def _get_queue_item(self, queue): + return self._try_first("""WHERE queue = '%s' limit 1""" % queue) + + def _get_queue_id(self, queue): + item = self._get_queue_item(queue) + if item: + return item['id'] + + +class Channel(virtual.Channel): + Table = Table + + default_region = 'us-east-1' + default_visibility_timeout = 1800 # 30 minutes. + default_wait_time_seconds = 0 # disabled see #198 + domain_format = 'kombu%(vhost)s' + _sdb = None + _sqs = None + _queue_cache = {} + _noack_queues = set() + + def __init__(self, *args, **kwargs): + super(Channel, self).__init__(*args, **kwargs) + + # SQS blows up when you try to create a new queue if one already + # exists with a different visibility_timeout, so this prepopulates + # the queue_cache to protect us from recreating + # queues that are known to already exist. + queues = self.sqs.get_all_queues(prefix=self.queue_name_prefix) + for queue in queues: + self._queue_cache[queue.name] = queue + self._fanout_queues = set() + + # The drain_events() method stores extra messages in a local + # Deque object. This allows multiple messages to be requested from + # SQS at once for performance, but maintains the same external API + # to the caller of the drain_events() method. + self._queue_message_cache = collections.deque() + + def basic_consume(self, queue, no_ack, *args, **kwargs): + if no_ack: + self._noack_queues.add(queue) + return super(Channel, self).basic_consume( + queue, no_ack, *args, **kwargs + ) + + def basic_cancel(self, consumer_tag): + if consumer_tag in self._consumers: + queue = self._tag_to_queue[consumer_tag] + self._noack_queues.discard(queue) + return super(Channel, self).basic_cancel(consumer_tag) + + def drain_events(self, timeout=None): + """Return a single payload message from one of our queues. + + :raises Empty: if no messages available. + + """ + # If we're not allowed to consume or have no consumers, raise Empty + if not self._consumers or not self.qos.can_consume(): + raise Empty() + message_cache = self._queue_message_cache + + # Check if there are any items in our buffer. If there are any, pop + # off that queue first. + try: + return message_cache.popleft() + except IndexError: + pass + + # At this point, go and get more messages from SQS + res, queue = self._poll(self.cycle, timeout=timeout) + message_cache.extend((r, queue) for r in res) + + # Now try to pop off the queue again. + try: + return message_cache.popleft() + except IndexError: + raise Empty() + + def _reset_cycle(self): + """Reset the consume cycle. + + :returns: a FairCycle object that points to our _get_bulk() method + rather than the standard _get() method. This allows for multiple + messages to be returned at once from SQS (based on the prefetch + limit). 
+ + """ + self._cycle = scheduling.FairCycle( + self._get_bulk, self._active_queues, Empty, + ) + + def entity_name(self, name, table=CHARS_REPLACE_TABLE): + """Format AMQP queue name into a legal SQS queue name.""" + return text_t(safe_str(name)).translate(table) + + def _new_queue(self, queue, **kwargs): + """Ensure a queue with given name exists in SQS.""" + # Translate to SQS name for consistency with initial + # _queue_cache population. + queue = self.entity_name(self.queue_name_prefix + queue) + try: + return self._queue_cache[queue] + except KeyError: + q = self._queue_cache[queue] = self.sqs.create_queue( + queue, self.visibility_timeout, + ) + return q + + def queue_bind(self, queue, exchange=None, routing_key='', + arguments=None, **kwargs): + super(Channel, self).queue_bind(queue, exchange, routing_key, + arguments, **kwargs) + if self.typeof(exchange).type == 'fanout': + self._fanout_queues.add(queue) + + def _queue_bind(self, *args): + """Bind ``queue`` to ``exchange`` with routing key. + + Route will be stored in SDB if so enabled. + + """ + if self.supports_fanout: + self.table.queue_bind(*args) + + def get_table(self, exchange): + """Get routing table. + + Retrieved from SDB if :attr:`supports_fanout`. + + """ + if self.supports_fanout: + return [(r['routing_key'], r['pattern'], r['queue']) + for r in self.table.routes_for(exchange)] + return super(Channel, self).get_table(exchange) + + def get_exchanges(self): + if self.supports_fanout: + return self.table.get_exchanges() + return super(Channel, self).get_exchanges() + + def _delete(self, queue, *args): + """delete queue by name.""" + if self.supports_fanout: + self.table.queue_delete(queue) + super(Channel, self)._delete(queue) + self._queue_cache.pop(queue, None) + + def exchange_delete(self, exchange, **kwargs): + """Delete exchange by name.""" + if self.supports_fanout: + self.table.exchange_delete(exchange) + super(Channel, self).exchange_delete(exchange, **kwargs) + + def _has_queue(self, queue, **kwargs): + """Return True if ``queue`` was previously declared.""" + if self.supports_fanout: + return bool(self.table.get_queue(queue)) + return super(Channel, self)._has_queue(queue) + + def _put(self, queue, message, **kwargs): + """Put message onto queue.""" + q = self._new_queue(queue) + m = Message() + m.set_body(dumps(message)) + q.write(m) + + def _put_fanout(self, exchange, message, routing_key, **kwargs): + """Deliver fanout message to all queues in ``exchange``.""" + for route in self.table.routes_for(exchange): + self._put(route['queue'], message, **kwargs) + + def _get_from_sqs(self, queue, count=1): + """Retrieve messages from SQS and returns the raw SQS message objects. + + :returns: List of SQS message objects + + """ + q = self._new_queue(queue) + if W_LONG_POLLING and queue not in self._fanout_queues: + return q.get_messages( + count, wait_time_seconds=self.wait_time_seconds, + ) + else: # boto < 2.8 + return q.get_messages(count) + + def _message_to_python(self, message, queue_name, queue): + payload = loads(bytes_to_str(message.get_body())) + if queue_name in self._noack_queues: + queue.delete_message(message) + else: + payload['properties']['delivery_info'].update({ + 'sqs_message': message, 'sqs_queue': queue, + }) + return payload + + def _messages_to_python(self, messages, queue): + """Convert a list of SQS Message objects into Payloads. + + This method handles converting SQS Message objects into + Payloads, and appropriately updating the queue depending on + the 'ack' settings for that queue. 
+ + :param messages: A list of SQS Message objects. + :param queue: String name representing the queue they came from + + :returns: A list of Payload objects + + """ + q = self._new_queue(queue) + return [self._message_to_python(m, queue, q) for m in messages] + + def _get_bulk(self, queue, max_if_unlimited=SQS_MAX_MESSAGES): + """Try to retrieve multiple messages off ``queue``. + + Where _get() returns a single Payload object, this method returns a + list of Payload objects. The number of objects returned is determined + by the total number of messages available in the queue and the + number of messages that the QoS object allows (based on the + prefetch_count). + + .. note:: + Ignores QoS limits so caller is responsible for checking + that we are allowed to consume at least one message from the + queue. get_bulk will then ask QoS for an estimate of + the number of extra messages that we can consume. + + args: + queue: The queue name (string) to pull from + + returns: + payloads: A list of payload objects returned + """ + # drain_events calls `can_consume` first, consuming + # a token, so we know that we are allowed to consume at least + # one message. + maxcount = self.qos.can_consume_max_estimate() + maxcount = max_if_unlimited if maxcount is None else max(maxcount, 1) + if maxcount: + messages = self._get_from_sqs( + queue, count=min(maxcount, SQS_MAX_MESSAGES), + ) + + if messages: + return self._messages_to_python(messages, queue) + raise Empty() + + def _get(self, queue): + """Try to retrieve a single message off ``queue``.""" + messages = self._get_from_sqs(queue, count=1) + + if messages: + return self._messages_to_python(messages, queue)[0] + raise Empty() + + def _restore(self, message, + unwanted_delivery_info=('sqs_message', 'sqs_queue')): + for unwanted_key in unwanted_delivery_info: + # Remove objects that aren't JSON serializable (Issue #1108). + message.delivery_info.pop(unwanted_key, None) + return super(Channel, self)._restore(message) + + def basic_ack(self, delivery_tag): + delivery_info = self.qos.get(delivery_tag).delivery_info + try: + queue = delivery_info['sqs_queue'] + except KeyError: + pass + else: + queue.delete_message(delivery_info['sqs_message']) + super(Channel, self).basic_ack(delivery_tag) + + def _size(self, queue): + """Return the number of messages in a queue.""" + return self._new_queue(queue).count() + + def _purge(self, queue): + """Delete all current messages in a queue.""" + q = self._new_queue(queue) + # SQS is slow at registering messages, so run for a few + # iterations to ensure messages are deleted. + size = 0 + for i in range(10): + size += q.count() + if not size: + break + q.clear() + return size + + def close(self): + super(Channel, self).close() + for conn in (self._sqs, self._sdb): + if conn: + try: + conn.close() + except AttributeError as exc: # FIXME ??? 
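+                    # Some boto connection objects raise AttributeError from
+                    # close(); ignore only that specific failure and re-raise
+                    # anything else.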
+ if "can't set attribute" not in str(exc): + raise + + def _get_regioninfo(self, regions): + if self.region: + for _r in regions: + if _r.name == self.region: + return _r + + def _aws_connect_to(self, fun, regions): + conninfo = self.conninfo + region = self._get_regioninfo(regions) + return fun(region=region, + aws_access_key_id=conninfo.userid, + aws_secret_access_key=conninfo.password, + port=conninfo.port) + + @property + def sqs(self): + if self._sqs is None: + self._sqs = self._aws_connect_to(SQSConnection, _sqs.regions()) + return self._sqs + + @property + def sdb(self): + if self._sdb is None: + self._sdb = self._aws_connect_to(SDBConnection, _sdb.regions()) + return self._sdb + + @property + def table(self): + name = self.entity_name( + self.domain_format % {'vhost': self.conninfo.virtual_host}) + d = self.sdb.get_object( + 'CreateDomain', {'DomainName': name}, self.Table) + d.name = name + return d + + @property + def conninfo(self): + return self.connection.client + + @property + def transport_options(self): + return self.connection.client.transport_options + + @cached_property + def visibility_timeout(self): + return (self.transport_options.get('visibility_timeout') or + self.default_visibility_timeout) + + @cached_property + def queue_name_prefix(self): + return self.transport_options.get('queue_name_prefix', '') + + @cached_property + def supports_fanout(self): + return self.transport_options.get('sdb_persistence', False) + + @cached_property + def region(self): + return self.transport_options.get('region') or self.default_region + + @cached_property + def wait_time_seconds(self): + return self.transport_options.get('wait_time_seconds', + self.default_wait_time_seconds) + + +class Transport(virtual.Transport): + Channel = Channel + + polling_interval = 1 + wait_time_seconds = 0 + default_port = None + connection_errors = ( + virtual.Transport.connection_errors + + (exception.SQSError, socket.error) + ) + channel_errors = ( + virtual.Transport.channel_errors + (exception.SQSDecodeError, ) + ) + driver_type = 'sqs' + driver_name = 'sqs' + +# encoding: utf-8 +# Copyright 2016 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Text processor tests.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from tensorflow.contrib.learn.python.learn.preprocessing import CategoricalVocabulary +from tensorflow.contrib.learn.python.learn.preprocessing import text +from tensorflow.python.platform import test + + +class TextTest(test.TestCase): + """Text processor tests.""" + + def testTokenizer(self): + words = text.tokenizer( + ["a b c", "a\nb\nc", "a, b - c", "фыв выф", "你好 怎么样"]) + self.assertEqual( + list(words), [["a", "b", "c"], ["a", "b", "c"], ["a", "b", "-", "c"], + ["фыв", "выф"], ["你好", "怎么样"]]) + + def testByteProcessor(self): + processor = text.ByteProcessor(max_document_length=8) + inp = ["abc", "фыва", "фыва", b"abc", "12345678901234567890"] + res = list(processor.fit_transform(inp)) + self.assertAllEqual(res, [[97, 98, 99, 0, 0, 0, 0, 0], + [209, 132, 209, 139, 208, 178, 208, 176], + [209, 132, 209, 139, 208, 178, 208, 176], + [97, 98, 99, 0, 0, 0, 0, 0], + [49, 50, 51, 52, 53, 54, 55, 56]]) + res = list(processor.reverse(res)) + self.assertAllEqual(res, ["abc", "фыва", "фыва", "abc", "12345678"]) + + def testVocabularyProcessor(self): + vocab_processor = text.VocabularyProcessor( + max_document_length=4, min_frequency=1) + tokens = vocab_processor.fit_transform(["a b c", "a\nb\nc", "a, b - c"]) + self.assertAllEqual( + list(tokens), [[1, 2, 3, 0], [1, 2, 3, 0], [1, 2, 0, 3]]) + + def testVocabularyProcessorSaveRestore(self): + filename = test.get_temp_dir() + "test.vocab" + vocab_processor = text.VocabularyProcessor( + max_document_length=4, min_frequency=1) + tokens = vocab_processor.fit_transform(["a b c", "a\nb\nc", "a, b - c"]) + vocab_processor.save(filename) + new_vocab = text.VocabularyProcessor.restore(filename) + tokens = new_vocab.transform(["a b c"]) + self.assertAllEqual(list(tokens), [[1, 2, 3, 0]]) + + def testExistingVocabularyProcessor(self): + vocab = CategoricalVocabulary() + vocab.get("A") + vocab.get("B") + vocab.freeze() + vocab_processor = text.VocabularyProcessor( + max_document_length=4, vocabulary=vocab, tokenizer_fn=list) + tokens = vocab_processor.fit_transform(["ABC", "CBABAF"]) + self.assertAllEqual(list(tokens), [[1, 2, 0, 0], [0, 2, 1, 2]]) + + +if __name__ == "__main__": + test.main() + +# -*- coding: utf-8 -*- +""" + jinja2.debug + ~~~~~~~~~~~~ + + Implements the debug interface for Jinja. This module does some pretty + ugly stuff with the Python traceback system in order to achieve tracebacks + with correct line numbers, locals and contents. + + :copyright: (c) 2010 by the Jinja Team. + :license: BSD, see LICENSE for more details. +""" +import sys +import traceback +from types import TracebackType, CodeType +from jinja2.utils import missing, internal_code +from jinja2.exceptions import TemplateSyntaxError +from jinja2._compat import iteritems, reraise, PY2 + +# on pypy we can take advantage of transparent proxies +try: + from __pypy__ import tproxy +except ImportError: + tproxy = None + + +# how does the raise helper look like? 
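+# The exec() probe below checks whether the interpreter still accepts the
+# Python 2 ``raise type, value`` statement: on Python 3 it fails to compile
+# (SyntaxError), so only the single-argument re-raise helper can be used,
+# while on Python 2 it runs and raises TypeError, keeping the two-argument
+# form available.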
+try: + exec("raise TypeError, 'foo'") +except SyntaxError: + raise_helper = 'raise __jinja_exception__[1]' +except TypeError: + raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]' + + +class TracebackFrameProxy(object): + """Proxies a traceback frame.""" + + def __init__(self, tb): + self.tb = tb + self._tb_next = None + + @property + def tb_next(self): + return self._tb_next + + def set_next(self, next): + if tb_set_next is not None: + try: + tb_set_next(self.tb, next and next.tb or None) + except Exception: + # this function can fail due to all the hackery it does + # on various python implementations. We just catch errors + # down and ignore them if necessary. + pass + self._tb_next = next + + @property + def is_jinja_frame(self): + return '__jinja_template__' in self.tb.tb_frame.f_globals + + def __getattr__(self, name): + return getattr(self.tb, name) + + +def make_frame_proxy(frame): + proxy = TracebackFrameProxy(frame) + if tproxy is None: + return proxy + def operation_handler(operation, *args, **kwargs): + if operation in ('__getattribute__', '__getattr__'): + return getattr(proxy, args[0]) + elif operation == '__setattr__': + proxy.__setattr__(*args, **kwargs) + else: + return getattr(proxy, operation)(*args, **kwargs) + return tproxy(TracebackType, operation_handler) + + +class ProcessedTraceback(object): + """Holds a Jinja preprocessed traceback for printing or reraising.""" + + def __init__(self, exc_type, exc_value, frames): + assert frames, 'no frames for this traceback?' + self.exc_type = exc_type + self.exc_value = exc_value + self.frames = frames + + # newly concatenate the frames (which are proxies) + prev_tb = None + for tb in self.frames: + if prev_tb is not None: + prev_tb.set_next(tb) + prev_tb = tb + prev_tb.set_next(None) + + def render_as_text(self, limit=None): + """Return a string with the traceback.""" + lines = traceback.format_exception(self.exc_type, self.exc_value, + self.frames[0], limit=limit) + return ''.join(lines).rstrip() + + def render_as_html(self, full=False): + """Return a unicode string with the traceback as rendered HTML.""" + from jinja2.debugrenderer import render_traceback + return u'%s\n\n' % ( + render_traceback(self, full=full), + self.render_as_text().decode('utf-8', 'replace') + ) + + @property + def is_template_syntax_error(self): + """`True` if this is a template syntax error.""" + return isinstance(self.exc_value, TemplateSyntaxError) + + @property + def exc_info(self): + """Exception info tuple with a proxy around the frame objects.""" + return self.exc_type, self.exc_value, self.frames[0] + + @property + def standard_exc_info(self): + """Standard python exc_info for re-raising""" + tb = self.frames[0] + # the frame will be an actual traceback (or transparent proxy) if + # we are on pypy or a python implementation with support for tproxy + if type(tb) is not TracebackType: + tb = tb.tb + return self.exc_type, self.exc_value, tb + + +def make_traceback(exc_info, source_hint=None): + """Creates a processed traceback object from the exc_info.""" + exc_type, exc_value, tb = exc_info + if isinstance(exc_value, TemplateSyntaxError): + exc_info = translate_syntax_error(exc_value, source_hint) + initial_skip = 0 + else: + initial_skip = 1 + return translate_exception(exc_info, initial_skip) + + +def translate_syntax_error(error, source=None): + """Rewrites a syntax error to please traceback systems.""" + error.source = source + error.translated = True + exc_info = (error.__class__, error, None) + filename = error.filename 
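+    # templates compiled from a plain string have no filename to report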
+ if filename is None: + filename = '' + return fake_exc_info(exc_info, filename, error.lineno) + + +def translate_exception(exc_info, initial_skip=0): + """If passed an exc_info it will automatically rewrite the exceptions + all the way down to the correct line numbers and frames. + """ + tb = exc_info[2] + frames = [] + + # skip some internal frames if wanted + for x in range(initial_skip): + if tb is not None: + tb = tb.tb_next + initial_tb = tb + + while tb is not None: + # skip frames decorated with @internalcode. These are internal + # calls we can't avoid and that are useless in template debugging + # output. + if tb.tb_frame.f_code in internal_code: + tb = tb.tb_next + continue + + # save a reference to the next frame if we override the current + # one with a faked one. + next = tb.tb_next + + # fake template exceptions + template = tb.tb_frame.f_globals.get('__jinja_template__') + if template is not None: + lineno = template.get_corresponding_lineno(tb.tb_lineno) + tb = fake_exc_info(exc_info[:2] + (tb,), template.filename, + lineno)[2] + + frames.append(make_frame_proxy(tb)) + tb = next + + # if we don't have any exceptions in the frames left, we have to + # reraise it unchanged. + # XXX: can we backup here? when could this happen? + if not frames: + reraise(exc_info[0], exc_info[1], exc_info[2]) + + return ProcessedTraceback(exc_info[0], exc_info[1], frames) + + +def fake_exc_info(exc_info, filename, lineno): + """Helper for `translate_exception`.""" + exc_type, exc_value, tb = exc_info + + # figure the real context out + if tb is not None: + real_locals = tb.tb_frame.f_locals.copy() + ctx = real_locals.get('context') + if ctx: + locals = ctx.get_all() + else: + locals = {} + for name, value in iteritems(real_locals): + if name.startswith('l_') and value is not missing: + locals[name[2:]] = value + + # if there is a local called __jinja_exception__, we get + # rid of it to not break the debug functionality. + locals.pop('__jinja_exception__', None) + else: + locals = {} + + # assamble fake globals we need + globals = { + '__name__': filename, + '__file__': filename, + '__jinja_exception__': exc_info[:2], + + # we don't want to keep the reference to the template around + # to not cause circular dependencies, but we mark it as Jinja + # frame for the ProcessedTraceback + '__jinja_template__': None + } + + # and fake the exception + code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec') + + # if it's possible, change the name of the code. 
This won't work + # on some python environments such as google appengine + try: + if tb is None: + location = 'template' + else: + function = tb.tb_frame.f_code.co_name + if function == 'root': + location = 'top-level template code' + elif function.startswith('block_'): + location = 'block "%s"' % function[6:] + else: + location = 'template' + + if PY2: + code = CodeType(0, code.co_nlocals, code.co_stacksize, + code.co_flags, code.co_code, code.co_consts, + code.co_names, code.co_varnames, filename, + location, code.co_firstlineno, + code.co_lnotab, (), ()) + else: + code = CodeType(0, code.co_kwonlyargcount, + code.co_nlocals, code.co_stacksize, + code.co_flags, code.co_code, code.co_consts, + code.co_names, code.co_varnames, filename, + location, code.co_firstlineno, + code.co_lnotab, (), ()) + except Exception as e: + pass + + # execute the code and catch the new traceback + try: + exec(code, globals, locals) + except: + exc_info = sys.exc_info() + new_tb = exc_info[2].tb_next + + # return without this frame + return exc_info[:2] + (new_tb,) + + +def _init_ugly_crap(): + """This function implements a few ugly things so that we can patch the + traceback objects. The function returned allows resetting `tb_next` on + any python traceback object. Do not attempt to use this on non cpython + interpreters + """ + import ctypes + from types import TracebackType + + if PY2: + # figure out size of _Py_ssize_t for Python 2: + if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'): + _Py_ssize_t = ctypes.c_int64 + else: + _Py_ssize_t = ctypes.c_int + else: + # platform ssize_t on Python 3 + _Py_ssize_t = ctypes.c_ssize_t + + # regular python + class _PyObject(ctypes.Structure): + pass + _PyObject._fields_ = [ + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + # python with trace + if hasattr(sys, 'getobjects'): + class _PyObject(ctypes.Structure): + pass + _PyObject._fields_ = [ + ('_ob_next', ctypes.POINTER(_PyObject)), + ('_ob_prev', ctypes.POINTER(_PyObject)), + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + class _Traceback(_PyObject): + pass + _Traceback._fields_ = [ + ('tb_next', ctypes.POINTER(_Traceback)), + ('tb_frame', ctypes.POINTER(_PyObject)), + ('tb_lasti', ctypes.c_int), + ('tb_lineno', ctypes.c_int) + ] + + def tb_set_next(tb, next): + """Set the tb_next attribute of a traceback object.""" + if not (isinstance(tb, TracebackType) and + (next is None or isinstance(next, TracebackType))): + raise TypeError('tb_set_next arguments must be traceback objects') + obj = _Traceback.from_address(id(tb)) + if tb.tb_next is not None: + old = _Traceback.from_address(id(tb.tb_next)) + old.ob_refcnt -= 1 + if next is None: + obj.tb_next = ctypes.POINTER(_Traceback)() + else: + next = _Traceback.from_address(id(next)) + next.ob_refcnt += 1 + obj.tb_next = ctypes.pointer(next) + + return tb_set_next + + +# try to get a tb_set_next implementation if we don't have transparent +# proxies. +tb_set_next = None +if tproxy is None: + try: + tb_set_next = _init_ugly_crap() + except: + pass + del _init_ugly_crap + +# Part of Odoo. See LICENSE file for full copyright and licensing details. 
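+# The web.tip model below stores the in-product tips shown by the web client:
+# the CSS selector fields describe where a tip is triggered, highlighted and
+# dismissed, and user_ids records which users have already consumed it.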
+ +from openerp import models, fields, api + + +class tip(models.Model): + _name = 'web.tip' + _description = 'Tips' + + @api.one + @api.depends('user_ids') + def _is_consumed(self): + self.is_consumed = self.env.user in self.user_ids + + title = fields.Char('Tip title') + description = fields.Html('Tip Description', required=True) + action_id = fields.Many2one('ir.actions.act_window', string="Action", + help="The action that will trigger the tip") + model = fields.Char("Model", help="Model name on which to trigger the tip, e.g. 'res.partner'.") + type = fields.Char("Type", help="Model type, e.g. lead or opportunity for crm.lead") + mode = fields.Char("Mode", help="Mode, e.g. kanban, form") + trigger_selector = fields.Char('Trigger selector', help='CSS selectors used to trigger the tip, separated by a comma (ANDed).') + highlight_selector = fields.Char('Highlight selector', help='CSS selector for the element to highlight') + end_selector = fields.Char('End selector', help='CSS selector used to end the tip') + end_event = fields.Char('End event', help='Event to end the tip', default='click') + placement = fields.Char('Placement', help='Popover placement, bottom, top, left or right', default='auto') + user_ids = fields.Many2many('res.users', string='Consumed by') + is_consumed = fields.Boolean(string='Tip consumed', compute='_is_consumed') + + @api.multi + def consume(self): + self.write({'user_ids': [(4, self.env.uid)]}) + +import os +import sys + +if os.name == 'posix': + def become_daemon(our_home_dir='.', out_log='/dev/null', + err_log='/dev/null', umask=022): + "Robustly turn into a UNIX daemon, running in our_home_dir." + # First fork + try: + if os.fork() > 0: + sys.exit(0) # kill off parent + except OSError, e: + sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror)) + sys.exit(1) + os.setsid() + os.chdir(our_home_dir) + os.umask(umask) + + # Second fork + try: + if os.fork() > 0: + os._exit(0) + except OSError, e: + sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror)) + os._exit(1) + + si = open('/dev/null', 'r') + so = open(out_log, 'a+', 0) + se = open(err_log, 'a+', 0) + os.dup2(si.fileno(), sys.stdin.fileno()) + os.dup2(so.fileno(), sys.stdout.fileno()) + os.dup2(se.fileno(), sys.stderr.fileno()) + # Set custom file descriptors so that they get proper buffering. + sys.stdout, sys.stderr = so, se +else: + def become_daemon(our_home_dir='.', out_log=None, err_log=None, umask=022): + """ + If we're not running under a POSIX system, just simulate the daemon + mode by doing redirections and directory changing. + """ + os.chdir(our_home_dir) + os.umask(umask) + sys.stdin.close() + sys.stdout.close() + sys.stderr.close() + if err_log: + sys.stderr = open(err_log, 'a', 0) + else: + sys.stderr = NullDevice() + if out_log: + sys.stdout = open(out_log, 'a', 0) + else: + sys.stdout = NullDevice() + + class NullDevice: + "A writeable object that writes to nowhere -- like /dev/null." + def write(self, s): + pass + +from __future__ import print_function, division + +from sympy import expand +from sympy import diff +from sympy import Sum + +def finite_diff(expression, variable, increment=1): + """ + Takes as input a polynomial expression and the variable used to construct + it and returns the difference between function's value when the input is + incremented to 1 and the original function value. If you want an increment + other than one supply it as a third argument. 
+ + Examples + ========= + + >>> from sympy.abc import x, y, z, k, n + >>> from sympy.series.kauers import finite_diff + >>> from sympy import Sum + >>> finite_diff(x**2, x) + 2*x + 1 + >>> finite_diff(y**3 + 2*y**2 + 3*y + 4, y) + 3*y**2 + 7*y + 6 + >>> finite_diff(x**2 + 3*x + 8, x, 2) + 4*x + 10 + >>> finite_diff(z**3 + 8*z, z, 3) + 9*z**2 + 27*z + 51 + """ + expression = expression.expand() + expression2 = expression.subs(variable, variable + increment) + expression2 = expression2.expand() + return expression2 - expression + +def finite_diff_kauers(sum): + """ + Takes as input a Sum instance and returns the difference between the sum + with the upper index incremented by 1 and the original sum. For example, + if S(n) is a sum, then finite_diff_kauers will return S(n + 1) - S(n). + + Examples + ======== + + >>> from sympy.series.kauers import finite_diff_kauers + >>> from sympy import Sum + >>> from sympy.abc import x, y, m, n, k + >>> finite_diff_kauers(Sum(k, (k, 1, n))) + n + 1 + >>> finite_diff_kauers(Sum(1/k, (k, 1, n))) + 1/(n + 1) + >>> finite_diff_kauers(Sum((x*y**2), (x, 1, n), (y, 1, m))) + (m + 1)**2*(n + 1) + >>> finite_diff_kauers(Sum((x*y), (x, 1, m), (y, 1, n))) + (m + 1)*(n + 1) + """ + function = sum.function + for l in sum.limits: + function = function.subs(l[0], l[- 1] + 1) + return function + +import sys + +try: + import simplejson as json +except ImportError: + import json + +from libcloud.common.luadns import LuadnsResponse, LuadnsConnection +from libcloud.common.luadns import LuadnsException +from libcloud.dns.base import DNSDriver, Zone, Record +from libcloud.dns.types import Provider, RecordType +from libcloud.dns.types import ZoneDoesNotExistError, ZoneAlreadyExistsError +from libcloud.dns.types import RecordDoesNotExistError + + +__all__ = [ + 'LuadnsDNSDriver' +] + + +class LuadnsDNSResponse(LuadnsResponse): + pass + + +class LuadnsDNSConnection(LuadnsConnection): + responseCls = LuadnsDNSResponse + + +class LuadnsDNSDriver(DNSDriver): + type = Provider.LUADNS + name = 'Luadns' + website = 'https://www.luadns.com' + connectionCls = LuadnsDNSConnection + + RECORD_TYPE_MAP = { + RecordType.A: 'A', + RecordType.AAAA: 'AAAA', + RecordType.CNAME: 'CNAME', + RecordType.MX: 'MX', + RecordType.NS: 'NS', + RecordType.PTR: 'PTR', + RecordType.SOA: 'SOA', + RecordType.SRV: 'SRV', + RecordType.TXT: 'TXT' + } + + def list_zones(self): + """ + Return a list of zones. + + :return: ``list`` of :class:`Zone` + """ + action = '/v1/zones' + response = self.connection.request(action=action, + method='GET') + zones = self._to_zones(response.parse_body()) + + return zones + + def get_zone(self, zone_id): + """ + Return a Zone instance. + + :param zone_id: ID of the required zone + :type zone_id: ``str`` + + :rtype: :class:`Zone` + """ + action = '/v1/zones/%s' % zone_id + try: + response = self.connection.request(action=action) + except LuadnsException: + e = sys.exc_info()[1] + if e.message in ['Zone not found.', 'Resource not found.']: + raise ZoneDoesNotExistError(zone_id=zone_id, + value='', driver=self) + else: + raise e + + zone = self._to_zone(response.parse_body()) + + return zone + + def delete_zone(self, zone): + """ + Delete a zone. + + Note: This will delete all the records belonging to this zone. + + :param zone: Zone to delete. 
+ :type zone: :class:`Zone` + + :rtype: ``bool`` + """ + action = '/v1/zones/%s' % zone.id + try: + response = self.connection.request(action=action, + method='DELETE') + except LuadnsException: + e = sys.exc_info()[1] + if e.message in ['Resource not found.', 'Zone not found.']: + raise ZoneDoesNotExistError(zone_id=zone.id, + value='', driver=self) + else: + raise e + + return response.status == 200 + + def create_zone(self, domain, type='master', ttl=None, extra=None): + """ + Create a new zone. + + :param domain: Zone domain name (e.g. example.com) + :type domain: ``str`` + + :param type: Zone type (This is not really used. See API docs for extra + parameters). + :type type: ``str`` + + :param ttl: TTL for new records. (This is not really used) + :type ttl: ``int`` + + :param extra: Extra attributes (driver specific). ('region_support', + 'zone_data') + :type extra: ``dict`` + + :rtype: :class:`Zone` + """ + action = '/v1/zones' + data = json.dumps({'name': domain}) + try: + response = self.connection.request(action=action, + method='POST', + data=data) + except LuadnsException: + e = sys.exc_info()[1] + if e.message == "Zone '%s' is taken already." % domain: + raise ZoneAlreadyExistsError(zone_id=domain, + value='', + driver=self) + else: + raise e + zone = self._to_zone(response.parse_body()) + + return zone + + def list_records(self, zone): + """ + Return a list of records for the provided zone. + + :param zone: Zone to list records for. + :type zone: :class:`Zone` + + :return: ``list`` of :class:`Record` + """ + action = '/v1/zones/%s/records' % zone.id + response = self.connection.request(action=action) + records = self._to_records(response.parse_body(), zone=zone) + + return records + + def get_record(self, zone_id, record_id): + """ + Return a Record instance. + + :param zone_id: ID of the required zone + :type zone_id: ``str`` + + :param record_id: ID of the required record + :type record_id: ``str`` + + :rtype: :class:`Record` + """ + zone = self.get_zone(zone_id=zone_id) + action = '/v1/zones/%s/records/%s' % (zone_id, record_id) + try: + response = self.connection.request(action=action) + except LuadnsException: + e = sys.exc_info()[1] + if e.message == 'Record not found.': + raise RecordDoesNotExistError(record_id=record_id, driver=self, + value='') + else: + raise e + + record = self._to_record(response.parse_body(), zone=zone) + + return record + + def delete_record(self, record): + """ + Delete a record. + + :param record: Record to delete. + :type record: :class:`Record` + + :rtype: ``bool`` + """ + action = '/v1/zones/%s/records/%s' % (record.zone.id, record.id) + try: + response = self.connection.request(action=action, + method='DELETE') + except LuadnsException: + e = sys.exc_info()[1] + if e.message == 'Record not found.': + raise RecordDoesNotExistError(record_id=record.id, driver=self, + value='') + else: + raise e + + return response.status == 200 + + def create_record(self, name, zone, type, data, extra=None): + """ + Create a record. + + :param name: Record name without the domain name (e.g. www). + Note: If you want to create a record for a base domain + name, you should specify empty string ('') for this + argument. + :type name: ``str`` + + :param zone: Zone which the records will be created for. + :type zone: :class:`Zone` + + :param type: DNS record type ( 'A', 'AAAA', 'CNAME', 'MX', 'NS', + 'PTR', 'SOA', 'SRV', 'TXT'). + :type type: :class:`RecordType` + + :param data: Data for the record (depends on the record type). 
+ :type data: ``str`` + + :param extra: (optional) Extra attributes ('prio', 'ttl'). + :type extra: ``dict`` + + :rtype: :class:`Record` + """ + action = '/v1/zones/%s/records' % zone.id + to_post = {'name': name, 'content': data, 'type': type, + 'zone_id': int(zone.id)} + # ttl is required to create a record for luadns + # pass it through extra like this: extra={'ttl':ttl} + if extra is not None: + to_post.update(extra) + data = json.dumps(to_post) + try: + response = self.connection.request(action=action, + method='POST', + data=data) + except LuadnsException: + e = sys.exc_info()[1] + raise e + + record = self._to_record(response.parse_body(), zone=zone) + + return record + + def _to_zone(self, item): + common_attr = ['id', 'name'] + extra = {} + for key in item: + if key not in common_attr: + extra[key] = item.get(key) + zone = Zone(domain=item['name'], id=item['id'], type=None, + ttl=None, driver=self, extra=extra) + + return zone + + def _to_zones(self, items): + zones = [] + for item in items: + zones.append(self._to_zone(item)) + + return zones + + def _to_record(self, item, zone): + common_attr = ['id', 'content', 'name', 'type'] + extra = {} + for key in item: + if key not in common_attr: + extra[key] = item.get(key) + record = Record(id=item['id'], name=item['name'], type=item['type'], + data=item['content'], zone=zone, driver=self, + extra=extra) + + return record + + def _to_records(self, items, zone): + records = [] + for item in items: + records.append(self._to_record(item, zone)) + + return records + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
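+
+# Deep Embedded Clustering (DEC) reference script: an autoencoder is
+# pretrained, its encoder output z is clustered with k-means to initialise
+# the centres mu, and DECLoss computes the soft assignments
+# q_ij = (1 + ||z_i - mu_j||**2 / alpha) ** (-(alpha + 1) / 2), normalised
+# per sample, which are trained towards the sharpened target distribution p
+# built in refresh() below.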
+ +# pylint: skip-file +from __future__ import print_function + +import os +import logging +import numpy as np +from sklearn.cluster import KMeans +from scipy.spatial.distance import cdist +import mxnet as mx +import data +import model +from autoencoder import AutoEncoderModel +from solver import Solver, Monitor + + +def cluster_acc(Y_pred, Y): + from sklearn.utils.linear_assignment_ import linear_assignment + assert Y_pred.size == Y.size + D = max(Y_pred.max(), Y.max())+1 + w = np.zeros((D, D), dtype=np.int64) + for i in range(Y_pred.size): + w[Y_pred[i], int(Y[i])] += 1 + ind = linear_assignment(w.max() - w) + return sum([w[i, j] for i, j in ind])*1.0/Y_pred.size, w + + +class DECModel(model.MXModel): + class DECLoss(mx.operator.NumpyOp): + def __init__(self, num_centers, alpha): + super(DECModel.DECLoss, self).__init__(need_top_grad=False) + self.num_centers = num_centers + self.alpha = alpha + + def forward(self, in_data, out_data): + z = in_data[0] + mu = in_data[1] + q = out_data[0] + self.mask = 1.0/(1.0+cdist(z, mu)**2/self.alpha) + q[:] = self.mask**((self.alpha+1.0)/2.0) + q[:] = (q.T/q.sum(axis=1)).T + + def backward(self, out_grad, in_data, out_data, in_grad): + q = out_data[0] + z = in_data[0] + mu = in_data[1] + p = in_data[2] + dz = in_grad[0] + dmu = in_grad[1] + self.mask *= (self.alpha+1.0)/self.alpha*(p-q) + dz[:] = (z.T*self.mask.sum(axis=1)).T - self.mask.dot(mu) + dmu[:] = (mu.T*self.mask.sum(axis=0)).T - self.mask.T.dot(z) + + def infer_shape(self, in_shape): + assert len(in_shape) == 3 + assert len(in_shape[0]) == 2 + input_shape = in_shape[0] + label_shape = (input_shape[0], self.num_centers) + mu_shape = (self.num_centers, input_shape[1]) + out_shape = (input_shape[0], self.num_centers) + return [input_shape, mu_shape, label_shape], [out_shape] + + def list_arguments(self): + return ['data', 'mu', 'label'] + + def setup(self, X, num_centers, alpha, save_to='dec_model'): + sep = X.shape[0]*9//10 + X_train = X[:sep] + X_val = X[sep:] + ae_model = AutoEncoderModel(self.xpu, [X.shape[1], 500, 500, 2000, 10], pt_dropout=0.2) + if not os.path.exists(save_to+'_pt.arg'): + ae_model.layerwise_pretrain(X_train, 256, 50000, 'sgd', l_rate=0.1, decay=0.0, + lr_scheduler=mx.lr_scheduler.FactorScheduler(20000, 0.1)) + ae_model.finetune(X_train, 256, 100000, 'sgd', l_rate=0.1, decay=0.0, + lr_scheduler=mx.lr_scheduler.FactorScheduler(20000, 0.1)) + ae_model.save(save_to+'_pt.arg') + logging.log(logging.INFO, "Autoencoder Training error: %f"%ae_model.eval(X_train)) + logging.log(logging.INFO, "Autoencoder Validation error: %f"%ae_model.eval(X_val)) + else: + ae_model.load(save_to+'_pt.arg') + self.ae_model = ae_model + + self.dec_op = DECModel.DECLoss(num_centers, alpha) + label = mx.sym.Variable('label') + self.feature = self.ae_model.encoder + self.loss = self.dec_op(data=self.ae_model.encoder, label=label, name='dec') + self.args.update({k: v for k, v in self.ae_model.args.items() if k in self.ae_model.encoder.list_arguments()}) + self.args['dec_mu'] = mx.nd.empty((num_centers, self.ae_model.dims[-1]), ctx=self.xpu) + self.args_grad.update({k: mx.nd.empty(v.shape, ctx=self.xpu) for k, v in self.args.items()}) + self.args_mult.update({k: k.endswith('bias') and 2.0 or 1.0 for k in self.args}) + self.num_centers = num_centers + + def cluster(self, X, y=None, update_interval=None): + N = X.shape[0] + if not update_interval: + update_interval = N + batch_size = 256 + test_iter = mx.io.NDArrayIter({'data': X}, batch_size=batch_size, shuffle=False, + last_batch_handle='pad') + args = 
{k: mx.nd.array(v.asnumpy(), ctx=self.xpu) for k, v in self.args.items()} + z = list(model.extract_feature(self.feature, args, None, test_iter, N, self.xpu).values())[0] + kmeans = KMeans(self.num_centers, n_init=20) + kmeans.fit(z) + args['dec_mu'][:] = kmeans.cluster_centers_ + solver = Solver('sgd', momentum=0.9, wd=0.0, learning_rate=0.01) + + def ce(label, pred): + return np.sum(label*np.log(label/(pred+0.000001)))/label.shape[0] + solver.set_metric(mx.metric.CustomMetric(ce)) + + label_buff = np.zeros((X.shape[0], self.num_centers)) + train_iter = mx.io.NDArrayIter({'data': X}, {'label': label_buff}, batch_size=batch_size, + shuffle=False, last_batch_handle='roll_over') + self.y_pred = np.zeros((X.shape[0])) + + def refresh(i): + if i%update_interval == 0: + z = list(model.extract_feature(self.feature, args, None, test_iter, N, self.xpu).values())[0] + p = np.zeros((z.shape[0], self.num_centers)) + self.dec_op.forward([z, args['dec_mu'].asnumpy()], [p]) + y_pred = p.argmax(axis=1) + print(np.std(np.bincount(y_pred)), np.bincount(y_pred)) + print(np.std(np.bincount(y.astype(np.int))), np.bincount(y.astype(np.int))) + if y is not None: + print(cluster_acc(y_pred, y)[0]) + weight = 1.0/p.sum(axis=0) + weight *= self.num_centers/weight.sum() + p = (p**2)*weight + train_iter.data_list[1][:] = (p.T/p.sum(axis=1)).T + print(np.sum(y_pred != self.y_pred), 0.001*y_pred.shape[0]) + if np.sum(y_pred != self.y_pred) < 0.001*y_pred.shape[0]: + self.y_pred = y_pred + return True + self.y_pred = y_pred + solver.set_iter_start_callback(refresh) + solver.set_monitor(Monitor(50)) + + solver.solve(self.xpu, self.loss, args, self.args_grad, None, + train_iter, 0, 1000000000, {}, False) + self.end_args = args + if y is not None: + return cluster_acc(self.y_pred, y)[0] + else: + return -1 + + +def mnist_exp(xpu): + X, Y = data.get_mnist() + if not os.path.isdir('data'): + os.makedirs('data') + dec_model = DECModel(xpu, X, 10, 1.0, 'data/mnist') + acc = [] + for i in [10*(2**j) for j in range(9)]: + acc.append(dec_model.cluster(X, Y, i)) + logging.log(logging.INFO, 'Clustering Acc: %f at update interval: %d'%(acc[-1], i)) + logging.info(str(acc)) + logging.info('Best Clustering ACC: %f at update_interval: %d'%(np.max(acc), 10*(2**np.argmax(acc)))) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + mnist_exp(mx.gpu(0)) + +# Software License Agreement (BSD License) +# +# Copyright (c) 2011, Willow Garage, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Willow Garage, Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +import os +import genmsg.msg_loader +import genmsg + +# pkg_name - string +# msg_file - string full path +# search_paths - dict of {'pkg':'msg_dir'} +def find_msg_dependencies_with_type(pkg_name, msg_file, search_paths): + + # Read and parse the source msg file + msg_context = genmsg.msg_loader.MsgContext.create_default() + full_type_name = genmsg.gentools.compute_full_type_name(pkg_name, os.path.basename(msg_file)) + spec = genmsg.msg_loader.load_msg_from_file(msg_context, msg_file, full_type_name) + + try: + genmsg.msg_loader.load_depends(msg_context, spec, search_paths) + except genmsg.InvalidMsgSpec as e: + raise genmsg.MsgGenerationException("Cannot read .msg for %s: %s"%(full_type_name, str(e))) + + deps = set() + for dep_type_name in msg_context.get_all_depends(full_type_name): + deps.add((dep_type_name, msg_context.get_file(dep_type_name))) + + return list(deps) + + +def find_msg_dependencies(pkg_name, msg_file, search_paths): + deps = find_msg_dependencies_with_type(pkg_name, msg_file, search_paths) + return [d[1] for d in deps] + + +def find_srv_dependencies_with_type(pkg_name, msg_file, search_paths): + + # Read and parse the source msg file + msg_context = genmsg.msg_loader.MsgContext.create_default() + full_type_name = genmsg.gentools.compute_full_type_name(pkg_name, os.path.basename(msg_file)) + + spec = genmsg.msg_loader.load_srv_from_file(msg_context, msg_file, full_type_name) + + try: + genmsg.msg_loader.load_depends(msg_context, spec, search_paths) + except genmsg.InvalidMsgSpec as e: + raise genmsg.MsgGenerationException("Cannot read .msg for %s: %s"%(full_type_name, str(e))) + + deps = set() + + for dep_type_name in msg_context.get_all_depends(spec.request.full_name): + deps.add((dep_type_name, msg_context.get_file(dep_type_name))) + + for dep_type_name in msg_context.get_all_depends(spec.response.full_name): + deps.add((dep_type_name, msg_context.get_file(dep_type_name))) + + return list(deps) + + +def find_srv_dependencies(pkg_name, msg_file, search_paths): + deps = find_srv_dependencies_with_type(pkg_name, msg_file, search_paths) + return [d[1] for d in deps] + +#paths = {'std_msgs':'/u/mkjargaard/repositories/mkjargaard/dist-sandbox/std_msgs/msg'} +#file = '/u/mkjargaard/repositories/mkjargaard/dist-sandbox/quux_msgs/msg/QuuxString.msg' +#find_msg_dependencies('quux_msgs', file, paths) + +#An example for the Kaplan-Meier estimator +from __future__ import print_function +from statsmodels.compat.python import lrange +import statsmodels.api as sm +import matplotlib.pyplot as plt +import numpy as np +from statsmodels.sandbox.survival2 import KaplanMeier + +#Getting the strike data as an array +dta = sm.datasets.strikes.load() +print('basic data') +print('\n') +dta = list(dta.values()[-1]) +print(dta[lrange(5),:]) +print('\n') + +#Create the KaplanMeier object and fit the model + +km = KaplanMeier(dta,0) +km.fit() + +#show the results + +km.plot() +print('basic model') +print('\n') +km.summary() +print('\n') + +#Mutiple survival curves + 
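+#exog=1 selects column 1 of dta as the grouping variable, so a separate
+#survival curve is estimated for each distinct value of that column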
+km2 = KaplanMeier(dta,0,exog=1) +km2.fit() +print('more than one curve') +print('\n') +km2.summary() +print('\n') +km2.plot() + +#with censoring + +censoring = np.ones_like(dta[:,0]) +censoring[dta[:,0] > 80] = 0 +dta = np.c_[dta,censoring] +print('with censoring') +print('\n') +print(dta[lrange(5),:]) +print('\n') +km3 = KaplanMeier(dta,0,exog=1,censoring=2) +km3.fit() +km3.summary() +print('\n') +km3.plot() + +#Test for difference of survival curves + +log_rank = km3.test_diff([0.0645,-0.03957]) +print('log rank test') +print('\n') +print(log_rank) +print('\n') + +#The zeroth element of log_rank is the chi-square test statistic +#for the difference between the survival curves for exog = 0.0645 +#and exog = -0.03957, the index one element is the degrees of freedom for +#the test, and the index two element is the p-value for the test + +wilcoxon = km3.test_diff([0.0645,-0.03957], rho=1) +print('Wilcoxon') +print('\n') +print(wilcoxon) +print('\n') + +#Same info as log_rank, but for Peto and Peto modification to the +#Gehan-Wilcoxon test + +#User specified functions for tests + +#A wider range of rates can be accessed by using the 'weight' parameter +#for the test_diff method + +#For example, if the desire weights are S(t)*(1-S(t)), where S(t) is a pooled +#estimate for the survival function, this could be computed by doing + +def weights(t): + #must accept one arguement, even though it is not used here + s = KaplanMeier(dta,0,censoring=2) + s.fit() + s = s.results[0][0] + s = s * (1 - s) + return s + +#KaplanMeier provides an array of times to the weighting function +#internally, so the weighting function must accept one arguement + +test = km3.test_diff([0.0645,-0.03957], weight=weights) +print('user specified weights') +print('\n') +print(test) +print('\n') + +#Groups with nan names + +#These can be handled by passing the data to KaplanMeier as an array of strings + +groups = np.ones_like(dta[:,1]) +groups = groups.astype('S4') +groups[dta[:,1] > 0] = 'high' +groups[dta[:,1] <= 0] = 'low' +dta = dta.astype('S4') +dta[:,1] = groups +print('with nan group names') +print('\n') +print(dta[lrange(5),:]) +print('\n') +km4 = KaplanMeier(dta,0,exog=1,censoring=2) +km4.fit() +km4.summary() +print('\n') +km4.plot() + +#show all the plots + +plt.show() + +# Copyright (C) 2010-2011 Richard Lincoln +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+ +"""This package contains the information classes that extend IEC61970::Wires package with power system resources required for distribution network modelling, including unbalanced networks. +""" + +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.DistributionTransformerWinding import DistributionTransformerWinding +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.DistributionLineSegment import DistributionLineSegment +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.WindingPiImpedance import WindingPiImpedance +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.DistributionTapChanger import DistributionTapChanger +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.PerLengthSequenceImpedance import PerLengthSequenceImpedance +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.TransformerBank import TransformerBank +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.PerLengthPhaseImpedance import PerLengthPhaseImpedance +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.DistributionTransformer import DistributionTransformer +from CIM14.CDPSM.Unbalanced.IEC61968.WiresExt.PhaseImpedanceData import PhaseImpedanceData + +nsURI = "http://iec.ch/TC57/2009/CIM-schema-cim14?profile=http://iec.ch/TC57/2007/profile#WiresExt" +nsPrefix = "cimWiresExt" + + +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: proto + +import flatbuffers + +class SubscriberReceived(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsSubscriberReceived(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SubscriberReceived() + x.Init(buf, n + offset) + return x + + # SubscriberReceived + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SubscriberReceived + def Publication(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + + # SubscriberReceived + def Subscriber(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + + # SubscriberReceived + def SubscriberAuthid(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # SubscriberReceived + def SubscriberAuthrole(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # SubscriberReceived + def Payload(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # SubscriberReceived + def PayloadAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # SubscriberReceived + def PayloadLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SubscriberReceived + def EncAlgo(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # SubscriberReceived + def EncSerializer(self): + o 
= flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # SubscriberReceived + def EncKey(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # SubscriberReceived + def EncKeyAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # SubscriberReceived + def EncKeyLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def SubscriberReceivedStart(builder): builder.StartObject(8) +def SubscriberReceivedAddPublication(builder, publication): builder.PrependUint64Slot(0, publication, 0) +def SubscriberReceivedAddSubscriber(builder, subscriber): builder.PrependUint64Slot(1, subscriber, 0) +def SubscriberReceivedAddSubscriberAuthid(builder, subscriberAuthid): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(subscriberAuthid), 0) +def SubscriberReceivedAddSubscriberAuthrole(builder, subscriberAuthrole): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(subscriberAuthrole), 0) +def SubscriberReceivedAddPayload(builder, payload): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(payload), 0) +def SubscriberReceivedStartPayloadVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def SubscriberReceivedAddEncAlgo(builder, encAlgo): builder.PrependUint8Slot(5, encAlgo, 0) +def SubscriberReceivedAddEncSerializer(builder, encSerializer): builder.PrependUint8Slot(6, encSerializer, 0) +def SubscriberReceivedAddEncKey(builder, encKey): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(encKey), 0) +def SubscriberReceivedStartEncKeyVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def SubscriberReceivedEnd(builder): return builder.EndObject() + +# -*- coding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# Copyright (C) 2004-2010 Tiny SPRL (). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+# +############################################################################## + +from openerp import tools +from openerp.osv import fields,osv + +class timesheet_report(osv.osv): + _name = "timesheet.report" + _description = "Timesheet" + _auto = False + _columns = { + 'year': fields.char('Year',size=64,required=False, readonly=True), + 'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), + ('05','May'), ('06','June'), ('07','July'), ('08','August'), ('09','September'), + ('10','October'), ('11','November'), ('12','December')], 'Month',readonly=True), + 'day': fields.char('Day', size=128, readonly=True), + 'date': fields.date('Date', readonly=True), + 'name': fields.char('Description', size=64,readonly=True), + 'product_id' : fields.many2one('product.product', 'Product'), + 'general_account_id' : fields.many2one('account.account', 'General Account', readonly=True), + 'user_id': fields.many2one('res.users', 'User',readonly=True), + 'to_invoice': fields.many2one('hr_timesheet_invoice.factor', 'Type of Invoicing',readonly=True), + 'account_id': fields.many2one('account.analytic.account', 'Analytic Account',readonly=True), + 'nbr': fields.integer('#Nbr',readonly=True), + 'total_diff': fields.float('#Total Diff',readonly=True), + 'total_timesheet': fields.float('#Total Timesheet',readonly=True), + 'total_attendance': fields.float('#Total Attendance',readonly=True), + 'company_id': fields.many2one('res.company', 'Company',readonly=True), + 'department_id':fields.many2one('hr.department','Department',readonly=True), + 'date_from': fields.date('Date from',readonly=True,), + 'date_to': fields.date('Date to',readonly=True), + 'date_current': fields.date('Current date', required=True), + 'state' : fields.selection([ + ('new', 'New'), + ('draft','Draft'), + ('confirm','Confirmed'), + ('done','Done')], 'Status', readonly=True), + 'quantity': fields.float('Time',readonly=True), + 'cost': fields.float('#Cost',readonly=True), + } + + def init(self, cr): + tools.drop_view_if_exists(cr, 'timesheet_report') + cr.execute(""" + create or replace view timesheet_report as ( + select + min(aal.id) as id, + htss.name, + aal.date as date, + htss.date_from, + htss.date_to, + to_char(htss.date_from, 'YYYY-MM-DD') as day, + to_char(htss.date_from, 'YYYY') as year, + to_char(htss.date_from, 'MM') as month, + count(*) as nbr, + aal.unit_amount as quantity, + aal.amount as cost, + aal.account_id, + aal.product_id, + (SELECT sum(day.total_difference) + FROM hr_timesheet_sheet_sheet AS sheet + LEFT JOIN hr_timesheet_sheet_sheet_day AS day + ON (sheet.id = day.sheet_id) where sheet.id=htss.id) as total_diff, + (SELECT sum(day.total_timesheet) + FROM hr_timesheet_sheet_sheet AS sheet + LEFT JOIN hr_timesheet_sheet_sheet_day AS day + ON (sheet.id = day.sheet_id) where sheet.id=htss.id) as total_timesheet, + (SELECT sum(day.total_attendance) + FROM hr_timesheet_sheet_sheet AS sheet + LEFT JOIN hr_timesheet_sheet_sheet_day AS day + ON (sheet.id = day.sheet_id) where sheet.id=htss.id) as total_attendance, + aal.to_invoice, + aal.general_account_id, + htss.user_id, + htss.company_id, + htss.department_id, + htss.state + from account_analytic_line as aal + left join hr_analytic_timesheet as hat ON (hat.line_id=aal.id) + left join hr_timesheet_sheet_sheet as htss ON (hat.sheet_id=htss.id) + group by + aal.account_id, + aal.date, + htss.date_from, + htss.date_to, + aal.unit_amount, + aal.amount, + aal.to_invoice, + aal.product_id, + aal.general_account_id, + htss.name, + 
htss.company_id, + htss.state, + htss.id, + htss.department_id, + htss.user_id + ) + """) +timesheet_report() + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: + +import os +import sys +import tempfile +import operator +import functools +import itertools +import re +import contextlib +import pickle +import textwrap + +from setuptools.extern import six +from setuptools.extern.six.moves import builtins, map + +import pkg_resources + +if sys.platform.startswith('java'): + import org.python.modules.posix.PosixModule as _os +else: + _os = sys.modules[os.name] +try: + _file = file +except NameError: + _file = None +_open = open +from distutils.errors import DistutilsError +from pkg_resources import working_set + +__all__ = [ + "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", +] + + +def _execfile(filename, globals, locals=None): + """ + Python 3 implementation of execfile. + """ + mode = 'rb' + with open(filename, mode) as stream: + script = stream.read() + # compile() function in Python 2.6 and 3.1 requires LF line endings. + if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < (3, 2): + script = script.replace(b'\r\n', b'\n') + script = script.replace(b'\r', b'\n') + if locals is None: + locals = globals + code = compile(script, filename, 'exec') + exec(code, globals, locals) + + +@contextlib.contextmanager +def save_argv(repl=None): + saved = sys.argv[:] + if repl is not None: + sys.argv[:] = repl + try: + yield saved + finally: + sys.argv[:] = saved + + +@contextlib.contextmanager +def save_path(): + saved = sys.path[:] + try: + yield saved + finally: + sys.path[:] = saved + + +@contextlib.contextmanager +def override_temp(replacement): + """ + Monkey-patch tempfile.tempdir with replacement, ensuring it exists + """ + if not os.path.isdir(replacement): + os.makedirs(replacement) + + saved = tempfile.tempdir + + tempfile.tempdir = replacement + + try: + yield + finally: + tempfile.tempdir = saved + + +@contextlib.contextmanager +def pushd(target): + saved = os.getcwd() + os.chdir(target) + try: + yield saved + finally: + os.chdir(saved) + + +class UnpickleableException(Exception): + """ + An exception representing another Exception that could not be pickled. + """ + + @staticmethod + def dump(type, exc): + """ + Always return a dumped (pickled) type and exc. If exc can't be pickled, + wrap it in UnpickleableException first. + """ + try: + return pickle.dumps(type), pickle.dumps(exc) + except Exception: + # get UnpickleableException inside the sandbox + from setuptools.sandbox import UnpickleableException as cls + return cls.dump(cls, cls(repr(exc))) + + +class ExceptionSaver: + """ + A Context Manager that will save an exception, serialized, and restore it + later. + """ + + def __enter__(self): + return self + + def __exit__(self, type, exc, tb): + if not exc: + return + + # dump the exception + self._saved = UnpickleableException.dump(type, exc) + self._tb = tb + + # suppress the exception + return True + + def resume(self): + "restore and re-raise any exception" + + if '_saved' not in vars(self): + return + + type, exc = map(pickle.loads, self._saved) + six.reraise(type, exc, self._tb) + + +@contextlib.contextmanager +def save_modules(): + """ + Context in which imported modules are saved. + + Translates exceptions internal to the context into the equivalent exception + outside the context. 
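+
+    A minimal usage sketch (the module name is hypothetical)::
+
+        with save_modules():
+            import some_plugin   # visible only inside the block
+        # afterwards sys.modules is restored and some_plugin is dropped again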
+ """ + saved = sys.modules.copy() + with ExceptionSaver() as saved_exc: + yield saved + + sys.modules.update(saved) + # remove any modules imported since + del_modules = ( + mod_name for mod_name in sys.modules + if mod_name not in saved + # exclude any encodings modules. See #285 + and not mod_name.startswith('encodings.') + ) + _clear_modules(del_modules) + + saved_exc.resume() + + +def _clear_modules(module_names): + for mod_name in list(module_names): + del sys.modules[mod_name] + + +@contextlib.contextmanager +def save_pkg_resources_state(): + saved = pkg_resources.__getstate__() + try: + yield saved + finally: + pkg_resources.__setstate__(saved) + + +@contextlib.contextmanager +def setup_context(setup_dir): + temp_dir = os.path.join(setup_dir, 'temp') + with save_pkg_resources_state(): + with save_modules(): + hide_setuptools() + with save_path(): + with save_argv(): + with override_temp(temp_dir): + with pushd(setup_dir): + # ensure setuptools commands are available + __import__('setuptools') + yield + + +def _needs_hiding(mod_name): + """ + >>> _needs_hiding('setuptools') + True + >>> _needs_hiding('pkg_resources') + True + >>> _needs_hiding('setuptools_plugin') + False + >>> _needs_hiding('setuptools.__init__') + True + >>> _needs_hiding('distutils') + True + >>> _needs_hiding('os') + False + >>> _needs_hiding('Cython') + True + """ + pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)') + return bool(pattern.match(mod_name)) + + +def hide_setuptools(): + """ + Remove references to setuptools' modules from sys.modules to allow the + invocation to import the most appropriate setuptools. This technique is + necessary to avoid issues such as #315 where setuptools upgrading itself + would fail to find a function declared in the metadata. 
+ """ + modules = filter(_needs_hiding, sys.modules) + _clear_modules(modules) + + +def run_setup(setup_script, args): + """Run a distutils setup script, sandboxed in its directory""" + setup_dir = os.path.abspath(os.path.dirname(setup_script)) + with setup_context(setup_dir): + try: + sys.argv[:] = [setup_script] + list(args) + sys.path.insert(0, setup_dir) + # reset to include setup dir, w/clean callback list + working_set.__init__() + working_set.callbacks.append(lambda dist: dist.activate()) + + # __file__ should be a byte string on Python 2 (#712) + dunder_file = ( + setup_script + if isinstance(setup_script, str) else + setup_script.encode(sys.getfilesystemencoding()) + ) + + with DirectorySandbox(setup_dir): + ns = dict(__file__=dunder_file, __name__='__main__') + _execfile(setup_script, ns) + except SystemExit as v: + if v.args and v.args[0]: + raise + # Normal exit, just return + + +class AbstractSandbox: + """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" + + _active = False + + def __init__(self): + self._attrs = [ + name for name in dir(_os) + if not name.startswith('_') and hasattr(self, name) + ] + + def _copy(self, source): + for name in self._attrs: + setattr(os, name, getattr(source, name)) + + def __enter__(self): + self._copy(self) + if _file: + builtins.file = self._file + builtins.open = self._open + self._active = True + + def __exit__(self, exc_type, exc_value, traceback): + self._active = False + if _file: + builtins.file = _file + builtins.open = _open + self._copy(_os) + + def run(self, func): + """Run 'func' under os sandboxing""" + with self: + return func() + + def _mk_dual_path_wrapper(name): + original = getattr(_os, name) + + def wrap(self, src, dst, *args, **kw): + if self._active: + src, dst = self._remap_pair(name, src, dst, *args, **kw) + return original(src, dst, *args, **kw) + + return wrap + + for name in ["rename", "link", "symlink"]: + if hasattr(_os, name): + locals()[name] = _mk_dual_path_wrapper(name) + + def _mk_single_path_wrapper(name, original=None): + original = original or getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return original(path, *args, **kw) + + return wrap + + if _file: + _file = _mk_single_path_wrapper('file', _file) + _open = _mk_single_path_wrapper('open', _open) + for name in [ + "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", + "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", + "startfile", "mkfifo", "mknod", "pathconf", "access" + ]: + if hasattr(_os, name): + locals()[name] = _mk_single_path_wrapper(name) + + def _mk_single_with_return(name): + original = getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return self._remap_output(name, original(path, *args, **kw)) + return original(path, *args, **kw) + + return wrap + + for name in ['readlink', 'tempnam']: + if hasattr(_os, name): + locals()[name] = _mk_single_with_return(name) + + def _mk_query(name): + original = getattr(_os, name) + + def wrap(self, *args, **kw): + retval = original(*args, **kw) + if self._active: + return self._remap_output(name, retval) + return retval + + return wrap + + for name in ['getcwd', 'tmpnam']: + if hasattr(_os, name): + locals()[name] = _mk_query(name) + + def _validate_path(self, path): + """Called to remap or validate any path, whether input or output""" + return path + + def _remap_input(self, operation, path, 
*args, **kw): + """Called for path inputs""" + return self._validate_path(path) + + def _remap_output(self, operation, path): + """Called for path outputs""" + return self._validate_path(path) + + def _remap_pair(self, operation, src, dst, *args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + return ( + self._remap_input(operation + '-from', src, *args, **kw), + self._remap_input(operation + '-to', dst, *args, **kw) + ) + + +if hasattr(os, 'devnull'): + _EXCEPTIONS = [os.devnull,] +else: + _EXCEPTIONS = [] + + +class DirectorySandbox(AbstractSandbox): + """Restrict operations to a single subdirectory - pseudo-chroot""" + + write_ops = dict.fromkeys([ + "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", + "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", + ]) + + _exception_patterns = [ + # Allow lib2to3 to attempt to save a pickled grammar object (#121) + r'.*lib2to3.*\.pickle$', + ] + "exempt writing to paths that match the pattern" + + def __init__(self, sandbox, exceptions=_EXCEPTIONS): + self._sandbox = os.path.normcase(os.path.realpath(sandbox)) + self._prefix = os.path.join(self._sandbox, '') + self._exceptions = [ + os.path.normcase(os.path.realpath(path)) + for path in exceptions + ] + AbstractSandbox.__init__(self) + + def _violation(self, operation, *args, **kw): + from setuptools.sandbox import SandboxViolation + raise SandboxViolation(operation, args, kw) + + if _file: + + def _file(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("file", path, mode, *args, **kw) + return _file(path, mode, *args, **kw) + + def _open(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("open", path, mode, *args, **kw) + return _open(path, mode, *args, **kw) + + def tmpnam(self): + self._violation("tmpnam") + + def _ok(self, path): + active = self._active + try: + self._active = False + realpath = os.path.normcase(os.path.realpath(path)) + return ( + self._exempted(realpath) + or realpath == self._sandbox + or realpath.startswith(self._prefix) + ) + finally: + self._active = active + + def _exempted(self, filepath): + start_matches = ( + filepath.startswith(exception) + for exception in self._exceptions + ) + pattern_matches = ( + re.match(pattern, filepath) + for pattern in self._exception_patterns + ) + candidates = itertools.chain(start_matches, pattern_matches) + return any(candidates) + + def _remap_input(self, operation, path, *args, **kw): + """Called for path inputs""" + if operation in self.write_ops and not self._ok(path): + self._violation(operation, os.path.realpath(path), *args, **kw) + return path + + def _remap_pair(self, operation, src, dst, *args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + if not self._ok(src) or not self._ok(dst): + self._violation(operation, src, dst, *args, **kw) + return (src, dst) + + def open(self, file, flags, mode=0o777, *args, **kw): + """Called for low-level os.open()""" + if flags & WRITE_FLAGS and not self._ok(file): + self._violation("os.open", file, flags, mode, *args, **kw) + return _os.open(file, flags, mode, *args, **kw) + + +WRITE_FLAGS = functools.reduce( + operator.or_, [getattr(_os, a, 0) for a in + "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] +) + + +class SandboxViolation(DistutilsError): + """A setup script attempted to modify the filesystem outside the sandbox""" + + tmpl = 
textwrap.dedent(""" + SandboxViolation: {cmd}{args!r} {kwargs} + + The package setup script has attempted to modify files on your system + that are not within the EasyInstall build area, and has been aborted. + + This package cannot be safely installed by EasyInstall, and may not + support alternate installation locations even if you run its setup + script by hand. Please inform the package's author and the EasyInstall + maintainers to find out if a fix or workaround is available. + """).lstrip() + + def __str__(self): + cmd, args, kwargs = self.args + return self.tmpl.format(**locals()) + +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import Big5SMModel + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + MultiByteCharSetProber.__init__(self) + self._mCodingSM = CodingStateMachine(Big5SMModel) + self._mDistributionAnalyzer = Big5DistributionAnalysis() + self.reset() + + def get_charset_name(self): + return "Big5" + +# -*- coding: utf-8 -*- +""" + werkzeug.contrib.jsrouting + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Addon module that allows to create a JavaScript function from a map + that generates rules. + + :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
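+
+    A rough usage sketch (the map contents are only an example)::
+
+        from werkzeug.routing import Map, Rule
+        from werkzeug.contrib.jsrouting import generate_map
+
+        url_map = Map([Rule('/', endpoint='index')])
+        js_source = generate_map(url_map, name='url_map')  # JavaScript as a string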
+""" +try: + from simplejson import dumps +except ImportError: + try: + from json import dumps + except ImportError: + def dumps(*args): + raise RuntimeError('simplejson required for jsrouting') + +from inspect import getmro +from werkzeug.routing import NumberConverter + + +def render_template(name_parts, rules, converters): + result = u'' + if name_parts: + for idx in xrange(0, len(name_parts) - 1): + name = u'.'.join(name_parts[:idx + 1]) + result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name) + result += '%s = ' % '.'.join(name_parts) + result += """(function (server_name, script_name, subdomain, url_scheme) { + var converters = %(converters)s; + var rules = $rules; + function in_array(array, value) { + if (array.indexOf != undefined) { + return array.indexOf(value) != -1; + } + for (var i = 0; i < array.length; i++) { + if (array[i] == value) { + return true; + } + } + return false; + } + function array_diff(array1, array2) { + array1 = array1.slice(); + for (var i = array1.length-1; i >= 0; i--) { + if (in_array(array2, array1[i])) { + array1.splice(i, 1); + } + } + return array1; + } + function split_obj(obj) { + var names = []; + var values = []; + for (var name in obj) { + if (typeof(obj[name]) != 'function') { + names.push(name); + values.push(obj[name]); + } + } + return {names: names, values: values, original: obj}; + } + function suitable(rule, args) { + var default_args = split_obj(rule.defaults || {}); + var diff_arg_names = array_diff(rule.arguments, default_args.names); + + for (var i = 0; i < diff_arg_names.length; i++) { + if (!in_array(args.names, diff_arg_names[i])) { + return false; + } + } + + if (array_diff(rule.arguments, args.names).length == 0) { + if (rule.defaults == null) { + return true; + } + for (var i = 0; i < default_args.names.length; i++) { + var key = default_args.names[i]; + var value = default_args.values[i]; + if (value != args.original[key]) { + return false; + } + } + } + + return true; + } + function build(rule, args) { + var tmp = []; + var processed = rule.arguments.slice(); + for (var i = 0; i < rule.trace.length; i++) { + var part = rule.trace[i]; + if (part.is_dynamic) { + var converter = converters[rule.converters[part.data]]; + var data = converter(args.original[part.data]); + if (data == null) { + return null; + } + tmp.push(data); + processed.push(part.name); + } else { + tmp.push(part.data); + } + } + tmp = tmp.join(''); + var pipe = tmp.indexOf('|'); + var subdomain = tmp.substring(0, pipe); + var url = tmp.substring(pipe+1); + + var unprocessed = array_diff(args.names, processed); + var first_query_var = true; + for (var i = 0; i < unprocessed.length; i++) { + if (first_query_var) { + url += '?'; + } else { + url += '&'; + } + first_query_var = false; + url += encodeURIComponent(unprocessed[i]); + url += '='; + url += encodeURIComponent(args.original[unprocessed[i]]); + } + return {subdomain: subdomain, path: url}; + } + function lstrip(s, c) { + while (s && s.substring(0, 1) == c) { + s = s.substring(1); + } + return s; + } + function rstrip(s, c) { + while (s && s.substring(s.length-1, s.length) == c) { + s = s.substring(0, s.length-1); + } + return s; + } + return function(endpoint, args, force_external) { + args = split_obj(args); + var rv = null; + for (var i = 0; i < rules.length; i++) { + var rule = rules[i]; + if (rule.endpoint != endpoint) continue; + if (suitable(rule, args)) { + rv = build(rule, args); + if (rv != null) { + break; + } + } + } + if (rv == null) { + return null; + } + if (!force_external && 
rv.subdomain == subdomain) { + return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/'); + } else { + return url_scheme + '://' + + (rv.subdomain ? rv.subdomain + '.' : '') + + server_name + rstrip(script_name, '/') + + '/' + lstrip(rv.path, '/'); + } + }; +})""" % {'converters': u', '.join(converters)} + return result + + +def generate_map(map, name='url_map'): + """ + Generates a JavaScript function containing the rules defined in + this map, to be used with a MapAdapter's generate_javascript + method. If you don't pass a name the returned JavaScript code is + an expression that returns a function. Otherwise it's a standalone + script that assigns the function with that name. Dotted names are + resolved (so you an use a name like 'obj.url_for') + + In order to use JavaScript generation, simplejson must be installed. + + Note that using this feature will expose the rules + defined in your map to users. If your rules contain sensitive + information, don't use JavaScript generation! + """ + map.update() + rules = [] + converters = [] + for rule in map.iter_rules(): + trace = [{ + 'is_dynamic': is_dynamic, + 'data': data + } for is_dynamic, data in rule._trace] + rule_converters = {} + for key, converter in rule._converters.iteritems(): + js_func = js_to_url_function(converter) + try: + index = converters.index(js_func) + except ValueError: + converters.append(js_func) + index = len(converters) - 1 + rule_converters[key] = index + rules.append({ + u'endpoint': rule.endpoint, + u'arguments': list(rule.arguments), + u'converters': rule_converters, + u'trace': trace, + u'defaults': rule.defaults + }) + + return render_template(name_parts=name and name.split('.') or [], + rules=dumps(rules), + converters=converters) + + +def generate_adapter(adapter, name='url_for', map_name='url_map'): + """Generates the url building function for a map.""" + values = { + u'server_name': dumps(adapter.server_name), + u'script_name': dumps(adapter.script_name), + u'subdomain': dumps(adapter.subdomain), + u'url_scheme': dumps(adapter.url_scheme), + u'name': name, + u'map_name': map_name + } + return u'''\ +var %(name)s = %(map_name)s( + %(server_name)s, + %(script_name)s, + %(subdomain)s, + %(url_scheme)s +);''' % values + + +def js_to_url_function(converter): + """Get the JavaScript converter function from a rule.""" + if hasattr(converter, 'js_to_url_function'): + data = converter.js_to_url_function() + else: + for cls in getmro(type(converter)): + if cls in js_to_url_functions: + data = js_to_url_functions[cls](converter) + break + else: + return 'encodeURIComponent' + return '(function(value) { %s })' % data + + +def NumberConverter_js_to_url(conv): + if conv.fixed_digits: + return u'''\ +var result = value.toString(); +while (result.length < %s) + result = '0' + result; +return result;''' % conv.fixed_digits + return u'return value.toString();' + + +js_to_url_functions = { + NumberConverter: NumberConverter_js_to_url +} + +import pygtk +import gtk +import pango +import cairo +import pangocairo +import logging +import math + +import utility +from amirconfig import config + +class PrintReport: + def __init__(self, content, cols_width, heading=None): +# self.lines_per_page = 24 + self.cell_margin = 4 + self.line = 2 #the thinest possible width of lines. 
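+        # the row height used below is twice the content font size plus a
+        # cell margin above and below the text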
+ self.row_height = 2 * (config.contentfont + self.cell_margin) + self.header_height = 0 + self.heading_height = 35 + + self.operation = gtk.PrintOperation() + settings = gtk.PrintSettings() + paper_size = gtk.paper_size_new_from_ppd(config.paper_ppd, config.paper_name, config.paper_width, config.paper_height) + self.page_setup = gtk.PageSetup() + self.page_setup.set_paper_size(paper_size) + self.page_setup.set_orientation(config.paper_orientation) +# self.page_setup = gtk.print_run_page_setup_dialog(None, self.page_setup, settings) + + self.page_setup.set_top_margin(config.topmargin, gtk.UNIT_POINTS) + self.page_setup.set_bottom_margin(config.botmargin, gtk.UNIT_POINTS) + self.page_setup.set_right_margin(config.rightmargin, gtk.UNIT_POINTS) + self.page_setup.set_left_margin(config.leftmargin, gtk.UNIT_POINTS) + + self.operation.set_default_page_setup(self.page_setup) + self.operation.set_unit(gtk.UNIT_POINTS) + + self.content = content + tablewidth = self.page_setup.get_page_width(gtk.UNIT_POINTS) + tablewidth -= (len(cols_width) * (self.line + self.cell_margin)) + self.line + (config.rightmargin + config.leftmargin) + self.cols_width = [] + for percent in cols_width: + self.cols_width.append(math.floor((percent * tablewidth) / 100)) +# self.cols_width = cols_width + self.heading = heading + + self.operation.connect("begin_print", self.beginPrint) + self.operation.connect("draw-page", self.printPage) + self.type = 0 + self.title = "" + self.fields = {} + + ##self.content = data + def setHeader (self, title, fields): + self.title = title + self.fields = fields + + def beginPrint(self, operation, context): + tableheight = self.page_setup.get_page_height(gtk.UNIT_POINTS) + name_lineheight = 2 * config.namefont + header_lineheight = 2 * config.headerfont + tableheight -= (math.floor((len(self.fields) + 1) / 2) * header_lineheight) + (config.topmargin + config.botmargin) + self.heading_height + name_lineheight + (self.cell_margin * 2) + + self.lines_per_page = int(math.floor(tableheight / self.row_height)) + #Subtract two lines that show "Sum of previous page" and "Sum" + self.lines_per_page -= 2 + + pages = ((len(self.content) - 1) / self.lines_per_page ) + 1 + operation.set_n_pages(pages) + + def doPrintJob(self, action): + self.operation.run(action) + + def printPage(self, operation, context, page_nr): + self.pangolayout = context.create_pango_layout() + self.cairo_context = context.get_cairo_context() + + self.pangolayout.set_width(-1) + self.pangocairo = pangocairo.CairoContext(self.cairo_context) + + self.formatHeader() + getattr(self, self.drawfunction)(page_nr) + #self.drawDailyNotebook(page_nr) + + def formatHeader(self): + LINE_HEIGHT = 2 * (config.namefont) +# MARGIN = self.page_margin +# cwidth = context.get_width() + cwidth = self.page_setup.get_page_width(gtk.UNIT_POINTS) + logging.info("Paper width: " + str(cwidth)) + cr = self.cairo_context + + fontsize = config.namefont + fdesc = pango.FontDescription("Sans") + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + + if self.title != "": + self.pangolayout.set_text(self.title) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_CENTER) + cr.move_to ((cwidth - width / pango.SCALE) / 2, (LINE_HEIGHT - (height/ pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + +# cr.move_to((cwidth + width / pango.SCALE) / 2, LINE_HEIGHT + config.topmargin) +# cr.line_to((cwidth - width / pango.SCALE) / 2, LINE_HEIGHT + config.topmargin) + 
cr.move_to((cwidth + width / pango.SCALE) / 2, LINE_HEIGHT + self.cell_margin) + cr.line_to((cwidth - width / pango.SCALE) / 2, LINE_HEIGHT + self.cell_margin) + + + addh = LINE_HEIGHT + self.cell_margin + LINE_HEIGHT = 2 * config.headerfont + fontsize = config.headerfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + + flag = 1 + for k,v in self.fields.items(): + self.pangolayout.set_text(k + ": " + v) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_CENTER) + if flag == 1: + addh += LINE_HEIGHT + cr.move_to (cwidth - (width / pango.SCALE) - config.rightmargin, addh - (height/ pango.SCALE)/2) + flag = 0 + else: + cr.move_to ((width / pango.SCALE) + config.leftmargin, addh - (height/ pango.SCALE)/2) + flag = 1 + self.pangocairo.show_layout(self.pangolayout) + + cr.stroke() + self.header_height = addh + 8 + + + def drawDailyNotebook(self, page_nr): +# RIGHT_EDGE = 570 #(table width + PAGE_MARGIN) + RIGHT_EDGE = self.page_setup.get_page_width(gtk.UNIT_POINTS) - config.rightmargin + HEADER_HEIGHT = self.header_height + HEADING_HEIGHT = self.heading_height +# PAGE_MARGIN = self.page_margin + MARGIN = self.cell_margin + TABLE_TOP = HEADER_HEIGHT + HEADING_HEIGHT + self.cell_margin + ROW_HEIGHT = self.row_height + LINE = self.line + + cr = self.cairo_context + fontsize = config.contentfont + fdesc = pango.FontDescription("Sans") + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + +# #Table top line +# cr.move_to(PAGE_MARGIN, TABLE_TOP) +# cr.line_to(RIGHT_EDGE, TABLE_TOP) + + self.drawTableHeading() + + #Draw table data + rindex = page_nr * self.lines_per_page + offset = 0 + + right_txt = RIGHT_EDGE + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + self.pangolayout.set_text("----") + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + + for i in range(0, 3): + right_txt -= MARGIN + LINE + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[i] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize -= 1 + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(_("Sum of previous page")) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[3] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize = config.contentfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + if page_nr == 0: + self.pangolayout.set_text(utility.showNumber(0)) + self.debt_sum = 0 + else: + self.pangolayout.set_text(utility.showNumber(self.debt_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[4] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + if 
page_nr == 0: + self.pangolayout.set_text(utility.showNumber(0)) + self.credit_sum = 0 + else: + self.pangolayout.set_text(utility.showNumber(self.credit_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[5] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + addh= ROW_HEIGHT + TABLE_TOP + try: + while (offset < self.lines_per_page): + row = self.content[rindex + offset] + + cr.move_to(RIGHT_EDGE, addh) + cr.line_to(RIGHT_EDGE, addh+ROW_HEIGHT) + + right_txt = RIGHT_EDGE + dindex = 0 + for data in row: + right_txt -= MARGIN+LINE + if dindex == 3: + fontsize -= 1 + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + fontsize = config.contentfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + else: + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + + right_txt -= self.cols_width[dindex] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + dindex += 1 + + self.debt_sum += int(row[4].replace(",", "")) + self.credit_sum += int(row[5].replace(",", "")) + + addh += ROW_HEIGHT + offset += 1 + except IndexError: + pass + + right_txt = RIGHT_EDGE + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + self.pangolayout.set_text("----") + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + + for i in range(0, 3): + right_txt -= MARGIN + LINE + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[i] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize -= 1 + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(_("Sum")) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[3] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize = config.contentfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(utility.showNumber(self.debt_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[4] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + 
LINE + self.pangolayout.set_text(utility.showNumber(self.credit_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[5] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + #Table top line + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(RIGHT_EDGE, TABLE_TOP) + + #Table bottom line + cr.move_to(right_txt, addh + ROW_HEIGHT) + cr.line_to(RIGHT_EDGE, addh + ROW_HEIGHT) + + cr.stroke() + + def drawSubjectNotebook(self, page_nr): +# RIGHT_EDGE = 570 #(table width + PAGE_MARGIN) + RIGHT_EDGE = self.page_setup.get_page_width(gtk.UNIT_POINTS) - config.rightmargin + HEADER_HEIGHT = self.header_height + HEADING_HEIGHT = self.heading_height +# PAGE_MARGIN = self.page_margin + MARGIN = self.cell_margin + TABLE_TOP = HEADER_HEIGHT + HEADING_HEIGHT + self.cell_margin + ROW_HEIGHT = self.row_height + LINE = self.line + + cr = self.cairo_context + fontsize = config.contentfont + fdesc = pango.FontDescription("Sans") + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + +# #Table top line +# cr.move_to(PAGE_MARGIN, TABLE_TOP) +# cr.line_to(RIGHT_EDGE, TABLE_TOP) + + self.drawTableHeading() + + #Draw table data + rindex = page_nr * self.lines_per_page + offset = 0 + + right_txt = RIGHT_EDGE + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + self.pangolayout.set_text("----") + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + + for i in range(0, 2): + right_txt -= MARGIN + LINE + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[i] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize -= 1 + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(_("Sum of previous page")) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[2] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize = config.contentfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + if page_nr == 0: + self.pangolayout.set_text(utility.showNumber(0)) + self.debt_sum = 0 + else: + self.pangolayout.set_text(utility.showNumber(self.debt_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[3] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + if page_nr == 0: + self.pangolayout.set_text(utility.showNumber(0)) + self.credit_sum = 0 + else: + self.pangolayout.set_text(utility.showNumber(self.credit_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + 
cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[4] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + if page_nr == 0: + remaining = int(self.content[0][3].replace(",", "")) - int(self.content[0][4].replace(",", "")) + if self.content[0][5] == _("deb"): + remaining -= int(self.content[0][6].replace(",", "")) + else: + remaining += int(self.content[0][6].replace(",", "")) + if remaining < 0: + self.diagnose = _("deb") + self.remaining = utility.showNumber(-(remaining)) + else: + if remaining == 0: + self.diagnose = _("equ") + else: + self.diagnose = _("cre") + self.remaining = utility.showNumber(remaining) + + right_txt -= MARGIN + LINE + self.pangolayout.set_text(self.diagnose) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[5] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + self.pangolayout.set_text(self.remaining) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), TABLE_TOP + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[6] + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(right_txt, TABLE_TOP + ROW_HEIGHT) + + addh= ROW_HEIGHT + TABLE_TOP + try: + while (offset < self.lines_per_page): + row = self.content[rindex + offset] + + cr.move_to(RIGHT_EDGE, addh) + cr.line_to(RIGHT_EDGE, addh+ROW_HEIGHT) + + right_txt = RIGHT_EDGE + dindex = 0 + for data in row: + right_txt -= MARGIN+LINE + if dindex == 2: + fontsize -= 1 + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + fontsize = config.contentfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + else: + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[dindex] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + dindex += 1 + + self.debt_sum += int(row[3].replace(",", "")) + self.credit_sum += int(row[4].replace(",", "")) + + addh += ROW_HEIGHT + offset += 1 + + except IndexError: + pass + + self.diagnose = self.content[rindex + offset - 1][5] + self.remaining = self.content[rindex + offset - 1][6] + + right_txt = RIGHT_EDGE + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + self.pangolayout.set_text("----") + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + + for i in range(0, 2): + right_txt -= MARGIN + LINE + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + 
self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[i] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize -= 1 + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(_("Sum")) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[2] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + fontsize = config.contentfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(utility.showNumber(self.debt_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[3] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + self.pangolayout.set_text(utility.showNumber(self.credit_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[4] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + self.pangolayout.set_text(self.diagnose) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[5] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + self.pangolayout.set_text(self.remaining) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[6] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + #Table top line + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(RIGHT_EDGE, TABLE_TOP) + + #Table bottom line +# cr.move_to(self.page_margin, addh + ROW_HEIGHT) + cr.move_to(right_txt, addh + ROW_HEIGHT) + cr.line_to(RIGHT_EDGE, addh + ROW_HEIGHT) + + cr.stroke() + + def drawDocument(self, page_nr): +# RIGHT_EDGE = 570 #(table width + PAGE_MARGIN) + RIGHT_EDGE = self.page_setup.get_page_width(gtk.UNIT_POINTS) - config.rightmargin + HEADER_HEIGHT = self.header_height + HEADING_HEIGHT = self.heading_height +# PAGE_MARGIN = self.page_margin + MARGIN = self.cell_margin + TABLE_TOP = HEADER_HEIGHT + HEADING_HEIGHT + self.cell_margin + ROW_HEIGHT = self.row_height + LINE = self.line + + cr = self.cairo_context + fontsize = config.contentfont + fdesc = pango.FontDescription("Sans") + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + +# #Table top line +# cr.move_to(PAGE_MARGIN, TABLE_TOP) +# cr.line_to(RIGHT_EDGE, TABLE_TOP) + + self.drawTableHeading() + + #Draw table data + 
rindex = page_nr * self.lines_per_page + offset = 0 + + self.debt_sum = 0 + self.credit_sum = 0 + + addh= TABLE_TOP + try: + while (offset < self.lines_per_page): + row = self.content[rindex + offset] + + cr.move_to(RIGHT_EDGE, addh) + cr.line_to(RIGHT_EDGE, addh+ROW_HEIGHT) + + right_txt = RIGHT_EDGE + dindex = 0 + for data in row: + right_txt -= MARGIN+LINE + if dindex == 2 or dindex == 3: + fontsize -= 1 + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + fontsize = config.contentfont + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + else: + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + + right_txt -= self.cols_width[dindex] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + dindex += 1 + + self.debt_sum += int(row[4].replace(",", "")) + self.credit_sum += int(row[5].replace(",", "")) + + addh += ROW_HEIGHT + offset += 1 + except IndexError: + pass + + right_txt = RIGHT_EDGE + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= 4*(MARGIN + LINE) + self.cols_width[0] + self.cols_width[1] + self.cols_width[2] + self.pangolayout.set_text(_("Sum")) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[3] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + cr.move_to(RIGHT_EDGE, addh) + cr.line_to(right_txt, addh) + + right_txt -= MARGIN + LINE + self.pangolayout.set_text(utility.showNumber(self.debt_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[4] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + right_txt -= MARGIN + LINE + self.pangolayout.set_text(utility.showNumber(self.credit_sum)) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + right_txt -= self.cols_width[5] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + + #Table top line + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(RIGHT_EDGE, TABLE_TOP) + + #Table bottom line + cr.move_to(right_txt, addh + ROW_HEIGHT) + cr.line_to(RIGHT_EDGE, addh + ROW_HEIGHT) + + cr.stroke() + + def drawTrialReport(self, page_nr): + RIGHT_EDGE = self.page_setup.get_page_width(gtk.UNIT_POINTS) - config.rightmargin + HEADER_HEIGHT = self.header_height + HEADING_HEIGHT = self.heading_height + MARGIN = self.cell_margin + TABLE_TOP = HEADER_HEIGHT + HEADING_HEIGHT + self.cell_margin + ROW_HEIGHT = self.row_height + 
LINE = self.line + + cr = self.cairo_context + fontsize = config.contentfont + fdesc = pango.FontDescription("Sans") + fdesc.set_size(fontsize * pango.SCALE) + self.pangolayout.set_font_description(fdesc) + + self.drawTableHeading() + + #Draw table data + rindex = page_nr * self.lines_per_page + offset = 0 + addh= TABLE_TOP + + try: + while (offset < self.lines_per_page): + row = self.content[rindex + offset] + + cr.move_to(RIGHT_EDGE, addh) + cr.line_to(RIGHT_EDGE, addh+ROW_HEIGHT) + + right_txt = RIGHT_EDGE + dindex = 0 + for data in row: + right_txt -= MARGIN+LINE + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), addh + (ROW_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + + right_txt -= self.cols_width[dindex] + cr.move_to(right_txt, addh) + cr.line_to(right_txt, addh + ROW_HEIGHT) + dindex += 1 + + addh += ROW_HEIGHT + offset += 1 + except IndexError: + pass + + #Table top line + cr.move_to(right_txt, TABLE_TOP) + cr.line_to(RIGHT_EDGE, TABLE_TOP) + + #Table bottom line + cr.move_to(right_txt, addh) + cr.line_to(RIGHT_EDGE, addh) + + cr.stroke() + + def setDrawFunction(self, func): + self.drawfunction = func + + def drawTableHeading(self): +# RIGHT_EDGE = 570 #(table width + PAGE_MARGIN) + RIGHT_EDGE = self.page_setup.get_page_width(gtk.UNIT_POINTS) - config.rightmargin + HEADING_HEIGHT = self.heading_height + MARGIN = self.cell_margin + LINE = self.line + + cr = self.cairo_context + + htop = self.header_height + MARGIN +# #Heading top line +# cr.move_to(self.page_margin, htop) +# cr.line_to(RIGHT_EDGE, htop) + + cr.move_to(RIGHT_EDGE, htop) + cr.line_to(RIGHT_EDGE, htop + HEADING_HEIGHT) + + #Draw table headings + right_txt = RIGHT_EDGE + dindex = 0 + for data in self.heading: + right_txt -= MARGIN+LINE + self.pangolayout.set_text(data) + (width, height) = self.pangolayout.get_size() + if (width / pango.SCALE) > self.cols_width[dindex]: + res = data.split() + self.pangolayout.set_text(res[0]) + (width, height) = self.pangolayout.get_size() + if (width / pango.SCALE) < self.cols_width[dindex]: + #self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), htop + (HEADING_HEIGHT/2-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + # + self.pangolayout.set_text(res[1]) + (width, height) = self.pangolayout.get_size() + #self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), htop + ((HEADING_HEIGHT*3)/2-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + else: + #self.pangolayout.set_alignment(pango.ALIGN_RIGHT) + cr.move_to (right_txt -(width / pango.SCALE), htop + (HEADING_HEIGHT-(height / pango.SCALE))/2) + self.pangocairo.show_layout(self.pangolayout) + + right_txt -= self.cols_width[dindex] + cr.move_to(right_txt, htop) + cr.line_to(right_txt, htop + HEADING_HEIGHT) + + dindex += 1 + + #Heading top line + cr.move_to(right_txt, htop) + cr.line_to(RIGHT_EDGE, htop) + +# def dailySpecific(self, pos, page): +# pass +# +# def subjectSpecific(self, pos, page): +# pass +# +# def docSpecific(self, pos, page): +# pass + +# Copyright 2013 Dean Gardiner +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from logr import Logr +from caper import CaperClosure, CaperFragment +from caper.helpers import clean_dict +from caper.result import CaperFragmentNode, CaperClosureNode +from caper.step import CaptureStep +from caper.constraint import CaptureConstraint + + +class CaptureGroup(object): + def __init__(self, parser, result): + """Capture group object + + :type parser: caper.parsers.base.Parser + :type result: caper.result.CaperResult + """ + + self.parser = parser + self.result = result + + #: @type: list of CaptureStep + self.steps = [] + + #: type: str + self.step_source = None + + #: @type: list of CaptureConstraint + self.pre_constraints = [] + + #: :type: list of CaptureConstraint + self.post_constraints = [] + + def capture_fragment(self, tag, regex=None, func=None, single=True, **kwargs): + Logr.debug('capture_fragment("%s", "%s", %s, %s)', tag, regex, func, single) + + if self.step_source != 'fragment': + if self.step_source is None: + self.step_source = 'fragment' + else: + raise ValueError("Unable to mix fragment and closure capturing in a group") + + self.steps.append(CaptureStep( + self, tag, + 'fragment', + regex=regex, + func=func, + single=single, + **kwargs + )) + + return self + + def capture_closure(self, tag, regex=None, func=None, single=True, **kwargs): + Logr.debug('capture_closure("%s", "%s", %s, %s)', tag, regex, func, single) + + if self.step_source != 'closure': + if self.step_source is None: + self.step_source = 'closure' + else: + raise ValueError("Unable to mix fragment and closure capturing in a group") + + self.steps.append(CaptureStep( + self, tag, + 'closure', + regex=regex, + func=func, + single=single, + **kwargs + )) + + return self + + def until_closure(self, **kwargs): + self.pre_constraints.append(CaptureConstraint(self, 'match', target='closure', **kwargs)) + + return self + + def until_fragment(self, **kwargs): + self.pre_constraints.append(CaptureConstraint(self, 'match', target='fragment', **kwargs)) + + return self + + def until_result(self, **kwargs): + self.pre_constraints.append(CaptureConstraint(self, 'result', **kwargs)) + + return self + + def until_failure(self, **kwargs): + self.post_constraints.append(CaptureConstraint(self, 'failure', **kwargs)) + + return self + + def until_success(self, **kwargs): + self.post_constraints.append(CaptureConstraint(self, 'success', **kwargs)) + + return self + + def parse_subject(self, parent_head, subject): + Logr.debug("parse_subject (%s) subject: %s", self.step_source, repr(subject)) + + if type(subject) is CaperClosure: + return self.parse_closure(parent_head, subject) + + if type(subject) is CaperFragment: + return self.parse_fragment(parent_head, subject) + + raise ValueError('Unknown subject (%s)', subject) + + def parse_fragment(self, parent_head, subject): + parent_node = parent_head[0] if type(parent_head) is list else parent_head + + nodes, match = self.match(parent_head, parent_node, subject) + + # Capturing broke on constraint, return now + if not match: + return nodes + + Logr.debug('created fragment node with subject.value: "%s"' % subject.value) + + result = [CaperFragmentNode( 
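+            # Wrap the matched fragments in a new node: the parent closure, the
+            # fragments consumed by this match, the parent head and the match result.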
+ parent_node.closure, + subject.take_right(match.num_fragments), + parent_head, + match + )] + + # Branch if the match was indefinite (weight below 1.0) + if match.result and match.weight < 1.0: + if match.num_fragments == 1: + result.append(CaperFragmentNode(parent_node.closure, [subject], parent_head)) + else: + nodes.append(CaperFragmentNode(parent_node.closure, [subject], parent_head)) + + nodes.append(result[0] if len(result) == 1 else result) + + return nodes + + def parse_closure(self, parent_head, subject): + parent_node = parent_head[0] if type(parent_head) is list else parent_head + + nodes, match = self.match(parent_head, parent_node, subject) + + # Capturing broke on constraint, return now + if not match: + return nodes + + Logr.debug('created closure node with subject.value: "%s"' % subject.value) + + result = [CaperClosureNode( + subject, + parent_head, + match + )] + + # Branch if the match was indefinite (weight below 1.0) + if match.result and match.weight < 1.0: + if match.num_fragments == 1: + result.append(CaperClosureNode(subject, parent_head)) + else: + nodes.append(CaperClosureNode(subject, parent_head)) + + nodes.append(result[0] if len(result) == 1 else result) + + return nodes + + def match(self, parent_head, parent_node, subject): + nodes = [] + + # Check pre constaints + broke, definite = self.check_constraints(self.pre_constraints, parent_head, subject) + + if broke: + nodes.append(parent_head) + + if definite: + return nodes, None + + # Try match subject against the steps available + match = None + + for step in self.steps: + if step.source == 'closure' and type(subject) is not CaperClosure: + pass + elif step.source == 'fragment' and type(subject) is CaperClosure: + Logr.debug('Closure encountered on fragment step, jumping into fragments') + return [CaperClosureNode(subject, parent_head, None)], None + + match = step.execute(subject) + + if match.success: + if type(match.result) is dict: + match.result = clean_dict(match.result) + + Logr.debug('Found match with weight %s, match: %s, num_fragments: %s' % ( + match.weight, match.result, match.num_fragments + )) + + step.matched = True + + break + + if all([step.single and step.matched for step in self.steps]): + Logr.debug('All steps completed, group finished') + parent_node.finished_groups.append(self) + return nodes, match + + # Check post constraints + broke, definite = self.check_constraints(self.post_constraints, parent_head, subject, match=match) + if broke: + return nodes, None + + return nodes, match + + def check_constraints(self, constraints, parent_head, subject, **kwargs): + parent_node = parent_head[0] if type(parent_head) is list else parent_head + + # Check constraints + for constraint in [c for c in constraints if c.target == subject.__key__ or not c.target]: + Logr.debug("Testing constraint %s against subject %s", repr(constraint), repr(subject)) + + weight, success = constraint.execute(parent_node, subject, **kwargs) + + if success: + Logr.debug('capturing broke on "%s" at %s', subject.value, constraint) + parent_node.finished_groups.append(self) + + return True, weight == 1.0 + + return False, None + + def execute(self): + heads_finished = None + + while heads_finished is None or not (len(heads_finished) == len(self.result.heads) and all(heads_finished)): + heads_finished = [] + + heads = self.result.heads + self.result.heads = [] + + for head in heads: + node = head[0] if type(head) is list else head + + if self in node.finished_groups: + Logr.debug("head finished for group") + 
self.result.heads.append(head) + heads_finished.append(True) + continue + + Logr.debug('') + + Logr.debug(node) + + next_subject = node.next() + + Logr.debug('----------[%s] (%s)----------' % (next_subject, repr(next_subject.value) if next_subject else None)) + + if next_subject: + for node_result in self.parse_subject(head, next_subject): + self.result.heads.append(node_result) + + Logr.debug('Heads: %s', self.result.heads) + + heads_finished.append(self in node.finished_groups or next_subject is None) + + if len(self.result.heads) == 0: + self.result.heads = heads + + Logr.debug("heads_finished: %s, self.result.heads: %s", heads_finished, self.result.heads) + + Logr.debug("group finished") + +""" +============================================= +Whitening evoked data with a noise covariance +============================================= + +Evoked data are loaded and then whitened using a given noise covariance +matrix. It's an excellent quality check to see if baseline signals match +the assumption of Gaussian white noise during the baseline period. + +Covariance estimation and diagnostic plots are based on [1]_. + +References +---------- +.. [1] Engemann D. and Gramfort A. (2015) Automated model selection in + covariance estimation and spatial whitening of MEG and EEG signals, vol. + 108, 328-342, NeuroImage. + +""" +# Authors: Alexandre Gramfort +# Denis A. Engemann +# +# License: BSD (3-clause) + +import mne + +from mne import io +from mne.datasets import sample +from mne.cov import compute_covariance + +print(__doc__) + +############################################################################### +# Set parameters + +data_path = sample.data_path() +raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif' +event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif' + +raw = io.read_raw_fif(raw_fname, preload=True) +raw.filter(1, 40, n_jobs=1, fir_design='firwin') +raw.info['bads'] += ['MEG 2443'] # bads + 1 more +events = mne.read_events(event_fname) + +# let's look at rare events, button presses +event_id, tmin, tmax = 2, -0.2, 0.5 +reject = dict(mag=4e-12, grad=4000e-13, eeg=80e-6) + +epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=('meg', 'eeg'), + baseline=None, reject=reject, preload=True) + +# Uncomment next line to use fewer samples and study regularization effects +# epochs = epochs[:20] # For your data, use as many samples as you can! + +############################################################################### +# Compute covariance using automated regularization +method_params = dict(diagonal_fixed=dict(mag=0.01, grad=0.01, eeg=0.01)) +noise_covs = compute_covariance(epochs, tmin=None, tmax=0, method='auto', + return_estimators=True, verbose=True, n_jobs=1, + projs=None, rank=None, + method_params=method_params) + +# With "return_estimator=True" all estimated covariances sorted +# by log-likelihood are returned. + +print('Covariance estimates sorted from best to worst') +for c in noise_covs: + print("%s : %s" % (c['method'], c['loglik'])) + +############################################################################### +# Show the evoked data: + +evoked = epochs.average() + +evoked.plot(time_unit='s') # plot evoked response + +############################################################################### +# We can then show whitening for our various noise covariance estimates. +# +# Here we should look to see if baseline signals match the +# assumption of Gaussian white noise. 
we expect values centered at +# 0 within 2 standard deviations for 95% of the time points. +# +# For the Global field power we expect a value of 1. + +evoked.plot_white(noise_covs, time_unit='s') + +"""Conan recipe package for libsolace +""" +from conans import CMake, ConanFile +from conans.errors import ConanInvalidConfiguration +from conans.model.version import Version + + +class LibsolaceConan(ConanFile): + name = "libsolace" + description = "High performance components for mission critical applications" + license = "Apache-2.0" + author = "Ivan Ryabov " + url = "https://github.com/abbyssoul/conan-%s.git" % name + homepage = "https://github.com/abbyssoul/%s" % name + topics = ("HPC", "High reliability", "P10", "solace", "performance", "c++", "conan") + + settings = "os", "compiler", "build_type", "arch" + options = { + "shared": [True, False], + "fPIC": [True, False] + } + default_options = {"shared": False, "fPIC": True} + generators = "cmake" + build_requires = "gtest/1.10.0" + + scm = { + "type": "git", + "subfolder": name, + "url": "auto", + "revision": "auto" + } + + @property + def _supported_cppstd(self): + return ["17", "gnu17", "20", "gnu20"] + + @property + def _source_subfolder(self): + return self.name + + def config_options(self): + compiler_version = Version(str(self.settings.compiler.version)) + + if self.settings.os == "Windows": + del self.options.fPIC + # Exclude compilers that claims to support C++17 but do not in practice + if (self.settings.compiler == "gcc" and compiler_version < "7") or \ + (self.settings.compiler == "clang" and compiler_version < "5") or \ + (self.settings.compiler == "apple-clang" and compiler_version < "9"): + raise ConanInvalidConfiguration("This library requires C++17 or higher support standard. {} {} is not supported".format(self.settings.compiler, self.settings.compiler.version)) + if self.settings.compiler.cppstd and not self.settings.compiler.cppstd in self._supported_cppstd: + raise ConanInvalidConfiguration("This library requires c++17 standard or higher. {} required".format(self.settings.compiler.cppstd)) + + def _configure_cmake(self): + cmake = CMake(self, parallel=True) + cmake.definitions["PKG_CONFIG"] = "OFF" + cmake.configure(source_folder=self._source_subfolder) + return cmake + + def build(self): + cmake = self._configure_cmake() + cmake.build() + + def package(self): + cmake = self._configure_cmake() + cmake.install() + self.copy(pattern="LICENSE", dst="licenses", src=self._source_subfolder) + + def package_info(self): + self.cpp_info.libs = ["solace"] + if self.settings.os == "Linux": + self.cpp_info.libs.append("m") + +from django.utils.encoding import smart_str, smart_unicode + +def ssn_check_digit(value): + "Calculate Italian social security number check digit." 
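+    # The first 15 characters are summed via two lookup tables: characters in
+    # odd positions (1-based, i.e. even 0-based indexes) use ssn_odd_chars,
+    # those in even positions use ssn_even_chars. The total modulo 26 selects
+    # the check letter from A-Z.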
+ ssn_even_chars = { + '0': 0, '1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, + '9': 9, 'A': 0, 'B': 1, 'C': 2, 'D': 3, 'E': 4, 'F': 5, 'G': 6, 'H': 7, + 'I': 8, 'J': 9, 'K': 10, 'L': 11, 'M': 12, 'N': 13, 'O': 14, 'P': 15, + 'Q': 16, 'R': 17, 'S': 18, 'T': 19, 'U': 20, 'V': 21, 'W': 22, 'X': 23, + 'Y': 24, 'Z': 25 + } + ssn_odd_chars = { + '0': 1, '1': 0, '2': 5, '3': 7, '4': 9, '5': 13, '6': 15, '7': 17, '8': + 19, '9': 21, 'A': 1, 'B': 0, 'C': 5, 'D': 7, 'E': 9, 'F': 13, 'G': 15, + 'H': 17, 'I': 19, 'J': 21, 'K': 2, 'L': 4, 'M': 18, 'N': 20, 'O': 11, + 'P': 3, 'Q': 6, 'R': 8, 'S': 12, 'T': 14, 'U': 16, 'V': 10, 'W': 22, + 'X': 25, 'Y': 24, 'Z': 23 + } + # Chars from 'A' to 'Z' + ssn_check_digits = [chr(x) for x in range(65, 91)] + + ssn = value.upper() + total = 0 + for i in range(0, 15): + try: + if i % 2 == 0: + total += ssn_odd_chars[ssn[i]] + else: + total += ssn_even_chars[ssn[i]] + except KeyError: + msg = "Character '%(char)s' is not allowed." % {'char': ssn[i]} + raise ValueError(msg) + return ssn_check_digits[total % 26] + +def vat_number_check_digit(vat_number): + "Calculate Italian VAT number check digit." + normalized_vat_number = smart_str(vat_number).zfill(10) + total = 0 + for i in range(0, 10, 2): + total += int(normalized_vat_number[i]) + for i in range(1, 11, 2): + quotient , remainder = divmod(int(normalized_vat_number[i]) * 2, 10) + total += quotient + remainder + return smart_unicode((10 - total % 10) % 10) + +"""Text wrapping and filling. +""" + +# Copyright (C) 1999-2001 Gregory P. Ward. +# Copyright (C) 2002, 2003 Python Software Foundation. +# Written by Greg Ward + +import re + +__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent', 'indent'] + +# Hardcode the recognized whitespace characters to the US-ASCII +# whitespace characters. The main reason for doing this is that in +# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales +# that character winds up in string.whitespace. Respecting +# string.whitespace in those cases would 1) make textwrap treat 0xa0 the +# same as any other whitespace char, which is clearly wrong (it's a +# *non-breaking* space), 2) possibly cause problems with Unicode, +# since 0xa0 is not in range(128). +_whitespace = '\t\n\x0b\x0c\r ' + +class TextWrapper: + """ + Object for wrapping/filling text. The public interface consists of + the wrap() and fill() methods; the other methods are just there for + subclasses to override in order to tweak the default behaviour. + If you want to completely replace the main wrapping algorithm, + you'll probably have to override _wrap_chunks(). + + Several instance attributes control various aspects of wrapping: + width (default: 70) + the maximum width of wrapped lines (unless break_long_words + is false) + initial_indent (default: "") + string that will be prepended to the first line of wrapped + output. Counts towards the line's width. + subsequent_indent (default: "") + string that will be prepended to all lines save the first + of wrapped output; also counts towards each line's width. + expand_tabs (default: true) + Expand tabs in input text to spaces before further processing. + Each tab will become 0 .. 'tabsize' spaces, depending on its position + in its line. If false, each tab is treated as a single character. + tabsize (default: 8) + Expand tabs in input text to 0 .. 'tabsize' spaces, unless + 'expand_tabs' is false. + replace_whitespace (default: true) + Replace all whitespace characters in the input text by spaces + after tab expansion. 
Note that if expand_tabs is false and + replace_whitespace is true, every tab will be converted to a + single space! + fix_sentence_endings (default: false) + Ensure that sentence-ending punctuation is always followed + by two spaces. Off by default because the algorithm is + (unavoidably) imperfect. + break_long_words (default: true) + Break words longer than 'width'. If false, those words will not + be broken, and some lines might be longer than 'width'. + break_on_hyphens (default: true) + Allow breaking hyphenated words. If true, wrapping will occur + preferably on whitespaces and right after hyphens part of + compound words. + drop_whitespace (default: true) + Drop leading and trailing whitespace from lines. + """ + + unicode_whitespace_trans = {} + uspace = ord(' ') + for x in _whitespace: + unicode_whitespace_trans[ord(x)] = uspace + + # This funky little regex is just the trick for splitting + # text up into word-wrappable chunks. E.g. + # "Hello there -- you goof-ball, use the -b option!" + # splits into + # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option! + # (after stripping out empty strings). + wordsep_re = re.compile( + r'(\s+|' # any whitespace + r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|' # hyphenated words + r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash + + # This less funky little regex just split on recognized spaces. E.g. + # "Hello there -- you goof-ball, use the -b option!" + # splits into + # Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/ + wordsep_simple_re = re.compile(r'(\s+)') + + # XXX this is not locale- or charset-aware -- string.lowercase + # is US-ASCII only (and therefore English-only) + sentence_end_re = re.compile(r'[a-z]' # lowercase letter + r'[\.\!\?]' # sentence-ending punct. + r'[\"\']?' # optional end-of-quote + r'\Z') # end of chunk + + + def __init__(self, + width=70, + initial_indent="", + subsequent_indent="", + expand_tabs=True, + replace_whitespace=True, + fix_sentence_endings=False, + break_long_words=True, + drop_whitespace=True, + break_on_hyphens=True, + tabsize=8): + self.width = width + self.initial_indent = initial_indent + self.subsequent_indent = subsequent_indent + self.expand_tabs = expand_tabs + self.replace_whitespace = replace_whitespace + self.fix_sentence_endings = fix_sentence_endings + self.break_long_words = break_long_words + self.drop_whitespace = drop_whitespace + self.break_on_hyphens = break_on_hyphens + self.tabsize = tabsize + + + # -- Private methods ----------------------------------------------- + # (possibly useful for subclasses to override) + + def _munge_whitespace(self, text): + """_munge_whitespace(text : string) -> string + + Munge whitespace in text: expand tabs and convert all other + whitespace characters to spaces. Eg. " foo\tbar\n\nbaz" + becomes " foo bar baz". + """ + if self.expand_tabs: + text = text.expandtabs(self.tabsize) + if self.replace_whitespace: + text = text.translate(self.unicode_whitespace_trans) + return text + + + def _split(self, text): + """_split(text : string) -> [string] + + Split the text to wrap into indivisible chunks. Chunks are + not quite the same as words; see _wrap_chunks() for full + details. As an example, the text + Look, goof-ball -- use the -b option! + breaks into the following chunks: + 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', 'option!' + if break_on_hyphens is True, or in: + 'Look,', ' ', 'goof-ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', option!' + otherwise. 
+ """ + if self.break_on_hyphens is True: + chunks = self.wordsep_re.split(text) + else: + chunks = self.wordsep_simple_re.split(text) + chunks = [c for c in chunks if c] + return chunks + + def _fix_sentence_endings(self, chunks): + """_fix_sentence_endings(chunks : [string]) + + Correct for sentence endings buried in 'chunks'. Eg. when the + original text contains "... foo.\nBar ...", munge_whitespace() + and split() will convert that to [..., "foo.", " ", "Bar", ...] + which has one too few spaces; this method simply changes the one + space to two. + """ + i = 0 + patsearch = self.sentence_end_re.search + while i < len(chunks)-1: + if chunks[i+1] == " " and patsearch(chunks[i]): + chunks[i+1] = " " + i += 2 + else: + i += 1 + + def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): + """_handle_long_word(chunks : [string], + cur_line : [string], + cur_len : int, width : int) + + Handle a chunk of text (most likely a word, not whitespace) that + is too long to fit in any line. + """ + # Figure out when indent is larger than the specified width, and make + # sure at least one character is stripped off on every pass + if width < 1: + space_left = 1 + else: + space_left = width - cur_len + + # If we're allowed to break long words, then do so: put as much + # of the next chunk onto the current line as will fit. + if self.break_long_words: + cur_line.append(reversed_chunks[-1][:space_left]) + reversed_chunks[-1] = reversed_chunks[-1][space_left:] + + # Otherwise, we have to preserve the long word intact. Only add + # it to the current line if there's nothing already there -- + # that minimizes how much we violate the width constraint. + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + # If we're not allowed to break long words, and there's already + # text on the current line, do nothing. Next time through the + # main loop of _wrap_chunks(), we'll wind up here again, but + # cur_len will be zero, so the next line will be entirely + # devoted to the long word that we can't handle right now. + + def _wrap_chunks(self, chunks): + """_wrap_chunks(chunks : [string]) -> [string] + + Wrap a sequence of text chunks and return a list of lines of + length 'self.width' or less. (If 'break_long_words' is false, + some lines may be longer than this.) Chunks correspond roughly + to words and the whitespace between them: each chunk is + indivisible (modulo 'break_long_words'), but a line break can + come between any two chunks. Chunks should not have internal + whitespace; ie. a chunk is either all whitespace or a "word". + Whitespace chunks will be removed from the beginning and end of + lines, but apart from that whitespace is preserved. + """ + lines = [] + if self.width <= 0: + raise ValueError("invalid width %r (must be > 0)" % self.width) + + # Arrange in reverse order so items can be efficiently popped + # from a stack of chucks. + chunks.reverse() + + while chunks: + + # Start the list of chunks that will make up the current line. + # cur_len is just the length of all the chunks in cur_line. + cur_line = [] + cur_len = 0 + + # Figure out which static string will prefix this line. + if lines: + indent = self.subsequent_indent + else: + indent = self.initial_indent + + # Maximum width for this line. + width = self.width - len(indent) + + # First chunk on line is whitespace -- drop it, unless this + # is the very beginning of the text (ie. no lines started yet). 
+ if self.drop_whitespace and chunks[-1].strip() == '' and lines: + del chunks[-1] + + while chunks: + l = len(chunks[-1]) + + # Can at least squeeze this chunk onto the current line. + if cur_len + l <= width: + cur_line.append(chunks.pop()) + cur_len += l + + # Nope, this line is full. + else: + break + + # The current line is full, and the next chunk is too big to + # fit on *any* line (not just this one). + if chunks and len(chunks[-1]) > width: + self._handle_long_word(chunks, cur_line, cur_len, width) + + # If the last chunk on this line is all whitespace, drop it. + if self.drop_whitespace and cur_line and cur_line[-1].strip() == '': + del cur_line[-1] + + # Convert current line back to a string and store it in list + # of all lines (return value). + if cur_line: + lines.append(indent + ''.join(cur_line)) + + return lines + + + # -- Public interface ---------------------------------------------- + + def wrap(self, text): + """wrap(text : string) -> [string] + + Reformat the single paragraph in 'text' so it fits in lines of + no more than 'self.width' columns, and return a list of wrapped + lines. Tabs in 'text' are expanded with string.expandtabs(), + and all other whitespace characters (including newline) are + converted to space. + """ + text = self._munge_whitespace(text) + chunks = self._split(text) + if self.fix_sentence_endings: + self._fix_sentence_endings(chunks) + return self._wrap_chunks(chunks) + + def fill(self, text): + """fill(text : string) -> string + + Reformat the single paragraph in 'text' to fit in lines of no + more than 'self.width' columns, and return a new string + containing the entire wrapped paragraph. + """ + return "\n".join(self.wrap(text)) + + +# -- Convenience interface --------------------------------------------- + +def wrap(text, width=70, **kwargs): + """Wrap a single paragraph of text, returning a list of wrapped lines. + + Reformat the single paragraph in 'text' so it fits in lines of no + more than 'width' columns, and return a list of wrapped lines. By + default, tabs in 'text' are expanded with string.expandtabs(), and + all other whitespace characters (including newline) are converted to + space. See TextWrapper class for available keyword args to customize + wrapping behaviour. + """ + w = TextWrapper(width=width, **kwargs) + return w.wrap(text) + +def fill(text, width=70, **kwargs): + """Fill a single paragraph of text, returning a new string. + + Reformat the single paragraph in 'text' to fit in lines of no more + than 'width' columns, and return a new string containing the entire + wrapped paragraph. As with wrap(), tabs are expanded and other + whitespace characters converted to space. See TextWrapper class for + available keyword args to customize wrapping behaviour. + """ + w = TextWrapper(width=width, **kwargs) + return w.fill(text) + + +# -- Loosely related functionality ------------------------------------- + +_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE) +_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE) + +def dedent(text): + """Remove any common leading whitespace from every line in `text`. + + This can be used to make triple-quoted strings line up with the left + edge of the display, while still presenting them in the source code + in indented form. + + Note that tabs and spaces are both treated as whitespace, but they + are not equal: the lines " hello" and "\thello" are + considered to have no common leading whitespace. 
(This behaviour is + new in Python 2.5; older versions of this module incorrectly + expanded tabs before searching for common leading whitespace.) + """ + # Look for the longest leading string of spaces and tabs common to + # all lines. + margin = None + text = _whitespace_only_re.sub('', text) + indents = _leading_whitespace_re.findall(text) + for indent in indents: + if margin is None: + margin = indent + + # Current line more deeply indented than previous winner: + # no change (previous winner is still on top). + elif indent.startswith(margin): + pass + + # Current line consistent with and no deeper than previous winner: + # it's the new winner. + elif margin.startswith(indent): + margin = indent + + # Current line and previous winner have no common whitespace: + # there is no margin. + else: + margin = "" + break + + # sanity check (testing/debugging only) + if 0 and margin: + for line in text.split("\n"): + assert not line or line.startswith(margin), \ + "line = %r, margin = %r" % (line, margin) + + if margin: + text = re.sub(r'(?m)^' + margin, '', text) + return text + + +def indent(text, prefix, predicate=None): + """Adds 'prefix' to the beginning of selected lines in 'text'. + + If 'predicate' is provided, 'prefix' will only be added to the lines + where 'predicate(line)' is True. If 'predicate' is not provided, + it will default to adding 'prefix' to all non-empty lines that do not + consist solely of whitespace characters. + """ + if predicate is None: + def predicate(line): + return line.strip() + + def prefixed_lines(): + for line in text.splitlines(True): + yield (prefix + line if predicate(line) else line) + return ''.join(prefixed_lines()) + + +if __name__ == "__main__": + #print dedent("\tfoo\n\tbar") + #print dedent(" \thello there\n \t how are you?") + print(dedent("Hello there.\n This is indented.")) + +"""Tests for classes defined in fields.py.""" + +import datetime +import unittest + +from django.utils.timezone import UTC + +from xmodule.fields import Date, Timedelta, RelativeTime +from xmodule.timeinfo import TimeInfo + + +class DateTest(unittest.TestCase): + date = Date() + + def compare_dates(self, dt1, dt2, expected_delta): + self.assertEqual( + dt1 - dt2, + expected_delta, + str(dt1) + "-" + str(dt2) + "!=" + str(expected_delta) + ) + + def test_from_json(self): + """Test conversion from iso compatible date strings to struct_time""" + self.compare_dates( + DateTest.date.from_json("2013-01-01"), + DateTest.date.from_json("2012-12-31"), + datetime.timedelta(days=1) + ) + self.compare_dates( + DateTest.date.from_json("2013-01-01T00"), + DateTest.date.from_json("2012-12-31T23"), + datetime.timedelta(hours=1) + ) + self.compare_dates( + DateTest.date.from_json("2013-01-01T00:00"), + DateTest.date.from_json("2012-12-31T23:59"), + datetime.timedelta(minutes=1) + ) + self.compare_dates( + DateTest.date.from_json("2013-01-01T00:00:00"), + DateTest.date.from_json("2012-12-31T23:59:59"), + datetime.timedelta(seconds=1) + ) + self.compare_dates( + DateTest.date.from_json("2013-01-01T00:00:00Z"), + DateTest.date.from_json("2012-12-31T23:59:59Z"), + datetime.timedelta(seconds=1) + ) + self.compare_dates( + DateTest.date.from_json("2012-12-31T23:00:01-01:00"), + DateTest.date.from_json("2013-01-01T00:00:00+01:00"), + datetime.timedelta(hours=1, seconds=1) + ) + + def test_enforce_type(self): + self.assertEqual(DateTest.date.enforce_type(None), None) + self.assertEqual(DateTest.date.enforce_type(""), None) + self.assertEqual( + DateTest.date.enforce_type("2012-12-31T23:00:01"), + 
datetime.datetime(2012, 12, 31, 23, 0, 1, tzinfo=UTC()) + ) + self.assertEqual( + DateTest.date.enforce_type(1234567890000), + datetime.datetime(2009, 2, 13, 23, 31, 30, tzinfo=UTC()) + ) + self.assertEqual( + DateTest.date.enforce_type(datetime.datetime(2014, 5, 9, 21, 1, 27, tzinfo=UTC())), + datetime.datetime(2014, 5, 9, 21, 1, 27, tzinfo=UTC()) + ) + with self.assertRaises(TypeError): + DateTest.date.enforce_type([1]) + + def test_return_None(self): + self.assertIsNone(DateTest.date.from_json("")) + self.assertIsNone(DateTest.date.from_json(None)) + with self.assertRaises(TypeError): + DateTest.date.from_json(['unknown value']) + + def test_old_due_date_format(self): + current = datetime.datetime.today() + self.assertEqual( + datetime.datetime(current.year, 3, 12, 12, tzinfo=UTC()), + DateTest.date.from_json("March 12 12:00") + ) + self.assertEqual( + datetime.datetime(current.year, 12, 4, 16, 30, tzinfo=UTC()), + DateTest.date.from_json("December 4 16:30") + ) + self.assertIsNone(DateTest.date.from_json("12 12:00")) + + def test_non_std_from_json(self): + """ + Test the non-standard args being passed to from_json + """ + now = datetime.datetime.now(UTC()) + delta = now - datetime.datetime.fromtimestamp(0, UTC()) + self.assertEqual( + DateTest.date.from_json(delta.total_seconds() * 1000), + now + ) + yesterday = datetime.datetime.now(UTC()) - datetime.timedelta(days=-1) + self.assertEqual(DateTest.date.from_json(yesterday), yesterday) + + def test_to_json(self): + """ + Test converting time reprs to iso dates + """ + self.assertEqual( + DateTest.date.to_json(datetime.datetime.strptime("2012-12-31T23:59:59Z", "%Y-%m-%dT%H:%M:%SZ")), + "2012-12-31T23:59:59Z" + ) + self.assertEqual( + DateTest.date.to_json(DateTest.date.from_json("2012-12-31T23:59:59Z")), + "2012-12-31T23:59:59Z" + ) + self.assertEqual( + DateTest.date.to_json(DateTest.date.from_json("2012-12-31T23:00:01-01:00")), + "2012-12-31T23:00:01-01:00" + ) + with self.assertRaises(TypeError): + DateTest.date.to_json('2012-12-31T23:00:01-01:00') + + +class TimedeltaTest(unittest.TestCase): + delta = Timedelta() + + def test_from_json(self): + self.assertEqual( + TimedeltaTest.delta.from_json('1 day 12 hours 59 minutes 59 seconds'), + datetime.timedelta(days=1, hours=12, minutes=59, seconds=59) + ) + + self.assertEqual( + TimedeltaTest.delta.from_json('1 day 46799 seconds'), + datetime.timedelta(days=1, seconds=46799) + ) + + def test_enforce_type(self): + self.assertEqual(TimedeltaTest.delta.enforce_type(None), None) + self.assertEqual( + TimedeltaTest.delta.enforce_type(datetime.timedelta(days=1, seconds=46799)), + datetime.timedelta(days=1, seconds=46799) + ) + self.assertEqual( + TimedeltaTest.delta.enforce_type('1 day 46799 seconds'), + datetime.timedelta(days=1, seconds=46799) + ) + with self.assertRaises(TypeError): + TimedeltaTest.delta.enforce_type([1]) + + def test_to_json(self): + self.assertEqual( + '1 days 46799 seconds', + TimedeltaTest.delta.to_json(datetime.timedelta(days=1, hours=12, minutes=59, seconds=59)) + ) + + +class TimeInfoTest(unittest.TestCase): + def test_time_info(self): + due_date = datetime.datetime(2000, 4, 14, 10, tzinfo=UTC()) + grace_pd_string = '1 day 12 hours 59 minutes 59 seconds' + timeinfo = TimeInfo(due_date, grace_pd_string) + self.assertEqual( + timeinfo.close_date, + due_date + Timedelta().from_json(grace_pd_string) + ) + + +class RelativeTimeTest(unittest.TestCase): + + delta = RelativeTime() + + def test_from_json(self): + self.assertEqual( + RelativeTimeTest.delta.from_json('0:05:07'), 
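+            # '0:05:07' is 5 minutes 7 seconds, i.e. 307 seconds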
+ datetime.timedelta(seconds=307) + ) + + self.assertEqual( + RelativeTimeTest.delta.from_json(100.0), + datetime.timedelta(seconds=100) + ) + self.assertEqual( + RelativeTimeTest.delta.from_json(None), + datetime.timedelta(seconds=0) + ) + + with self.assertRaises(TypeError): + RelativeTimeTest.delta.from_json(1234) # int + + with self.assertRaises(ValueError): + RelativeTimeTest.delta.from_json("77:77:77") + + def test_enforce_type(self): + self.assertEqual(RelativeTimeTest.delta.enforce_type(None), None) + self.assertEqual( + RelativeTimeTest.delta.enforce_type(datetime.timedelta(days=1, seconds=46799)), + datetime.timedelta(days=1, seconds=46799) + ) + self.assertEqual( + RelativeTimeTest.delta.enforce_type('0:05:07'), + datetime.timedelta(seconds=307) + ) + with self.assertRaises(TypeError): + RelativeTimeTest.delta.enforce_type([1]) + + def test_to_json(self): + self.assertEqual( + "01:02:03", + RelativeTimeTest.delta.to_json(datetime.timedelta(seconds=3723)) + ) + self.assertEqual( + "00:00:00", + RelativeTimeTest.delta.to_json(None) + ) + self.assertEqual( + "00:01:40", + RelativeTimeTest.delta.to_json(100.0) + ) + + with self.assertRaisesRegexp(ValueError, "RelativeTime max value is 23:59:59=86400.0 seconds, but 90000.0 seconds is passed"): + RelativeTimeTest.delta.to_json(datetime.timedelta(seconds=90000)) + + with self.assertRaises(TypeError): + RelativeTimeTest.delta.to_json("123") + + def test_str(self): + self.assertEqual( + "01:02:03", + RelativeTimeTest.delta.to_json(datetime.timedelta(seconds=3723)) + ) + self.assertEqual( + "11:02:03", + RelativeTimeTest.delta.to_json(datetime.timedelta(seconds=39723)) + ) + +import json +import unittest + +from django.contrib.postgres.fields import ArrayField +from django.contrib.postgres.forms import SimpleArrayField, SplitArrayField +from django.core import exceptions, serializers +from django.core.management import call_command +from django.db import models, IntegrityError, connection +from django import forms +from django.test import TestCase, override_settings +from django.utils import timezone + +from .models import IntegerArrayModel, NullableIntegerArrayModel, CharArrayModel, DateTimeArrayModel, NestedIntegerArrayModel, ArrayFieldSubclass + + +@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL required') +class TestSaveLoad(TestCase): + + def test_integer(self): + instance = IntegerArrayModel(field=[1, 2, 3]) + instance.save() + loaded = IntegerArrayModel.objects.get() + self.assertEqual(instance.field, loaded.field) + + def test_char(self): + instance = CharArrayModel(field=['hello', 'goodbye']) + instance.save() + loaded = CharArrayModel.objects.get() + self.assertEqual(instance.field, loaded.field) + + def test_dates(self): + instance = DateTimeArrayModel(field=[timezone.now()]) + instance.save() + loaded = DateTimeArrayModel.objects.get() + self.assertEqual(instance.field, loaded.field) + + def test_tuples(self): + instance = IntegerArrayModel(field=(1,)) + instance.save() + loaded = IntegerArrayModel.objects.get() + self.assertSequenceEqual(instance.field, loaded.field) + + def test_integers_passed_as_strings(self): + # This checks that get_prep_value is deferred properly + instance = IntegerArrayModel(field=['1']) + instance.save() + loaded = IntegerArrayModel.objects.get() + self.assertEqual(loaded.field, [1]) + + def test_default_null(self): + instance = NullableIntegerArrayModel() + instance.save() + loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk) + self.assertEqual(loaded.field, None) + 
self.assertEqual(instance.field, loaded.field) + + def test_null_handling(self): + instance = NullableIntegerArrayModel(field=None) + instance.save() + loaded = NullableIntegerArrayModel.objects.get() + self.assertEqual(instance.field, loaded.field) + + instance = IntegerArrayModel(field=None) + with self.assertRaises(IntegrityError): + instance.save() + + def test_nested(self): + instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]]) + instance.save() + loaded = NestedIntegerArrayModel.objects.get() + self.assertEqual(instance.field, loaded.field) + + +@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL required') +class TestQuerying(TestCase): + + def setUp(self): + self.objs = [ + NullableIntegerArrayModel.objects.create(field=[1]), + NullableIntegerArrayModel.objects.create(field=[2]), + NullableIntegerArrayModel.objects.create(field=[2, 3]), + NullableIntegerArrayModel.objects.create(field=[20, 30, 40]), + NullableIntegerArrayModel.objects.create(field=None), + ] + + def test_exact(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__exact=[1]), + self.objs[:1] + ) + + def test_isnull(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__isnull=True), + self.objs[-1:] + ) + + def test_gt(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__gt=[0]), + self.objs[:4] + ) + + def test_lt(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__lt=[2]), + self.objs[:1] + ) + + def test_in(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]), + self.objs[:2] + ) + + def test_contained_by(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]), + self.objs[:2] + ) + + def test_contains(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__contains=[2]), + self.objs[1:3] + ) + + def test_contains_charfield(self): + # Regression for #22907 + self.assertSequenceEqual( + CharArrayModel.objects.filter(field__contains=['text']), + [] + ) + + def test_index(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__0=2), + self.objs[1:3] + ) + + def test_index_chained(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__0__lt=3), + self.objs[0:3] + ) + + def test_index_nested(self): + instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) + self.assertSequenceEqual( + NestedIntegerArrayModel.objects.filter(field__0__0=1), + [instance] + ) + + @unittest.expectedFailure + def test_index_used_on_nested_data(self): + instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) + self.assertSequenceEqual( + NestedIntegerArrayModel.objects.filter(field__0=[1, 2]), + [instance] + ) + + def test_overlap(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]), + self.objs[0:3] + ) + + def test_len(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__len__lte=2), + self.objs[0:3] + ) + + def test_slice(self): + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__0_1=[2]), + self.objs[1:3] + ) + + self.assertSequenceEqual( + NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]), + self.objs[2:3] + ) + + @unittest.expectedFailure + def test_slice_nested(self): + instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) + 
self.assertSequenceEqual( + NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]), + [instance] + ) + + +class TestChecks(TestCase): + + def test_field_checks(self): + field = ArrayField(models.CharField()) + field.set_attributes_from_name('field') + errors = field.check() + self.assertEqual(len(errors), 1) + self.assertEqual(errors[0].id, 'postgres.E001') + + def test_invalid_base_fields(self): + field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel')) + field.set_attributes_from_name('field') + errors = field.check() + self.assertEqual(len(errors), 1) + self.assertEqual(errors[0].id, 'postgres.E002') + + +class TestMigrations(TestCase): + + def test_deconstruct(self): + field = ArrayField(models.IntegerField()) + name, path, args, kwargs = field.deconstruct() + new = ArrayField(*args, **kwargs) + self.assertEqual(type(new.base_field), type(field.base_field)) + + def test_deconstruct_with_size(self): + field = ArrayField(models.IntegerField(), size=3) + name, path, args, kwargs = field.deconstruct() + new = ArrayField(*args, **kwargs) + self.assertEqual(new.size, field.size) + + def test_deconstruct_args(self): + field = ArrayField(models.CharField(max_length=20)) + name, path, args, kwargs = field.deconstruct() + new = ArrayField(*args, **kwargs) + self.assertEqual(new.base_field.max_length, field.base_field.max_length) + + def test_subclass_deconstruct(self): + field = ArrayField(models.IntegerField()) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField') + + field = ArrayFieldSubclass() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass') + + @override_settings(MIGRATION_MODULES={ + "postgres_tests": "postgres_tests.array_default_migrations", + }) + def test_adding_field_with_default(self): + # See #22962 + call_command('migrate', 'postgres_tests', verbosity=0) + + +@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL required') +class TestSerialization(TestCase): + test_data = '[{"fields": {"field": "[\\"1\\", \\"2\\"]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]' + + def test_dumping(self): + instance = IntegerArrayModel(field=[1, 2]) + data = serializers.serialize('json', [instance]) + self.assertEqual(json.loads(data), json.loads(self.test_data)) + + def test_loading(self): + instance = list(serializers.deserialize('json', self.test_data))[0].object + self.assertEqual(instance.field, [1, 2]) + + +class TestValidation(TestCase): + + def test_unbounded(self): + field = ArrayField(models.IntegerField()) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean([1, None], None) + self.assertEqual(cm.exception.code, 'item_invalid') + self.assertEqual(cm.exception.message % cm.exception.params, 'Item 1 in the array did not validate: This field cannot be null.') + + def test_blank_true(self): + field = ArrayField(models.IntegerField(blank=True, null=True)) + # This should not raise a validation error + field.clean([1, None], None) + + def test_with_size(self): + field = ArrayField(models.IntegerField(), size=3) + field.clean([1, 2, 3], None) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean([1, 2, 3, 4], None) + self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.') + + def test_nested_array_mismatch(self): + field = ArrayField(ArrayField(models.IntegerField())) + field.clean([[1, 2], [3, 4]], None) + with 
self.assertRaises(exceptions.ValidationError) as cm: + field.clean([[1, 2], [3, 4, 5]], None) + self.assertEqual(cm.exception.code, 'nested_array_mismatch') + self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.') + + +class TestSimpleFormField(TestCase): + + def test_valid(self): + field = SimpleArrayField(forms.CharField()) + value = field.clean('a,b,c') + self.assertEqual(value, ['a', 'b', 'c']) + + def test_to_python_fail(self): + field = SimpleArrayField(forms.IntegerField()) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean('a,b,9') + self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a whole number.') + + def test_validate_fail(self): + field = SimpleArrayField(forms.CharField(required=True)) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean('a,b,') + self.assertEqual(cm.exception.messages[0], 'Item 2 in the array did not validate: This field is required.') + + def test_validators_fail(self): + field = SimpleArrayField(forms.RegexField('[a-e]{2}')) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean('a,bc,de') + self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a valid value.') + + def test_delimiter(self): + field = SimpleArrayField(forms.CharField(), delimiter='|') + value = field.clean('a|b|c') + self.assertEqual(value, ['a', 'b', 'c']) + + def test_delimiter_with_nesting(self): + field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|') + value = field.clean('a,b|c,d') + self.assertEqual(value, [['a', 'b'], ['c', 'd']]) + + def test_prepare_value(self): + field = SimpleArrayField(forms.CharField()) + value = field.prepare_value(['a', 'b', 'c']) + self.assertEqual(value, 'a,b,c') + + def test_max_length(self): + field = SimpleArrayField(forms.CharField(), max_length=2) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean('a,b,c') + self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.') + + def test_min_length(self): + field = SimpleArrayField(forms.CharField(), min_length=4) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean('a,b,c') + self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.') + + def test_required(self): + field = SimpleArrayField(forms.CharField(), required=True) + with self.assertRaises(exceptions.ValidationError) as cm: + field.clean('') + self.assertEqual(cm.exception.messages[0], 'This field is required.') + + def test_model_field_formfield(self): + model_field = ArrayField(models.CharField(max_length=27)) + form_field = model_field.formfield() + self.assertIsInstance(form_field, SimpleArrayField) + self.assertIsInstance(form_field.base_field, forms.CharField) + self.assertEqual(form_field.base_field.max_length, 27) + + def test_model_field_formfield_size(self): + model_field = ArrayField(models.CharField(max_length=27), size=4) + form_field = model_field.formfield() + self.assertIsInstance(form_field, SimpleArrayField) + self.assertEqual(form_field.max_length, 4) + + +class TestSplitFormField(TestCase): + + def test_valid(self): + class SplitForm(forms.Form): + array = SplitArrayField(forms.CharField(), size=3) + + data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'} + form = SplitForm(data) + self.assertTrue(form.is_valid()) + self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']}) + + def test_required(self): + class 
SplitForm(forms.Form): + array = SplitArrayField(forms.CharField(), required=True, size=3) + + data = {'array_0': '', 'array_1': '', 'array_2': ''} + form = SplitForm(data) + self.assertFalse(form.is_valid()) + self.assertEqual(form.errors, {'array': ['This field is required.']}) + + def test_remove_trailing_nulls(self): + class SplitForm(forms.Form): + array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True) + + data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''} + form = SplitForm(data) + self.assertTrue(form.is_valid(), form.errors) + self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']}) + + def test_required_field(self): + class SplitForm(forms.Form): + array = SplitArrayField(forms.CharField(), size=3) + + data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''} + form = SplitForm(data) + self.assertFalse(form.is_valid()) + self.assertEqual(form.errors, {'array': ['Item 2 in the array did not validate: This field is required.']}) + + def test_rendering(self): + class SplitForm(forms.Form): + array = SplitArrayField(forms.CharField(), size=3) + + self.assertHTMLEqual(str(SplitForm()), ''' + + + + + + + + + ''') + +#!/usr/bin/env python +"""Utility functions and classes for GRR API client library.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import time + +from future.builtins import map + +from google.protobuf import wrappers_pb2 + +from google.protobuf import symbol_database + +from grr_api_client import errors + +from grr_response_proto import apple_firmware_pb2 +from grr_response_proto import checks_pb2 +from grr_response_proto import deprecated_pb2 +from grr_response_proto import flows_pb2 +from grr_response_proto import jobs_pb2 +from grr_response_proto import osquery_pb2 +from grr_response_proto import timeline_pb2 + +from grr_response_proto.api import artifact_pb2 +from grr_response_proto.api import client_pb2 +from grr_response_proto.api import config_pb2 +from grr_response_proto.api import cron_pb2 +from grr_response_proto.api import flow_pb2 +from grr_response_proto.api import hunt_pb2 +from grr_response_proto.api import output_plugin_pb2 +from grr_response_proto.api import reflection_pb2 +from grr_response_proto.api import stats_pb2 +from grr_response_proto.api import user_pb2 +from grr_response_proto.api import vfs_pb2 +from grr_response_proto.api import yara_pb2 + + +class ProtobufTypeNotFound(errors.Error): + pass + + +class ItemsIterator(object): + """Iterator object with a total_count property.""" + + def __init__(self, items=None, total_count=None): + super(ItemsIterator, self).__init__() + + self.items = items + self.total_count = total_count + + def __iter__(self): + for i in self.items: + yield i + + def __next__(self): + return next(self.items) + + # TODO: Compatibility method for Python 2. 
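+  # On Python 2 the iterator protocol calls next() rather than __next__().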
+ def next(self): + return self.__next__() + + +def MapItemsIterator(function, items): + """Maps ItemsIterator via given function.""" + return ItemsIterator( + items=map(function, items), total_count=items.total_count) + + +class BinaryChunkIterator(object): + """Iterator object for binary streams.""" + + def __init__(self, chunks=None, total_size=None, on_close=None): + super(BinaryChunkIterator, self).__init__() + + self.chunks = chunks + self.total_size = total_size + self.on_close = on_close + + def Close(self): + if self.on_close: + self.on_close() + self.on_close = None + + def __exit__(self, unused_type, unused_value, unused_traceback): + self.Close() + + def __iter__(self): + for c in self.chunks: + yield c + self.Close() + + def __next__(self): + try: + return next(self.chunks) + except StopIteration: + self.Close() + raise + + # TODO: Compatibility method for Python 2. + def next(self): + return self.__next__() + + def WriteToStream(self, out): + for c in self.chunks: + out.write(c) + self.Close() + + def WriteToFile(self, file_name): + with open(file_name, "wb") as fd: + self.WriteToStream(fd) + + +# Default poll interval in seconds. +DEFAULT_POLL_INTERVAL = 15 + +# Default poll timeout in seconds. +DEFAULT_POLL_TIMEOUT = 3600 + + +def Poll(generator=None, condition=None, interval=None, timeout=None): + """Periodically calls generator function until a condition is satisfied.""" + + if not generator: + raise ValueError("generator has to be a lambda") + + if not condition: + raise ValueError("condition has to be a lambda") + + if interval is None: + interval = DEFAULT_POLL_INTERVAL + + if timeout is None: + timeout = DEFAULT_POLL_TIMEOUT + + started = time.time() + while True: + obj = generator() + check_result = condition(obj) + if check_result: + return obj + + if timeout and (time.time() - started) > timeout: + raise errors.PollTimeoutError( + "Polling on %s timed out after %ds." 
% (obj, timeout)) + time.sleep(interval) + + +AFF4_PREFIX = "aff4:/" + + +def UrnStringToClientId(urn): + """Converts given URN string to a client id string.""" + if urn.startswith(AFF4_PREFIX): + urn = urn[len(AFF4_PREFIX):] + + components = urn.split("/") + return components[0] + + +def UrnStringToHuntId(urn): + """Converts given URN string to a flow id string.""" + if urn.startswith(AFF4_PREFIX): + urn = urn[len(AFF4_PREFIX):] + + components = urn.split("/") + if len(components) != 2 or components[0] != "hunts": + raise ValueError("Invalid hunt URN: %s" % urn) + + return components[-1] + + +TYPE_URL_PREFIX = "type.googleapis.com/" + + +def GetTypeUrl(proto): + """Returns type URL for a given proto.""" + + return TYPE_URL_PREFIX + proto.DESCRIPTOR.full_name + + +def TypeUrlToMessage(type_url): + """Returns a message instance corresponding to a given type URL.""" + + if not type_url.startswith(TYPE_URL_PREFIX): + raise ValueError("Type URL has to start with a prefix %s: %s" % + (TYPE_URL_PREFIX, type_url)) + + full_name = type_url[len(TYPE_URL_PREFIX):] + try: + return symbol_database.Default().GetSymbol(full_name)() + except KeyError as e: + raise ProtobufTypeNotFound(str(e)) + + +def CopyProto(proto): + new_proto = proto.__class__() + new_proto.ParseFromString(proto.SerializeToString()) + return new_proto + + +class UnknownProtobuf(object): + + def __init__(self, proto_type, proto_any): + super(UnknownProtobuf, self).__init__() + + self.type = proto_type + self.original_value = proto_any + + +def UnpackAny(proto_any): + try: + proto = TypeUrlToMessage(proto_any.type_url) + except ProtobufTypeNotFound as e: + return UnknownProtobuf(str(e), proto_any) + + proto_any.Unpack(proto) + return proto + + +def RegisterProtoDescriptors(db, *additional_descriptors): + """Registers all API-releated descriptors in a given symbol DB.""" + db.RegisterFileDescriptor(apple_firmware_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(artifact_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(client_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(config_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(cron_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(flow_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(hunt_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(output_plugin_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(reflection_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(stats_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(user_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(vfs_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(yara_pb2.DESCRIPTOR) + + db.RegisterFileDescriptor(checks_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(deprecated_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(flows_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(jobs_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(osquery_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(timeline_pb2.DESCRIPTOR) + db.RegisterFileDescriptor(wrappers_pb2.DESCRIPTOR) + + for d in additional_descriptors: + db.RegisterFileDescriptor(d) + +#!/usr/bin/env python +# +# linearize-data.py: Construct a linear, no-fork version of the chain. +# +# Copyright (c) 2013-2014 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+# + +from __future__ import print_function, division +import json +import struct +import re +import os +import os.path +import base64 +import httplib +import sys +import hashlib +import datetime +import time +from collections import namedtuple + +settings = {} + +def uint32(x): + return x & 0xffffffffL + +def bytereverse(x): + return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) | + (((x) >> 8) & 0x0000ff00) | ((x) >> 24) )) + +def bufreverse(in_buf): + out_words = [] + for i in range(0, len(in_buf), 4): + word = struct.unpack('@I', in_buf[i:i+4])[0] + out_words.append(struct.pack('@I', bytereverse(word))) + return ''.join(out_words) + +def wordreverse(in_buf): + out_words = [] + for i in range(0, len(in_buf), 4): + out_words.append(in_buf[i:i+4]) + out_words.reverse() + return ''.join(out_words) + +def calc_hdr_hash(blk_hdr): + hash1 = hashlib.sha256() + hash1.update(blk_hdr) + hash1_o = hash1.digest() + + hash2 = hashlib.sha256() + hash2.update(hash1_o) + hash2_o = hash2.digest() + + return hash2_o + +def calc_hash_str(blk_hdr): + hash = calc_hdr_hash(blk_hdr) + hash = bufreverse(hash) + hash = wordreverse(hash) + hash_str = hash.encode('hex') + return hash_str + +def get_blk_dt(blk_hdr): + members = struct.unpack(" self.maxOutSz): + self.outF.close() + if self.setFileTime: + os.utime(outFname, (int(time.time()), highTS)) + self.outF = None + self.outFname = None + self.outFn = self.outFn + 1 + self.outsz = 0 + + (blkDate, blkTS) = get_blk_dt(blk_hdr) + if self.timestampSplit and (blkDate > self.lastDate): + print("New month " + blkDate.strftime("%Y-%m") + " @ " + hash_str) + lastDate = blkDate + if outF: + outF.close() + if setFileTime: + os.utime(outFname, (int(time.time()), highTS)) + self.outF = None + self.outFname = None + self.outFn = self.outFn + 1 + self.outsz = 0 + + if not self.outF: + if self.fileOutput: + outFname = self.settings['output_file'] + else: + outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn) + print("Output file " + outFname) + self.outF = open(outFname, "wb") + + self.outF.write(inhdr) + self.outF.write(blk_hdr) + self.outF.write(rawblock) + self.outsz = self.outsz + len(inhdr) + len(blk_hdr) + len(rawblock) + + self.blkCountOut = self.blkCountOut + 1 + if blkTS > self.highTS: + self.highTS = blkTS + + if (self.blkCountOut % 1000) == 0: + print('%i blocks scanned, %i blocks written (of %i, %.1f%% complete)' % + (self.blkCountIn, self.blkCountOut, len(self.blkindex), 100.0 * self.blkCountOut / len(self.blkindex))) + + def inFileName(self, fn): + return os.path.join(self.settings['input'], "blk%05d.dat" % fn) + + def fetchBlock(self, extent): + '''Fetch block contents from disk given extents''' + with open(self.inFileName(extent.fn), "rb") as f: + f.seek(extent.offset) + return f.read(extent.size) + + def copyOneBlock(self): + '''Find the next block to be written in the input, and copy it to the output.''' + extent = self.blockExtents.pop(self.blkCountOut) + if self.blkCountOut in self.outOfOrderData: + # If the data is cached, use it from memory and remove from the cache + rawblock = self.outOfOrderData.pop(self.blkCountOut) + self.outOfOrderSize -= len(rawblock) + else: # Otherwise look up data on disk + rawblock = self.fetchBlock(extent) + + self.writeBlock(extent.inhdr, extent.blkhdr, rawblock) + + def run(self): + while self.blkCountOut < len(self.blkindex): + if not self.inF: + fname = self.inFileName(self.inFn) + print("Input file " + fname) + try: + self.inF = open(fname, "rb") + except IOError: + print("Premature end of 
block data") + return + + inhdr = self.inF.read(8) + if (not inhdr or (inhdr[0] == "\0")): + self.inF.close() + self.inF = None + self.inFn = self.inFn + 1 + continue + + inMagic = inhdr[:4] + if (inMagic != self.settings['netmagic']): + print("Invalid magic: " + inMagic.encode('hex')) + return + inLenLE = inhdr[4:] + su = struct.unpack("). +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +############################################################################## + +import base_action_rule +import test_models + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: + +#!/usr/bin/python -u +# +# This test exercise the redirection of error messages with a +# functions defined in Python. +# +import sys +import libxml2 + +# Memory debug specific +libxml2.debugMemory(1) + +expect="""--> (3) xmlns: URI foo is not absolute +--> (4) Opening and ending tag mismatch: x line 0 and y +""" + +err="" +def callback(arg,msg,severity,reserved): + global err + err = err + "%s (%d) %s" % (arg,severity,msg) + +s = """""" + +parserCtxt = libxml2.createPushParser(None,"",0,"test.xml") +parserCtxt.setErrorHandler(callback, "-->") +if parserCtxt.getErrorHandler() != (callback,"-->"): + print "getErrorHandler failed" + sys.exit(1) +parserCtxt.parseChunk(s,len(s),1) +doc = parserCtxt.doc() +doc.freeDoc() +parserCtxt = None + +if err != expect: + print "error" + print "received %s" %(err) + print "expected %s" %(expect) + sys.exit(1) + +i = 10000 +while i > 0: + parserCtxt = libxml2.createPushParser(None,"",0,"test.xml") + parserCtxt.setErrorHandler(callback, "-->") + parserCtxt.parseChunk(s,len(s),1) + doc = parserCtxt.doc() + doc.freeDoc() + parserCtxt = None + err = "" + i = i - 1 + +# Memory debug specific +libxml2.cleanupParser() +if libxml2.debugMemory(1) == 0: + print "OK" +else: + print "Memory leak %d bytes" % (libxml2.debugMemory(1)) + libxml2.dumpMemory() + +from django.test import TestCase +from school.models import Semester, StaffMember, Department, Subject, SchoolClass, Student, Enrolment +from sync.google_admin import GoogleSync +from django.conf import settings +import datetime + +#class SchoolTest(TestCase): +class SchoolTest(TestCase): + def setUp(self): + #import pdb + #pdb.set_trace() + self.google_sync = GoogleSync() + + self.sem, c = Semester.objects.get_or_create(number=1, year="2013", + start_date=datetime.date(2013,1,29), end_date=datetime.date(2013,6,7)) + self.tch, c = StaffMember.objects.get_or_create( + first_name="John", last_name="Teacher", + email="john.teacher@" + settings.GOOGLE_APPS_DOMAIN, + date_of_birth=datetime.date(1970,3,3), timetable_id="XTCH", + is_current=True, + staff_type="TEA" + ) + #self.google_sync.update_google_staff(self.tch) + self.dept, c = Department.objects.get_or_create(name="Test Faculty") + self.subj, c = Subject.objects.get_or_create(code="14XTST", name="Test Subject", faculty=self.dept) + self.cla, c = SchoolClass.objects.get_or_create(code="14XTSTB", 
name="Test Class B", cycle=self.sem, + teacher=self.tch, subject=self.subj) + self.students = [] + for i in range(1,5): + id='XTST%04d' % i + s, c = Student.objects.get_or_create( + first_name="Test%d"%i, last_name="Student%d"%i, + email="%s@%s" % (id, settings.GOOGLE_APPS_DOMAIN), + date_of_birth=datetime.date(2000,3,(i%27)+1), timetable_id=id, + is_current=True, + student_type="STU", year_level="14" + ) + #self.google_sync.update_google_student(s) + Enrolment.objects.get_or_create(student=s, school_class=self.cla) + self.students.append(s) + + # def test_student_create(self): + # pass + # + # def test_student_update(self): + # pass + # + # def test_student_exit(self): + # pass + # + # def test_staff_create(self): + # pass + # + # def test_staff_update(self): + # pass + # + # def test_staff_exit(self): + # pass + # + # def test_class_create(self): + # pass +# coding: utf-8 +# The MIT License (MIT) + # Copyright (c) 2014 by Shuo Li (contact@shuo.li) + # + # Permission is hereby granted, free of charge, to any person obtaining a + # copy of this software and associated documentation files (the "Software"), + # to deal in the Software without restriction, including without limitation + # the rights to use, copy, modify, merge, publish, distribute, sublicense, + # and/or sell copies of the Software, and to permit persons to whom the + # Software is furnished to do so, subject to the following conditions: + # + # The above copyright notice and this permission notice shall be included in + # all copies or substantial portions of the Software. + # + # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + # DEALINGS IN THE SOFTWARE. + +__author__ = 'Shuo Li ' +__version__= '2014-09-27-12:42' + +import timeit +import sys +reload(sys) +sys.setdefaultencoding('utf-8') + +import os +from os import listdir +from os.path import isfile, join + +import copy +import re +import shutil + +import xml.etree.ElementTree as ET + +import jinja2 +# from fontTools.ttLib import TTFont + +import PIL +from PIL import ImageFont, ImageDraw + +if 'define constants' : + + def file_names_in_folder(folder) : + + abs_folder = os.path.abspath(folder) + return [ f for f in listdir(abs_folder) + if isfile(join(abs_folder,f)) ] + + def get_default_image() : + return PIL.Image.new( 'RGBA', (image_width, image_height) ) + + if isfile('config.py') : + config_module = __import__( 'config' ) + config = config_module.config + else : + # a font bitmap set has no shadow to keep location consistency. + shadow_size = 3 + + def construct_color(r, g, b) : + rx = hex(r).split('x')[1] + if len(rx) == 1 : + rx = '0' + rx + gx = hex(g).split('x')[1] + if len(gx) == 1 : + gx = '0' + rx + bx = hex(b).split('x')[1] + if len(gx) == 1 : + gx = '0' + rx + return '#' + rx + gx + bx + + blue = construct_color(214, 244, 255) + darkblue = construct_color(118, 200, 241) + green = construct_color(101, 181, 91) + red = construct_color(228, 63, 63) + white = construct_color(255, 255, 255) + black = construct_color(0, 0, 0) + shadow_color = construct_color(50, 50, 50) + + config = { + + 'Do not delete this configure file.' 
: '' + + # The base folder of this font map generator. + , 'base folder' + : './' + + # The folder stores all TrueType font (.ttf) files. + # The specified folder is relative to this configure file. + # Absolute folder will be base folder + font folder. + , 'font folder' + : 'fonts' + + # The Space Engineers (SE) installation path. + , 'space engineer base folder' + : 'C:\Program Files (x86)\Steam\SteamApps\common\SpaceEngineers' + + # Font size in SE + , 'font size' + : 28 + + # The font priority list, from high to low. + # The bitmap of each character + # is given by the TrueType font (.tff) + # who has a valid bitmap and a highest priority. + , 'font priority list' + : [ ] + + # The width of the result .dds image. + , 'image width' + : 1024 + + # The width of the result .dds image + , 'image height' + : 1024 + + # output .dds file name prefix + , 'output dds prefix' + : 'FontDataExtra-' + + # Original dds file names. + # They are used when + # the user wants to keep the original font bitmaps + # and only construct the characters that + # are not included in the original font bitmaps. + , 'original dds file names' + : [ 'FontData-0.dds' ] + + # Predefined colors + , 'predefined colors' + : { 'blue': { + 'output' : True, + 'color': blue, + 'shadow_color': shadow_color, + 'shadow_size': shadow_size, + 'shadow' : False }, + + 'darkblue': { + 'output' : True, + 'color': darkblue, + 'shadow_color': shadow_color, + 'shadow_size': shadow_size, + 'shadow' : False }, + + 'green': { + 'output' : True, + 'color': green, + 'shadow_color': shadow_color, + 'shadow_size': shadow_size, + 'shadow' : False }, + + 'red': { + 'output' : True, + 'color': red, + 'shadow_color': shadow_color, + 'shadow_size': shadow_size, + 'shadow' : False }, + + 'white': { + 'output' : True, + 'color': white, + 'shadow_color': shadow_color, + 'shadow_size': shadow_size, + 'shadow' : False }, + + 'white_shadow': { + 'output' : True, + 'color': white, + 'shadow_color': shadow_color, + 'shadow_size': shadow_size, + 'shadow' : True } + } + + # Left Side Bearing, lsb + # + # illusion: + # + # |< last >| |< this >| + # |< char >| |< char >| + # |< bitmap >||< lsb >||< bitmap >| + # + , 'lsb' + : -1 + + # font map xml template file + , 'xml template' + : 'xml_template.xml' + + # font map xml file name + , 'xml file name' + : 'FontData.xml' + + # font place holder north margin + , 'north margin' + : 0 + + # font place holder west margin + , 'west margin' + : 0 + + # font place holder south margin + , 'south margin' + : 0 + + # font place holder east margin + , 'east margin' + : 0 + + # keep original font map + , 'keep original font map' + : True + + , 'text file folder' + : 'text_files' + + , 'unsupported folder' + : 'unsupported' + + , 'backup folder' + : 'backup' + + , 'output folder' + : 'output' + + } + + keep_original = bool(config['keep original font map']) + + output_dds_prefix = str(config['output dds prefix']) + original_dds_file_names = config['original dds file names'] + se_folder = str(config['space engineer base folder']) + font_size = int(config['font size']) + base_folder = str(config['base folder']) + font_folder = base_folder + str(config['font folder']) + font_folder = os.path.abspath(font_folder) + output_folder = base_folder + str(config['output folder']) + output_folder = os.path.abspath(output_folder) + + font_priority_list = config['font priority list'] + font_priority_list = [] + font_files_in_folder = file_names_in_folder(font_folder) + + font_files = [ join(font_folder, f) for f in list(font_priority_list)] 
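+    # Note (descriptive comment, inferred from the surrounding code): fonts named
+    # in the 'font priority list' (reset to [] just above) would be loaded first;
+    # the loop below then appends every remaining .ttf file found in the font
+    # folder, so folder fonts end up being tried in directory-listing order.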
+ + for f in font_files_in_folder : + if f not in font_priority_list : + font_files.append(join(font_folder, f)) + + fonts = [ { 'face' : 'freetype.Face(f)', + 'font' : ImageFont.truetype(f, font_size), + 'font_size' : font_size, + 'file_name' : os.path.basename(f)} + for f in font_files ] + + unsupported_folder = config['unsupported folder'] + + image_width = int(config['image width']) + image_height = int(config['image height']) + + + color_dict = config['predefined colors'] + + lsb = config['lsb'] + + xml_template_name = str(config['xml template']) + xml_file_name = str(config['xml file name']) + + north_margin = int(config['north margin']) + west_margin = int(config['west margin']) + south_margin = int(config['south margin']) + east_margin = int(config['east margin']) + + text_file_folder = os.path.abspath(str(config['text file folder'])) + + backup_folder = str(config['backup folder']) + +if 'define classes' : + + class location() : + + ''' + Location class + ''' + + def __init__(self, x, y) : + self.x = x + self.y = y + + def clone(self) : + return location(x, y) + + def __str__(self) : + return '(%s, %s)' % (self.x, self.y) + + + def add_sub_action(self, another_location, mode = '+') : + + def add_sub(a, b, mode = '+') : + if mode == '+' : + return a + b + if mode == '-' : + return a - b + + raise NotImplementedError() + + if isinstance(another_location, location) : + return location( + add_sub( self.x, + another_location.x, + mode), + add_sub( self.y, + another_location.y, + mode)) + + if isinstance(another_location, tuple) \ + or isinstance(another_location, list) : + if len(another_location) == 2 : + return location( + add_sub( self.x, + int(another_location[0]), + mode), + add_sub( self.y, + int(another_location[1]), + mode)) + + if isinstance(another_location, dict) : + if 'x' in another_location.keys() and 'y' in another_location.keys() : + return location( + add_sub( self.x, + int(another_location['x']), + mode), + add_sub( self.y, + int(another_location['y']), + mode)) + + raise NotImplementedError() + + def __add__(self, another_location) : + return self.add_sub_action(another_location, mode = '+') + + def __sub__(self, another_location) : + return self.add_sub_action(another_location, mode = '-') + + class char() : + + ''' + Character class + ''' + + def __init__(self, content) : + + self.content = content + + def map(self, color, fonts, + north_margin = north_margin, + west_margin = west_margin, + south_margin = south_margin, + east_margin = east_margin, + unsupported = {}) : + + def haschar(font, one_character, unsupported = {}) : + + ''' + Return if a font has a character. + ''' + return True + # ttf_face = font['face'] + # font_file_name = font['file_name'] + + # ttf_face.set_char_size( 48*64 ) + # ttf_face.load_char(one_character) + + # a = copy.deepcopy(ttf_face.glyph.bitmap.buffer) + # b = [] + # if font_file_name in unsupported.keys() : + # if one_character in unsupported[ font_file_name ] : + # return False + + # ttf_face.load_char(unsupported[ font_file_name ][0]) + # b = copy.deepcopy(ttf_face.glyph.bitmap.buffer) + + # return a != b + + self.color = color + + self.font = None + + for f in fonts : + + if haschar(f, one_character = self.content, + unsupported = unsupported) : + + self.font = f['font'] + self.font_size = f['font_size'] + break + + if self.font == None : + print 'Warning! No font file has \'%s\'.' 
% self.content + self.font = fonts[0]['font'] + self.font_size = f['font_size'] + + self.width, self.height = self.font.getsize(self.content) + + self.shadow_size = color['shadow_size'] + self.width += (self.shadow_size * 2) + self.height += (self.shadow_size * 2) + self.size = (self.width, self.height) + + self.holder_height = north_margin + self.font_size + south_margin + self.holder_height += (self.shadow_size * 4) + + self.holder_width = west_margin + self.width + east_margin + self.holder_size = (self.holder_width, self.holder_height) + + def locate(self, code, image_location, image_index, left_sep) : + + self.code = code + self.image_location = image_location + self.image_index = image_index + self.left_sep = left_sep + + def attribute(self) : + return { 'content' : escape(self.content), + 'code' : get_code_string(self.code), + 'image_index' : self.image_index, + 'x' : self.image_location.x, + 'y' : self.image_location.y + self.shadow_size, + 'width' : self.width-1, + 'height' : self.holder_height - (self.shadow_size*2), + 'advance_width' : self.width - (self.shadow_size*2), + 'left_sep' : self.left_sep } + +if 'define misc. functions' : + + def cleanup(folder_paths, file_names = [], remove_ext_name = ['.pyc', '.png']) : + + for folder_path in folder_paths : + for f in file_names : + os.remove(join(os.path.abspath(folder_path), f)) + + for f in file_names_in_folder(folder_path) : + for ext_name in remove_ext_name : + if f.endswith(ext_name) : + os.remove(join(folder_path, f)) + + + def distinct(string_list) : + + one_list = '' + for s in string_list : + one_list += s + one_list = list(set(one_list)) + return one_list + + def save_dds(pillow_image, index = 0, output_folder = './'): + output_folder = os.path.abspath(output_folder) + temp_file_path = join(output_folder, 'temp_%s.png' % index) + output_file_path = join(output_folder, '%s%s.dds' % (output_dds_prefix, index)) + pillow_image.save(temp_file_path) + os.system(r'.\nvtt\nvcompress.exe -nocuda -bc3 %s %s' \ + % (temp_file_path, output_file_path )) + os.remove(temp_file_path) + + def compute_location(one_char, draw_location, target_images) : + + (w, h) = target_images[-1].size + + # to the next line + if draw_location.x + one_char.holder_width >= w : + draw_location.y += one_char.holder_height + draw_location.x = 0 + + # to the next image + if draw_location.y + one_char.holder_height >= h : + target_images.append(get_default_image()) + draw_location.y = 0 + + return draw_location, target_images + + def draw_one_char_to_image(one_char, draw_location, target_image, west_margin, south_margin) : + + ''' + Draw one char on one image + ''' + + def draw_once(draw, color, xshift, yshift,) : + draw.text( ( draw_location.x + xshift, + draw_location.y + yshift), + one_char.content, + font = one_char.font, + fill = color ) + + draw = ImageDraw.Draw(target_image) + + if one_char.color['shadow'] == True : + for i in xrange(one_char.shadow_size) : + draw_once(draw, one_char.color['shadow_color'], +i, +i) + draw_once(draw, one_char.color['shadow_color'], one_char.shadow_size + 1 + i, +i) + draw_once(draw, one_char.color['shadow_color'], +i, one_char.shadow_size + 1 + i) + draw_once(draw, one_char.color['shadow_color'], + one_char.shadow_size + 1 + i, one_char.shadow_size + 1 + i) + + draw_once(draw, one_char.color['color'], one_char.shadow_size, one_char.shadow_size) + + return draw_location + (one_char.holder_width, 0), target_image + + def write_char_to_image( one_char, draw_location, code, + image_start_index, + target_images = [ 
get_default_image() ] ) : + + if not isinstance(target_images, list) : + target_images = [ target_images ] + + # compute char bitmap location + draw_location, target_images \ + = compute_location(one_char, draw_location, target_images) + + one_char.locate(code, draw_location, image_start_index + len(target_images) - 1, lsb) + + # draw one char + loc, target_images[-1] \ + = draw_one_char_to_image( one_char, draw_location, target_images[-1], + west_margin, south_margin) + + return one_char, loc, target_images + + def save_images(images, output_folder) : + i = 0 + for image in images : + save_dds(image, i, output_folder) + i += 1 + + def get_code_string(decimal_code) : + return hex(decimal_code).split('x')[1] + + def escape(input_string) : + + html_escape_table = { + unicode('&'): unicode("&"), + unicode('"'): unicode("""), + unicode("'"): unicode("'"), + unicode(">"): unicode(">"), + unicode("<"): unicode("<") } + + input_string = unicode(input_string) + + if input_string in html_escape_table.keys() : + return html_escape_table[ input_string ] + + return input_string + + def get_char_list(xml_file_name) : + + tree = ET.parse(xml_file_name) + root = tree.getroot() + + glyphs = [ child for child in root if child.tag.endswith('glyphs') ][0] + max_code = max([ int('0x' + glyph.attrib['code'], 16) for glyph in glyphs ]) + return [ glyph.attrib['ch'] for glyph in glyphs ], max_code + + def get_original_xml_attributes(xml_file_name) : + + tree = ET.parse(xml_file_name) + root = tree.getroot() + + glyphs = [ child for child in root if child.tag.endswith('glyphs') ][0] + kernpairs = [ child for child in root if child.tag.endswith('kernpairs') ][0] + + glyphs_attribute_list = [ { + 'content' : escape(glyph.attrib['ch']), + 'code' : glyph.attrib['code'], + 'bm' : glyph.attrib['bm'], + 'origin' : glyph.attrib['origin'], + 'size' : glyph.attrib['size'], + 'aw' : glyph.attrib['aw'], + 'lsb' : glyph.attrib['lsb'] } + for glyph in glyphs ] + + kernpair_attribute_list = [ { + 'left' : escape(kernpair.attrib['left']), + 'right' : escape(kernpair.attrib['right']), + 'adjust' : kernpair.attrib['adjust'] } + for kernpair in kernpairs ] + + return glyphs_attribute_list, kernpair_attribute_list + + def write_text_to_image(text, color, unsupported, start_code, output_folder = output_folder, + image_start_index = 0, + north_margin = north_margin, + west_margin = west_margin, + south_margin = south_margin, + east_margin = east_margin) : + + draw_location = location(0, 0) + target_images = [ get_default_image() ] + current_code = start_code + char_list = [] + for c in text : + + # create a char object + one_char = char(content = c) + + # map a char to a bitmap + one_char.map( color = color, fonts = fonts, + north_margin = north_margin, + west_margin = west_margin, + south_margin = south_margin, + east_margin = east_margin, + unsupported = unsupported ) + + one_char, draw_location, target_images \ + = write_char_to_image( one_char = one_char, + draw_location = draw_location, + code = current_code, + image_start_index = image_start_index, + target_images = target_images ) + + char_list.append(one_char) + current_code += 1 + + save_images(target_images, output_folder) + + return char_list, target_images + + def produce_xml(char_list, target_images, output_folder, + keep_original, original_xml_file_name) : + + env = jinja2.Environment() + env.loader = jinja2.FileSystemLoader('./') + + template = env.get_template(xml_template_name) + + xml_file = open(join(output_folder, xml_file_name), 'w+') + + char_attribute_list = [ 
c.attribute() for c in char_list ] + + dds_files = [] + glyphs_attribute_list = [] + kernpair_attribute_list = [] + + image_index = 0 + + if keep_original == True : + for n in original_dds_file_names : + dds_files.append( { 'index' : image_index, 'name': n } ) + image_index += 1 + + glyphs_attribute_list, kernpair_attribute_list = \ + get_original_xml_attributes(original_xml_file_name) + + dds_files += [ { 'index' : i + image_index, 'name': '%s%s.dds' % (output_dds_prefix, i) } + for i in xrange(len(target_images)) ] + + xml_file.write( template.render( + char_attribute_list = char_attribute_list, + dds_files = dds_files, + glyphs_attribute_list = glyphs_attribute_list, + kernpair_attribute_list = kernpair_attribute_list ) ) + + def get_original_text(base_folder, backup_folder, xml_file_name) : + + original_xml_file_name = join(backup_folder, 'red\\' + xml_file_name) + original_xml_file_name_copy = join(base_folder, 'original_' + xml_file_name) + shutil.copy2(original_xml_file_name, original_xml_file_name_copy) + + return get_char_list(xml_file_name = original_xml_file_name_copy) + + def backup_se_font_map(se_folder, backup_folder) : + + if not os.path.exists(backup_folder) : + shutil.copytree(join(se_folder, 'Content\\Fonts'), backup_folder ) + else : + if not os.listdir(backup_folder) : + os.rmdir(backup_folder ) + shutil.copytree(join(se_folder, 'Content\\Fonts'), backup_folder ) + + def include_text_files(base_folder, text_file_folder) : + + text_files = file_names_in_folder(text_file_folder) + text_mod_files = [ f for f in text_files if f.endswith('.py') ] + for f in text_files : + shutil.copy2(join(text_file_folder, f), join(base_folder, f)) + text_file_modules = [ __import__( f.split('.')[0]) for f in text_mod_files ] + + result = [] + for m in text_file_modules : + result += distinct(m.text) + return text_files, distinct(result) + + def check_unsupported_files (base_folder, unsupported_folder) : + + unsupported_files = file_names_in_folder(unsupported_folder) + + for f in unsupported_files : + shutil.copy2(join(unsupported_folder, f), join(base_folder, f)) + + unsupported_file_modules = [ __import__( f.split('.')[0]) for f in unsupported_files ] + unsupported = {} + + for m in unsupported_file_modules : + for key, value in m.unsupported_char.items() : + unsupported[key] = value + + return unsupported_files, unsupported + +start_time = timeit.default_timer() +backup_se_font_map(se_folder, backup_folder) +text_original, max_code = get_original_text(base_folder, backup_folder, xml_file_name) +text_files, text_in_files = include_text_files(base_folder, text_file_folder) +unsupported_files, unsupported = check_unsupported_files (base_folder, unsupported_folder) + +if not os.path.exists(output_folder) : + os.mkdir(output_folder) + +if not keep_original : + text = distinct(text_in_files + text_original) + start_code = 0 +else : + text = list(set(text_in_files).symmetric_difference(text_original)) + start_code = max_code + 1 + +# generate font map +for c, v in color_dict.items() : + + if v['output'] == True : + + print 'Generate bitmap for %s ...' 
% c + + if os.path.exists(join(output_folder, c)) : + shutil.rmtree(join(output_folder, c)) + + if not os.path.exists(join(output_folder, c)) : + os.mkdir(join(output_folder, c)) + + if keep_original == True : + + for n in original_dds_file_names : + shutil.copy2( + join( backup_folder, c + '\\' + n), + join( output_folder, c + '\\' + n) ) + + original_xml_file_name \ + = os.path.abspath(join( backup_folder, c + '\\' + xml_file_name)) + + print 'Done' + print + print 'Write bitmap to dds.' + + char_list, target_images \ + = write_text_to_image( + text = text, color = v, unsupported = unsupported, + start_code = copy.deepcopy(start_code), + output_folder = join(output_folder, c), + image_start_index = len(original_dds_file_names), + north_margin = north_margin, west_margin = west_margin, + south_margin = south_margin, east_margin = east_margin ) + + print 'Done' + print + print 'Generate XML for %s ...' + + produce_xml(char_list, target_images, join(output_folder, c), + keep_original, original_xml_file_name) + + print 'Done' + +print 'All image and XMl generations done.' +print +print 'Cleaning up temp files...' + +cleanup( folder_paths = [ base_folder ], + file_names = text_files + unsupported_files, + remove_ext_name = ['.pyc', '.png', '.csv', 'original_' + xml_file_name]) + +print 'Done' +print 'Total run time is %f.' % (timeit.default_timer() - start_time) + +""" +.. todo:: + + WRITEME +""" +import numpy +np = numpy +import os + +from theano.compat.six.moves import reduce + +from pylearn2.datasets import dense_design_matrix +from pylearn2.datasets import retina +from pylearn2.datasets.cache import datasetCache + + +class NORBSmall(dense_design_matrix.DenseDesignMatrix): + + """ + A pylearn2 dataset object for the small NORB dataset (v1.0). + + Parameters + ---------- + which_set : WRITEME + one of ['train','test'] + center : WRITEME + data is in range [0,256], center=True subtracts 127.5. + multi_target : WRITEME + load extra information as additional labels. + """ + + @classmethod + def load(cls, which_set, desc): + """ + .. todo:: + + WRITEME + """ + assert desc in ['dat', 'cat', 'info'] + + base = '%s/norb_small/original_npy/smallnorb-' + base = base % os.getenv('PYLEARN2_DATA_PATH') + if which_set == 'train': + base += '5x46789x9x18x6x2x96x96-training' + else: + base += '5x01235x9x18x6x2x96x96-testing' + + fname = base + '-%s.npy' % desc + fname = datasetCache.cache_file(fname) + fp = open(fname, 'r') + data = np.load(fp) + fp.close() + + return data + + def __init__(self, which_set, center=False, multi_target=False): + assert which_set in ['train', 'test'] + + X = NORBSmall.load(which_set, 'dat') + + # put things in pylearn2's DenseDesignMatrix format + X = np.cast['float32'](X) + X = X.reshape(-1, 2 * 96 * 96) + + # this is uint8 + y = NORBSmall.load(which_set, 'cat') + if multi_target: + y_extra = NORBSmall.load(which_set, 'info') + y = np.hstack((y[:, np.newaxis], y_extra)) + + if center: + X -= 127.5 + + view_converter = dense_design_matrix.DefaultViewConverter((96, 96, 2)) + + super(NORBSmall, self).__init__(X=X, y=y, y_labels=np.max(y) + 1, + view_converter=view_converter) + + +class FoveatedNORB(dense_design_matrix.DenseDesignMatrix): + + """ + .. todo:: + + WRITEME + + Parameters + ---------- + which_set : WRITEME + One of ['train','test'] + center : WRITEME + Data is in range [0,256], center=True subtracts 127.5. 
+ # TODO: check this comment, sure it means {0, ..., 255} + scale : WRITEME + start : WRITEME + stop : WRITEME + restrict_instances : WRITEME + preprocessor : WRITEME + """ + + @classmethod + def load(cls, which_set): + + base = '%s/norb_small/foveated/smallnorb-' + base = base % os.getenv('PYLEARN2_DATA_PATH') + if which_set == 'train': + base += '5x46789x9x18x6x2x96x96-training-dat' + else: + base += '5x01235x9x18x6x2x96x96-testing-dat' + + fname = base + '.npy' + fname = datasetCache.cache_file(fname) + data = np.load(fname, 'r') + return data + + def __init__(self, which_set, center=False, scale=False, + start=None, stop=None, restrict_instances=None, + preprocessor=None): + + self.args = locals() + + if which_set not in ['train', 'test']: + raise ValueError("Unrecognized which_set value: " + which_set) + + X = FoveatedNORB.load(which_set) + X = np.cast['float32'](X) + + # this is uint8 + y = NORBSmall.load(which_set, 'cat') + y_extra = NORBSmall.load(which_set, 'info') + + assert y_extra.shape[0] == y.shape[0] + instance = y_extra[:, 0] + assert instance.min() >= 0 + assert instance.max() <= 9 + self.instance = instance + + if center: + X -= 127.5 + if scale: + X /= 127.5 + else: + if scale: + X /= 255. + + view_converter = retina.RetinaCodingViewConverter((96, 96, 2), + (8, 4, 2, 2)) + + super(FoveatedNORB, self).__init__(X=X, y=y, + y_labels=np.max(y) + 1, + view_converter=view_converter, + preprocessor=preprocessor) + + if restrict_instances is not None: + assert start is None + assert stop is None + self.restrict_instances(restrict_instances) + + self.restrict(start, stop) + + self.y = self.y.astype('float32') + + def get_test_set(self): + """ + .. todo:: + + WRITEME + """ + test_args = {'which_set': 'test'} + + for key in self.args: + if key in ['which_set', 'restrict_instances', + 'self', 'start', 'stop']: + continue + test_args[key] = self.args[key] + + return FoveatedNORB(**test_args) + + def restrict_instances(self, instances): + """ + .. todo:: + + WRITEME + """ + mask = reduce(np.maximum, [self.instance == ins for ins in instances]) + mask = mask.astype('bool') + self.instance = self.instance[mask] + self.X = self.X[mask, :] + if self.y.ndim == 2: + self.y = self.y[mask, :] + else: + self.y = self.y[mask] + assert self.X.shape[0] == self.y.shape[0] + expected = sum([(self.instance == ins).sum() for ins in instances]) + assert self.X.shape[0] == expected + +# This file is part of 'NTLM Authorization Proxy Server' +# This file Copyright 2012 Tony C. Heupel +# NTLM Authorization Proxy Server is +# Copyright 2001 Dmitry A. Rozmanov +# +# NTLM APS is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# NTLM APS is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with the sofware; see the file COPYING. If not, write to the +# Free Software Foundation, Inc., +# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. 
+# +import getopt + +def parse_command_line(cmdline): + """ Parse command line into a tuple (except for the configuration file) + NOTE: Must only contain the command-line options + """ + opts, values = getopt.getopt(cmdline, + '', + ['config=', 'domain=', 'username=', 'password=', + 'port=']) + + options = {} + for opt in opts: + option, value = opt + if option == '--domain': + options['domain'] = value + elif option == '--username': + options['username'] = value + elif option == '--password': + options['password'] = value + elif option == '--port': + options['port'] = int(value) + elif option == '--config': + options['config_file'] = value + + return options + +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2015, Joseph Callen +# Copyright: (c) 2018, Ansible Project +# +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = { + 'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community' +} + +DOCUMENTATION = r''' +--- +module: vmware_cluster_ha +short_description: Manage High Availability (HA) on VMware vSphere clusters +description: + - Manages HA configuration on VMware vSphere clusters. + - All values and VMware object names are case sensitive. +version_added: '2.9' +author: +- Joseph Callen (@jcpowermac) +- Abhijeet Kasurde (@Akasurde) +requirements: + - Tested on ESXi 5.5 and 6.5. + - PyVmomi installed. +options: + cluster_name: + description: + - The name of the cluster to be managed. + type: str + required: yes + datacenter: + description: + - The name of the datacenter. + type: str + required: yes + aliases: [ datacenter_name ] + enable_ha: + description: + - Whether to enable HA. + type: bool + default: 'no' + ha_host_monitoring: + description: + - Whether HA restarts virtual machines after a host fails. + - If set to C(enabled), HA restarts virtual machines after a host fails. + - If set to C(disabled), HA does not restart virtual machines after a host fails. + - If C(enable_ha) is set to C(no), then this value is ignored. + type: str + choices: [ 'enabled', 'disabled' ] + default: 'enabled' + ha_vm_monitoring: + description: + - State of virtual machine health monitoring service. + - If set to C(vmAndAppMonitoring), HA response to both virtual machine and application heartbeat failure. + - If set to C(vmMonitoringDisabled), virtual machine health monitoring is disabled. + - If set to C(vmMonitoringOnly), HA response to virtual machine heartbeat failure. + - If C(enable_ha) is set to C(no), then this value is ignored. + type: str + choices: ['vmAndAppMonitoring', 'vmMonitoringOnly', 'vmMonitoringDisabled'] + default: 'vmMonitoringDisabled' + host_isolation_response: + description: + - Indicates whether or VMs should be powered off if a host determines that it is isolated from the rest of the compute resource. + - If set to C(none), do not power off VMs in the event of a host network isolation. + - If set to C(powerOff), power off VMs in the event of a host network isolation. + - If set to C(shutdown), shut down VMs guest operating system in the event of a host network isolation. + type: str + choices: ['none', 'powerOff', 'shutdown'] + default: 'none' + slot_based_admission_control: + description: + - Configure slot based admission control policy. + - C(slot_based_admission_control), C(reservation_based_admission_control) and C(failover_host_admission_control) are mutually exclusive. 
+ suboptions: + failover_level: + description: + - Number of host failures that should be tolerated. + type: int + required: true + type: dict + reservation_based_admission_control: + description: + - Configure reservation based admission control policy. + - C(slot_based_admission_control), C(reservation_based_admission_control) and C(failover_host_admission_control) are mutually exclusive. + suboptions: + failover_level: + description: + - Number of host failures that should be tolerated. + type: int + required: true + auto_compute_percentages: + description: + - By default, C(failover_level) is used to calculate C(cpu_failover_resources_percent) and C(memory_failover_resources_percent). + If a user wants to override the percentage values, he has to set this field to false. + type: bool + default: true + cpu_failover_resources_percent: + description: + - Percentage of CPU resources in the cluster to reserve for failover. + Ignored if C(auto_compute_percentages) is not set to false. + type: int + default: 50 + memory_failover_resources_percent: + description: + - Percentage of memory resources in the cluster to reserve for failover. + Ignored if C(auto_compute_percentages) is not set to false. + type: int + default: 50 + type: dict + failover_host_admission_control: + description: + - Configure dedicated failover hosts. + - C(slot_based_admission_control), C(reservation_based_admission_control) and C(failover_host_admission_control) are mutually exclusive. + suboptions: + failover_hosts: + description: + - List of dedicated failover hosts. + type: list + required: true + type: dict + ha_vm_failure_interval: + description: + - The number of seconds after which virtual machine is declared as failed + if no heartbeat has been received. + - This setting is only valid if C(ha_vm_monitoring) is set to, either C(vmAndAppMonitoring) or C(vmMonitoringOnly). + - Unit is seconds. + type: int + default: 30 + ha_vm_min_up_time: + description: + - The number of seconds for the virtual machine's heartbeats to stabilize after + the virtual machine has been powered on. + - Valid only when I(ha_vm_monitoring) is set to either C(vmAndAppMonitoring) or C(vmMonitoringOnly). + - Unit is seconds. + type: int + default: 120 + ha_vm_max_failures: + description: + - Maximum number of failures and automated resets allowed during the time + that C(ha_vm_max_failure_window) specifies. + - Valid only when I(ha_vm_monitoring) is set to either C(vmAndAppMonitoring) or C(vmMonitoringOnly). + type: int + default: 3 + ha_vm_max_failure_window: + description: + - The number of seconds for the window during which up to C(ha_vm_max_failures) resets + can occur before automated responses stop. + - Valid only when I(ha_vm_monitoring) is set to either C(vmAndAppMonitoring) or C(vmMonitoringOnly). + - Unit is seconds. + - Default specifies no failure window. + type: int + default: -1 + ha_restart_priority: + description: + - Priority HA gives to a virtual machine if sufficient capacity is not available + to power on all failed virtual machines. + - Valid only if I(ha_vm_monitoring) is set to either C(vmAndAppMonitoring) or C(vmMonitoringOnly). + - If set to C(disabled), then HA is disabled for this virtual machine. + - If set to C(high), then virtual machine with this priority have a higher chance of powering on after a failure, + when there is insufficient capacity on hosts to meet all virtual machine needs. 
+ - If set to C(medium), then virtual machine with this priority have an intermediate chance of powering on after a failure, + when there is insufficient capacity on hosts to meet all virtual machine needs. + - If set to C(low), then virtual machine with this priority have a lower chance of powering on after a failure, + when there is insufficient capacity on hosts to meet all virtual machine needs. + type: str + default: 'medium' + choices: [ 'disabled', 'high', 'low', 'medium' ] +extends_documentation_fragment: vmware.documentation +''' + +EXAMPLES = r""" +- name: Enable HA without admission control + vmware_cluster_ha: + hostname: '{{ vcenter_hostname }}' + username: '{{ vcenter_username }}' + password: '{{ vcenter_password }}' + datacenter_name: datacenter + cluster_name: cluster + enable_ha: yes + delegate_to: localhost + +- name: Enable HA and VM monitoring without admission control + vmware_cluster_ha: + hostname: "{{ vcenter_hostname }}" + username: "{{ vcenter_username }}" + password: "{{ vcenter_password }}" + validate_certs: no + datacenter_name: DC0 + cluster_name: "{{ cluster_name }}" + enable_ha: True + ha_vm_monitoring: vmMonitoringOnly + enable_vsan: True + delegate_to: localhost + +- name: Enable HA with admission control reserving 50% of resources for HA + vmware_cluster_ha: + hostname: '{{ vcenter_hostname }}' + username: '{{ vcenter_username }}' + password: '{{ vcenter_password }}' + datacenter_name: datacenter + cluster_name: cluster + enable_ha: yes + reservation_based_admission_control: + auto_compute_percentages: False + failover_level: 1 + cpu_failover_resources_percent: 50 + memory_failover_resources_percent: 50 + delegate_to: localhost +""" + +RETURN = r"""# +""" + +try: + from pyVmomi import vim, vmodl +except ImportError: + pass + +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.vmware import (PyVmomi, TaskError, find_datacenter_by_name, + vmware_argument_spec, wait_for_task) +from ansible.module_utils._text import to_native + + +class VMwareCluster(PyVmomi): + def __init__(self, module): + super(VMwareCluster, self).__init__(module) + self.cluster_name = module.params['cluster_name'] + self.datacenter_name = module.params['datacenter'] + self.enable_ha = module.params['enable_ha'] + self.datacenter = None + self.cluster = None + self.host_isolation_response = getattr(vim.cluster.DasVmSettings.IsolationResponse, self.params.get('host_isolation_response')) + + if self.enable_ha and ( + self.params.get('slot_based_admission_control') or + self.params.get('reservation_based_admission_control') or + self.params.get('failover_host_admission_control')): + self.ha_admission_control = True + else: + self.ha_admission_control = False + + self.datacenter = find_datacenter_by_name(self.content, self.datacenter_name) + if self.datacenter is None: + self.module.fail_json(msg="Datacenter %s does not exist." % self.datacenter_name) + + self.cluster = self.find_cluster_by_name(cluster_name=self.cluster_name) + if self.cluster is None: + self.module.fail_json(msg="Cluster %s does not exist." 
% self.cluster_name) + + def get_failover_hosts(self): + """ + Get failover hosts for failover_host_admission_control policy + Returns: List of ESXi hosts sorted by name + + """ + policy = self.params.get('failover_host_admission_control') + hosts = [] + all_hosts = dict((h.name, h) for h in self.get_all_hosts_by_cluster(self.cluster_name)) + for host in policy.get('failover_hosts'): + if host in all_hosts: + hosts.append(all_hosts.get(host)) + else: + self.module.fail_json(msg="Host %s is not a member of cluster %s." % (host, self.cluster_name)) + hosts.sort(key=lambda h: h.name) + return hosts + + def check_ha_config_diff(self): + """ + Check HA configuration diff + Returns: True if there is diff, else False + + """ + das_config = self.cluster.configurationEx.dasConfig + if das_config.enabled != self.enable_ha: + return True + + if self.enable_ha and ( + das_config.vmMonitoring != self.params.get('ha_vm_monitoring') or + das_config.hostMonitoring != self.params.get('ha_host_monitoring') or + das_config.admissionControlEnabled != self.ha_admission_control or + das_config.defaultVmSettings.restartPriority != self.params.get('ha_restart_priority') or + das_config.defaultVmSettings.isolationResponse != self.host_isolation_response or + das_config.defaultVmSettings.vmToolsMonitoringSettings.vmMonitoring != self.params.get('ha_vm_monitoring') or + das_config.defaultVmSettings.vmToolsMonitoringSettings.failureInterval != self.params.get('ha_vm_failure_interval') or + das_config.defaultVmSettings.vmToolsMonitoringSettings.minUpTime != self.params.get('ha_vm_min_up_time') or + das_config.defaultVmSettings.vmToolsMonitoringSettings.maxFailures != self.params.get('ha_vm_max_failures') or + das_config.defaultVmSettings.vmToolsMonitoringSettings.maxFailureWindow != self.params.get('ha_vm_max_failure_window')): + return True + + if self.ha_admission_control: + if self.params.get('slot_based_admission_control'): + policy = self.params.get('slot_based_admission_control') + if not isinstance(das_config.admissionControlPolicy, vim.cluster.FailoverLevelAdmissionControlPolicy) or \ + das_config.admissionControlPolicy.failoverLevel != policy.get('failover_level'): + return True + elif self.params.get('reservation_based_admission_control'): + policy = self.params.get('reservation_based_admission_control') + auto_compute_percentages = policy.get('auto_compute_percentages') + if not isinstance(das_config.admissionControlPolicy, vim.cluster.FailoverResourcesAdmissionControlPolicy) or \ + das_config.admissionControlPolicy.autoComputePercentages != auto_compute_percentages or \ + das_config.admissionControlPolicy.failoverLevel != policy.get('failover_level'): + return True + if not auto_compute_percentages: + if das_config.admissionControlPolicy.cpuFailoverResourcesPercent != policy.get('cpu_failover_resources_percent') or \ + das_config.admissionControlPolicy.memoryFailoverResourcesPercent != policy.get('memory_failover_resources_percent'): + return True + elif self.params.get('failover_host_admission_control'): + policy = self.params.get('failover_host_admission_control') + if not isinstance(das_config.admissionControlPolicy, vim.cluster.FailoverHostAdmissionControlPolicy): + return True + das_config.admissionControlPolicy.failoverHosts.sort(key=lambda h: h.name) + if das_config.admissionControlPolicy.failoverHosts != self.get_failover_hosts(): + return True + + return False + + def configure_ha(self): + """ + Manage HA Configuration + + """ + changed, result = False, None + + if self.check_ha_config_diff(): + 
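+            # Descriptive comment (inferred from this method): a reconfigure task
+            # is only issued when the requested HA settings differ from the
+            # cluster's current dasConfig; in check mode the API call is skipped
+            # and the module simply reports the pending change (changed=True).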
if not self.module.check_mode: + cluster_config_spec = vim.cluster.ConfigSpecEx() + cluster_config_spec.dasConfig = vim.cluster.DasConfigInfo() + cluster_config_spec.dasConfig.enabled = self.enable_ha + + if self.enable_ha: + vm_tool_spec = vim.cluster.VmToolsMonitoringSettings() + vm_tool_spec.enabled = True + vm_tool_spec.vmMonitoring = self.params.get('ha_vm_monitoring') + vm_tool_spec.failureInterval = self.params.get('ha_vm_failure_interval') + vm_tool_spec.minUpTime = self.params.get('ha_vm_min_up_time') + vm_tool_spec.maxFailures = self.params.get('ha_vm_max_failures') + vm_tool_spec.maxFailureWindow = self.params.get('ha_vm_max_failure_window') + + das_vm_config = vim.cluster.DasVmSettings() + das_vm_config.restartPriority = self.params.get('ha_restart_priority') + das_vm_config.isolationResponse = self.host_isolation_response + das_vm_config.vmToolsMonitoringSettings = vm_tool_spec + cluster_config_spec.dasConfig.defaultVmSettings = das_vm_config + + cluster_config_spec.dasConfig.admissionControlEnabled = self.ha_admission_control + + if self.ha_admission_control: + if self.params.get('slot_based_admission_control'): + cluster_config_spec.dasConfig.admissionControlPolicy = vim.cluster.FailoverLevelAdmissionControlPolicy() + policy = self.params.get('slot_based_admission_control') + cluster_config_spec.dasConfig.admissionControlPolicy.failoverLevel = policy.get('failover_level') + elif self.params.get('reservation_based_admission_control'): + cluster_config_spec.dasConfig.admissionControlPolicy = vim.cluster.FailoverResourcesAdmissionControlPolicy() + policy = self.params.get('reservation_based_admission_control') + auto_compute_percentages = policy.get('auto_compute_percentages') + cluster_config_spec.dasConfig.admissionControlPolicy.autoComputePercentages = auto_compute_percentages + cluster_config_spec.dasConfig.admissionControlPolicy.failoverLevel = policy.get('failover_level') + if not auto_compute_percentages: + cluster_config_spec.dasConfig.admissionControlPolicy.cpuFailoverResourcesPercent = \ + policy.get('cpu_failover_resources_percent') + cluster_config_spec.dasConfig.admissionControlPolicy.memoryFailoverResourcesPercent = \ + policy.get('memory_failover_resources_percent') + elif self.params.get('failover_host_admission_control'): + cluster_config_spec.dasConfig.admissionControlPolicy = vim.cluster.FailoverHostAdmissionControlPolicy() + policy = self.params.get('failover_host_admission_control') + cluster_config_spec.dasConfig.admissionControlPolicy.failoverHosts = self.get_failover_hosts() + + cluster_config_spec.dasConfig.hostMonitoring = self.params.get('ha_host_monitoring') + cluster_config_spec.dasConfig.vmMonitoring = self.params.get('ha_vm_monitoring') + + try: + task = self.cluster.ReconfigureComputeResource_Task(cluster_config_spec, True) + changed, result = wait_for_task(task) + except vmodl.RuntimeFault as runtime_fault: + self.module.fail_json(msg=to_native(runtime_fault.msg)) + except vmodl.MethodFault as method_fault: + self.module.fail_json(msg=to_native(method_fault.msg)) + except TaskError as task_e: + self.module.fail_json(msg=to_native(task_e)) + except Exception as generic_exc: + self.module.fail_json(msg="Failed to update cluster" + " due to generic exception %s" % to_native(generic_exc)) + else: + changed = True + + self.module.exit_json(changed=changed, result=result) + + +def main(): + argument_spec = vmware_argument_spec() + argument_spec.update(dict( + cluster_name=dict(type='str', required=True), + datacenter=dict(type='str', required=True, 
aliases=['datacenter_name']), + # HA + enable_ha=dict(type='bool', default=False), + ha_host_monitoring=dict(type='str', + default='enabled', + choices=['enabled', 'disabled']), + host_isolation_response=dict(type='str', + default='none', + choices=['none', 'powerOff', 'shutdown']), + # HA VM Monitoring related parameters + ha_vm_monitoring=dict(type='str', + choices=['vmAndAppMonitoring', 'vmMonitoringOnly', 'vmMonitoringDisabled'], + default='vmMonitoringDisabled'), + ha_vm_failure_interval=dict(type='int', default=30), + ha_vm_min_up_time=dict(type='int', default=120), + ha_vm_max_failures=dict(type='int', default=3), + ha_vm_max_failure_window=dict(type='int', default=-1), + + ha_restart_priority=dict(type='str', + choices=['high', 'low', 'medium', 'disabled'], + default='medium'), + # HA Admission Control related parameters + slot_based_admission_control=dict(type='dict', options=dict( + failover_level=dict(type='int', required=True), + )), + reservation_based_admission_control=dict(type='dict', options=dict( + auto_compute_percentages=dict(type='bool', default=True), + failover_level=dict(type='int', required=True), + cpu_failover_resources_percent=dict(type='int', default=50), + memory_failover_resources_percent=dict(type='int', default=50), + )), + failover_host_admission_control=dict(type='dict', options=dict( + failover_hosts=dict(type='list', elements='str', required=True), + )), + )) + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + mutually_exclusive=[ + ['slot_based_admission_control', 'reservation_based_admission_control', 'failover_host_admission_control'] + ] + ) + + vmware_cluster_ha = VMwareCluster(module) + vmware_cluster_ha.configure_ha() + + +if __name__ == '__main__': + main() + +from django.contrib import admin + +from edc_base.modeladmin.admin import LimitedAdminInlineMixin +from getresults.admin import admin_site + +from .models import ExportHistory, ImportHistory, CsvFormat, CsvField, CsvDictionary +from getresults_csv.forms import CsvDictionaryForm + + +class CsvFieldAdmin(admin.ModelAdmin): + list_display = ('csv_format', 'name') +admin_site.register(CsvField, CsvFieldAdmin) + + +class CsvDictionaryAdmin(admin.ModelAdmin): + form = CsvDictionaryForm + list_display = ('csv_format', 'csv_field', 'processing_field', 'utestid') + search_fields = ('csv_field', 'processing_field', 'utestid__name') +admin_site.register(CsvDictionary, CsvDictionaryAdmin) + + +class CsvDictionaryInline(LimitedAdminInlineMixin, admin.TabularInline): + model = CsvDictionary + form = CsvDictionaryForm + extra = 0 + + def get_filters(self, obj): + if obj: + return (('csv_field', dict(csv_format=obj.id)),) + else: + return () + + +class CsvFormatAdmin(admin.ModelAdmin): + list_display = ('name', 'sender_model', 'delimiter', 'encoding') + inlines = [CsvDictionaryInline] +admin_site.register(CsvFormat, CsvFormatAdmin) + + +class ImportHistoryAdmin(admin.ModelAdmin): + list_display = ('source', 'import_datetime', 'record_count') + search_fields = ('source', 'import_datetime', 'record_count') +admin_site.register(ImportHistory, ImportHistoryAdmin) + + +class ExportHistoryAdmin(admin.ModelAdmin): + list_display = ('destination', 'export_datetime', 'reference') + search_fields = ('destination', 'export_datetime', 'reference') +admin_site.register(ExportHistory, ExportHistoryAdmin) + +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# 
the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import socket +import platform + +from ansible.module_utils.facts.utils import get_file_content + +from ansible.module_utils.facts.collector import BaseFactCollector + +# i86pc is a Solaris and derivatives-ism +SOLARIS_I86_RE_PATTERN = r'i([3456]86|86pc)' +solaris_i86_re = re.compile(SOLARIS_I86_RE_PATTERN) + + +class PlatformFactCollector(BaseFactCollector): + name = 'platform' + _fact_ids = set(['system', + 'kernel', + 'machine', + 'python_version', + 'machine_id']) + + def collect(self, module=None, collected_facts=None): + platform_facts = {} + # platform.system() can be Linux, Darwin, Java, or Windows + platform_facts['system'] = platform.system() + platform_facts['kernel'] = platform.release() + platform_facts['machine'] = platform.machine() + + platform_facts['python_version'] = platform.python_version() + + platform_facts['fqdn'] = socket.getfqdn() + platform_facts['hostname'] = platform.node().split('.')[0] + platform_facts['nodename'] = platform.node() + + platform_facts['domain'] = '.'.join(platform_facts['fqdn'].split('.')[1:]) + + arch_bits = platform.architecture()[0] + + platform_facts['userspace_bits'] = arch_bits.replace('bit', '') + if platform_facts['machine'] == 'x86_64': + platform_facts['architecture'] = platform_facts['machine'] + if platform_facts['userspace_bits'] == '64': + platform_facts['userspace_architecture'] = 'x86_64' + elif platform_facts['userspace_bits'] == '32': + platform_facts['userspace_architecture'] = 'i386' + elif solaris_i86_re.search(platform_facts['machine']): + platform_facts['architecture'] = 'i386' + if platform_facts['userspace_bits'] == '64': + platform_facts['userspace_architecture'] = 'x86_64' + elif platform_facts['userspace_bits'] == '32': + platform_facts['userspace_architecture'] = 'i386' + else: + platform_facts['architecture'] = platform_facts['machine'] + + if platform_facts['system'] == 'AIX': + # Attempt to use getconf to figure out architecture + # fall back to bootinfo if needed + getconf_bin = module.get_bin_path('getconf') + if getconf_bin: + rc, out, err = module.run_command([getconf_bin, 'MACHINE_ARCHITECTURE']) + data = out.splitlines() + platform_facts['architecture'] = data[0] + else: + bootinfo_bin = module.get_bin_path('bootinfo') + rc, out, err = module.run_command([bootinfo_bin, '-p']) + data = out.splitlines() + platform_facts['architecture'] = data[0] + elif platform_facts['system'] == 'OpenBSD': + platform_facts['architecture'] = platform.uname()[5] + + machine_id = get_file_content("/var/lib/dbus/machine-id") or get_file_content("/etc/machine-id") + if machine_id: + machine_id = machine_id.splitlines()[0] + platform_facts["machine_id"] = machine_id + + return platform_facts + +"""Tests for :mod:`nailgun.entities`.""" +import inspect +import json +import os +from datetime import date +from datetime import datetime +from http.client import ACCEPTED +from http.client import NO_CONTENT +from unittest import mock +from unittest import TestCase + +from fauxfactory 
import gen_alpha +from fauxfactory import gen_integer +from fauxfactory import gen_string + +from nailgun import client +from nailgun import config +from nailgun import entities +from nailgun.entity_mixins import EntityCreateMixin +from nailgun.entity_mixins import EntityReadMixin +from nailgun.entity_mixins import EntitySearchMixin +from nailgun.entity_mixins import EntityUpdateMixin +from nailgun.entity_mixins import NoSuchPathError + +_BUILTIN_OPEN = 'builtins.open' +# For inspection comparison, a tuple matching the expected func arg spec +# https://docs.python.org/3/library/inspect.html#inspect.getfullargspec +EXPECTED_ARGSPEC = (['self', 'synchronous', 'timeout'], None, 'kwargs', (True, None), [], None, {}) +EXPECTED_ARGSPEC_TIMEOUT = ( + ['self', 'synchronous', 'timeout'], + None, + 'kwargs', + (True, 1500), + [], + None, + {}, +) +# The size of this file is a direct reflection of the size of module +# `nailgun.entities` and the Satellite API. + + +def make_entity(cls, **kwargs): + """Helper function to create entity with dummy ServerConfig""" + cfg = config.ServerConfig(url='https://foo.bar', verify=False, auth=('foo', 'bar')) + return cls(cfg, **kwargs) + + +def _get_required_field_names(entity): + """Get the names of all required fields from an entity. + + :param nailgun.entity_mixins.Entity entity: This entity is inspected. + :returns: A set in the form ``{'field_name_1', 'field_name_2', …}``. + + """ + return { + field_name for field_name, field in entity.get_fields().items() if field.required is True + } + + +# This file is divided in to three sets of test cases (`TestCase` subclasses): +# +# 1. Tests for inherited methods. +# 2. Tests for entity-specific methods. +# 3. Other tests. +# +# 1. Tests for inherited methods. ---------------------------------------- {{{1 + + +class InitTestCase(TestCase): + """Tests for all of the ``__init__`` methods. + + The tests in this class are a sanity check. They simply check to see if you + can instantiate each entity. + + """ + + @classmethod + def setUpClass(cls): + """Set a server configuration at ``cls.cfg``.""" + cls.cfg = config.ServerConfig('http://example.com') + + def test_init_succeeds(self): + """Instantiate every entity. + + Assert that the returned object is an instance of the class that + produced it. 
+ + """ + entities_ = [ + (entity, {}) + for entity in ( + # entities.ContentViewFilterRule, # see below + # entities.OperatingSystemParameter, # see below + # entities.SyncPlan, # see below + entities.AbstractComputeResource, + entities.AbstractContentViewFilter, + entities.ActivationKey, + entities.Architecture, + entities.ArfReport, + entities.Audit, + entities.AuthSourceLDAP, + entities.AzureRMComputeResource, + entities.Bookmark, + entities.Capsule, + entities.CommonParameter, + entities.ComputeAttribute, + entities.ComputeProfile, + entities.ConfigGroup, + entities.CompliancePolicies, + entities.ProvisioningTemplate, + entities.ReportTemplate, + # entities.ContentUpload, # see below + entities.ContentCredential, + entities.ContentView, + entities.ContentViewVersion, + entities.DiscoveredHost, + entities.DiscoveryRule, + entities.DockerContentViewFilter, + entities.Domain, + entities.Environment, + entities.Errata, + entities.ErratumContentViewFilter, + entities.File, + entities.Filter, + entities.ForemanStatus, + entities.ForemanTask, + entities.GPGKey, + entities.GCEComputeResource, + entities.Host, + entities.HostCollection, + entities.HostCollectionErrata, + entities.HostCollectionPackage, + entities.HostGroup, + entities.HTTPProxy, + entities.KatelloStatus, + entities.LibvirtComputeResource, + entities.LifecycleEnvironment, + entities.JobInvocation, + entities.JobTemplate, + entities.Location, + entities.Media, + entities.Model, + entities.ModuleStreamContentViewFilter, + # entities.OSDefaultTemplate, # see below + entities.OperatingSystem, + entities.Organization, + entities.OVirtComputeResource, + entities.PackageGroupContentViewFilter, + entities.PartitionTable, + entities.Permission, + entities.Ping, + entities.Package, + entities.PackageGroup, + entities.Product, + entities.ProductBulkAction, + entities.PuppetClass, + entities.RPMContentViewFilter, + entities.Realm, + entities.RecurringLogic, + entities.Report, + entities.Repository, + entities.RepositorySet, + entities.Role, + entities.RoleLDAPGroups, + entities.ScapContents, + entities.Setting, + entities.SmartClassParameters, + entities.SmartProxy, + entities.SmartVariable, + # entities.Snapshot, # see below + entities.Srpms, + entities.Status, + entities.Subnet, + entities.Subscription, + entities.TailoringFile, + entities.TemplateCombination, + entities.Template, + entities.TemplateKind, + entities.User, + entities.UserGroup, + entities.VirtWhoConfig, + entities.VMWareComputeResource, + ) + ] + entities_.extend( + [ + ( + entities.LibvirtComputeResource, + {'display_type': 'VNC', 'set_console_password': False}, + ), + (entities.ContentUpload, {'repository': 1}), + (entities.ContentViewComponent, {'composite_content_view': 1}), + (entities.ContentViewFilterRule, {'content_view_filter': 1}), + (entities.ExternalUserGroup, {'usergroup': 1}), + (entities.HostPackage, {'host': 1}), + (entities.HostSubscription, {'host': 1}), + (entities.Interface, {'host': 1}), + (entities.Image, {'compute_resource': 1}), + (entities.OperatingSystemParameter, {'operatingsystem': 1}), + (entities.OSDefaultTemplate, {'operatingsystem': 1}), + (entities.OverrideValue, {'smart_class_parameter': 1}), + (entities.OverrideValue, {'smart_variable': 1}), + (entities.Parameter, {'domain': 1}), + (entities.Parameter, {'host': 1}), + (entities.Parameter, {'hostgroup': 1}), + (entities.Parameter, {'location': 1}), + (entities.Parameter, {'operatingsystem': 1}), + (entities.Parameter, {'organization': 1}), + (entities.Parameter, {'subnet': 1}), + 
(entities.RepositorySet, {'product': 1}), + (entities.Snapshot, {'host': 1}), + (entities.SSHKey, {'user': 1}), + (entities.SyncPlan, {'organization': 1}), + (entities.TemplateInput, {'template': 1}), + ] + ) + for entity, params in entities_: + with self.subTest(entity): + self.assertIsInstance(entity(self.cfg, **params), entity) + + def test_required_params(self): + """Instantiate entities that require extra parameters. + + Assert that ``TypeError`` is raised if the required extra parameters + are not provided. + + """ + for entity in ( + entities.ContentViewComponent, + entities.ContentViewFilterRule, + entities.ExternalUserGroup, + entities.HostPackage, + entities.HostSubscription, + entities.Image, + entities.OverrideValue, + entities.OperatingSystemParameter, + entities.OSDefaultTemplate, + entities.Parameter, + entities.SyncPlan, + entities.TemplateInput, + ): + with self.subTest(): + with self.assertRaises(TypeError): + entity(self.cfg) + + +class PathTestCase(TestCase): + """Tests for extensions of :meth:`nailgun.entity_mixins.Entity.path`.""" + + longMessage = True + + def setUp(self): + """Set ``self.cfg`` and ``self.id_``.""" + self.cfg = config.ServerConfig('http://example.com') + self.id_ = gen_integer(min_value=1) + + def test_nowhich(self): + """Execute ``entity().path()`` and ``entity(id=…).path()``.""" + for entity, path in ( + (entities.ActivationKey, '/activation_keys'), + (entities.Capsule, '/capsules'), + (entities.ProvisioningTemplate, '/provisioning_templates'), + (entities.ReportTemplate, '/report_templates'), + (entities.Role, '/roles'), + (entities.ContentView, '/content_views'), + (entities.ContentViewVersion, '/content_view_versions'), + (entities.CompliancePolicies, '/compliance/policies'), + (entities.DiscoveredHost, '/discovered_hosts'), + (entities.DiscoveryRule, '/discovery_rules'), + (entities.Environment, '/environments'), + (entities.Errata, '/errata'), + (entities.Organization, '/organizations'), + (entities.Host, '/hosts'), + (entities.HostGroup, '/hostgroups'), + (entities.Product, '/products'), + (entities.ProductBulkAction, '/products/bulk'), + (entities.PuppetClass, '/puppetclasses'), + (entities.RHCIDeployment, '/deployments'), + (entities.Repository, '/repositories'), + (entities.Setting, '/settings'), + (entities.SmartProxy, '/smart_proxies'), + (entities.Subscription, '/subscriptions'), + (entities.ScapContents, '/scap_contents'), + (entities.VirtWhoConfig, '/foreman_virt_who_configure/api/v2/configs'), + ): + with self.subTest((entity, path)): + self.assertIn(path, entity(self.cfg).path()) + self.assertIn(f'{path}/{self.id_}', entity(self.cfg, id=self.id_).path()) + + def test_id_and_which(self): + """Execute ``entity(id=…).path(which=…)``.""" + for entity, which in ( + (entities.ActivationKey, 'add_subscriptions'), + (entities.ActivationKey, 'content_override'), + (entities.ActivationKey, 'copy'), + (entities.ActivationKey, 'host_collections'), + (entities.ActivationKey, 'releases'), + (entities.ActivationKey, 'remove_subscriptions'), + (entities.ActivationKey, 'subscriptions'), + (entities.AbstractComputeResource, 'available_images'), + (entities.AbstractComputeResource, 'available_zones'), + (entities.AbstractComputeResource, 'available_flavors'), + (entities.AbstractComputeResource, 'available_networks'), + (entities.AbstractComputeResource, 'associate'), + (entities.AbstractComputeResource, 'images'), + (entities.ArfReport, 'download_html'), + (entities.ProvisioningTemplate, 'clone'), + (entities.ReportTemplate, 'clone'), + 
(entities.Role, 'clone'), + (entities.ContentView, 'content_view_versions'), + (entities.ContentView, 'copy'), + (entities.ContentView, 'publish'), + (entities.ContentViewVersion, 'promote'), + (entities.DiscoveredHost, 'auto_provision'), + (entities.DiscoveredHost, 'refresh_facts'), + (entities.DiscoveredHost, 'reboot'), + (entities.Environment, 'smart_class_parameters'), + (entities.Host, 'enc'), + (entities.Host, 'errata'), + (entities.Host, 'errata/apply'), + (entities.Host, 'errata/applicability'), + (entities.Host, 'module_streams'), + (entities.Host, 'packages'), + (entities.Host, 'puppetclass_ids'), + (entities.Host, 'smart_class_parameters'), + (entities.Host, 'smart_variables'), + (entities.HostGroup, 'clone'), + (entities.HostGroup, 'puppetclass_ids'), + (entities.HostGroup, 'rebuild_config'), + (entities.HostGroup, 'smart_class_parameters'), + (entities.HostGroup, 'smart_variables'), + (entities.Organization, 'download_debug_certificate'), + (entities.Organization, 'subscriptions'), + (entities.Organization, 'subscriptions/delete_manifest'), + (entities.Organization, 'subscriptions/manifest_history'), + (entities.Organization, 'subscriptions/refresh_manifest'), + (entities.Organization, 'subscriptions/upload'), + (entities.Organization, 'sync_plans'), + (entities.Product, 'sync'), + (entities.PuppetClass, 'smart_class_parameters'), + (entities.Repository, 'errata'), + (entities.Repository, 'packages'), + (entities.Repository, 'remove_content'), + (entities.Repository, 'sync'), + (entities.Repository, 'upload_content'), + (entities.RHCIDeployment, 'deploy'), + (entities.ScapContents, 'xml'), + (entities.VirtWhoConfig, 'deploy_script'), + ): + with self.subTest((entity, which)): + path = entity(self.cfg, id=self.id_).path(which=which) + self.assertIn(f'{self.id_}/{which}', path) + self.assertRegex(path, fr'{which}$') + + def test_noid_and_which(self): + """Execute ``entity().path(which=…)``.""" + for entity, which in ( + (entities.ProductBulkAction, 'destroy'), + (entities.ProductBulkAction, 'sync'), + (entities.ProductBulkAction, 'http_proxy'), + (entities.ProductBulkAction, 'sync_plan'), + (entities.ProvisioningTemplate, 'build_pxe_default'), + (entities.ProvisioningTemplate, 'revision'), + (entities.ContentViewVersion, 'incremental_update'), + (entities.DiscoveredHost, 'auto_provision_all'), + (entities.DiscoveredHost, 'facts'), + (entities.DiscoveredHost, 'reboot_all'), + (entities.Errata, 'compare'), + (entities.ForemanTask, 'bulk_resume'), + (entities.ForemanTask, 'bulk_search'), + (entities.ForemanTask, 'summary'), + (entities.Host, 'bulk/install_content'), + (entities.Template, 'imports'), + (entities.Template, 'exports'), + ): + with self.subTest((entity, which)): + path = entity(self.cfg).path(which) + self.assertIn(which, path) + self.assertRegex(path, fr'{which}$') + + def test_no_such_path_error(self): + """Trigger :class:`nailgun.entity_mixins.NoSuchPathError` exceptions. + + Do this by calling ``entity().path(which=…)``. 
+ + """ + for entity, which in ( + (entities.ActivationKey, 'releases'), + (entities.ContentView, 'content_view_versions'), + (entities.ContentView, 'publish'), + (entities.ContentViewVersion, 'promote'), + (entities.ForemanTask, 'self'), + (entities.HostGroup, 'rebuild_config'), + (entities.Organization, 'products'), + (entities.Organization, 'self'), + (entities.Organization, 'subscriptions'), + (entities.Organization, 'download_debug_certificate'), + (entities.Organization, 'subscriptions/delete_manifest'), + (entities.Organization, 'subscriptions/refresh_manifest'), + (entities.Organization, 'subscriptions/upload'), + (entities.Organization, 'sync_plans'), + (entities.Product, 'repository_sets'), + (entities.Repository, 'sync'), + (entities.Repository, 'upload_content'), + (entities.ScapContents, 'xml'), + (entities.RHCIDeployment, 'deploy'), + (entities.SmartProxy, 'refresh'), + (entities.VirtWhoConfig, 'deploy_script'), + (entities.VirtWhoConfig, 'configs'), + ): + with self.subTest((entity, which)): + with self.assertRaises(NoSuchPathError): + entity(self.cfg).path(which=which) + + def test_arfreport(self): + """Test :meth:`nailgun.entities.ArfReport.path`. + Assert that the following return appropriate paths: + * ``ArfReport(id=…).path()`` + * ``ArfReport(id=…).path('download_html')`` + """ + self.assertIn('compliance/arf_reports/1', entities.ArfReport(self.cfg, id=1).path()) + for which in ['download_html']: + path = entities.ArfReport( + self.cfg, + id=1, + ).path(which) + self.assertIn(f'compliance/arf_reports/1/{which}', path) + self.assertRegex(path, fr'{which}$') + + def test_os_default_template(self): + """Test ``nailgun.entities.OSDefaultTemplate.path`` + + Assert that the following return appropriate paths: + + * ``OSDefaultTemplate(id=…).path()`` + """ + self.assertIn( + 'operatingsystems/1/os_default_templates/2', + entities.OSDefaultTemplate(self.cfg, id=2, operatingsystem=1).path(), + ) + + def test_externalusergroup(self): + """Test :meth:`nailgun.entities.ExternalUserGroup.path`. + + Assert that the following return appropriate paths: + + * ``ExternalUserGroup(id=…,usergroup=…).path()`` + * ``ExternalUserGroup(id=…,usergroup=…).path('refresh')`` + + """ + self.assertIn( + 'usergroups/1/external_usergroups/2', + entities.ExternalUserGroup(self.cfg, id=2, usergroup=1).path(), + ) + for which in ['refresh']: + path = entities.ExternalUserGroup( + self.cfg, + id=2, + usergroup=1, + ).path(which) + self.assertIn(f'usergroups/1/external_usergroups/2/{which}', path) + self.assertRegex(path, fr'{which}$') + + def test_repository_set(self): + """Test :meth:`nailgun.entities.RepositorySet.path`. + + Assert that the following return appropriate paths: + + * ``RepositorySet(id=…).path()`` + * ``RepositorySet(id=…).path('available_repositories')`` + * ``RepositorySet(id=…).path('disable')`` + * ``RepositorySet(id=…).path('enable')`` + + """ + self.assertIn( + '/repository_sets/2', entities.RepositorySet(self.cfg, id=2, product=1).path() + ) + for which in ('available_repositories', 'disable', 'enable'): + path = entities.RepositorySet( + self.cfg, + id=2, + product=1, + ).path(which) + self.assertIn(f'/repository_sets/2/{which}', path) + self.assertRegex(path, fr'{which}$') + + def test_snapshot(self): + """Test :meth:`nailgun.entities.Snapshot.path`. 
+ + Assert that the following return appropriate paths: + + * ``Snapshot(id=…).path()`` + * ``Snapshot(id=…).path('revert')`` + + """ + self.assertIn( + 'hosts/1/snapshots/snapshot-2', + entities.Snapshot(self.cfg, id='snapshot-2', host=1).path(), + ) + which = 'revert' + path = entities.Snapshot( + self.cfg, + id='snapshot-2', + host=1, + ).path(which) + self.assertIn(f'hosts/1/snapshots/snapshot-2/{which}', path) + self.assertRegex(path, fr'{which}$') + + def test_sync_plan(self): + """Test :meth:`nailgun.entities.SyncPlan.path`. + + Assert that the following return appropriate paths: + + * ``SyncPlan(id=…).path()`` + * ``SyncPlan(id=…).path('add_products')`` + * ``SyncPlan(id=…).path('remove_products')`` + + """ + self.assertIn( + 'organizations/1/sync_plans/2', + entities.SyncPlan(self.cfg, id=2, organization=1).path(), + ) + for which in ('add_products', 'remove_products'): + path = entities.SyncPlan( + self.cfg, + id=2, + organization=1, + ).path(which) + self.assertIn(f'organizations/1/sync_plans/2/{which}', path) + self.assertRegex(path, fr'{which}$') + + def test_subscription(self): + """Test :meth:`nailgun.entities.Subscription.path`. + + Assert that the following return appropriate paths: + + * ``Subscription(organization=…).path('delete_manifest')`` + * ``Subscription(organization=…).path('manifest_history')`` + * ``Subscription(organization=…).path('refresh_manifest')`` + * ``Subscription(organization=…).path('upload')`` + + """ + sub = entities.Subscription(self.cfg, organization=gen_integer(1, 100)) + for which in ('delete_manifest', 'manifest_history', 'refresh_manifest', 'upload'): + with self.subTest(which): + path = sub.path(which) + self.assertIn(f'organizations/{sub.organization.id}/subscriptions/{which}', path) + self.assertRegex(path, fr'{which}$') + + def test_capsule(self): + """Test :meth:`nailgun.entities.Capsule.path`. + + Assert that the following return appropriate paths: + + * ``Capsule().path('content_lifecycle_environments')`` + * ``Capsule().path('content_sync')`` + + """ + capsule = entities.Capsule(self.cfg, id=gen_integer(1, 100)) + for which in ('content_lifecycle_environments', 'content_sync'): + with self.subTest(which): + path = capsule.path(which) + which_parts = which.split("_", 1) + self.assertIn(f'capsules/{capsule.id}/content/{which_parts[1]}', path) + self.assertRegex(path, fr'{which_parts[0]}/{which_parts[1]}$') + + def test_hostsubscription(self): + """Test :meth:`nailgun.entities.HostSubscription.path`. 
+ + Assert that the following return appropriate paths: + + * ``HostSubscription(host=…).path('add_subscriptions')`` + * ``HostSubscription(host=…).path('remove_subscriptions')`` + + """ + sub = entities.HostSubscription(self.cfg, host=gen_integer(1, 100)) + for which in ('add_subscriptions', 'remove_subscriptions'): + with self.subTest(which): + path = sub.path(which) + self.assertIn(f'hosts/{sub.host.id}/subscriptions/{which}', path) + self.assertRegex(path, fr'{which}$') + + +class CreateTestCase(TestCase): + """Tests for :meth:`nailgun.entity_mixins.EntityCreateMixin.create`.""" + + @classmethod + def setUpClass(cls): + """Set a server configuration at ``cls.cfg``.""" + cls.cfg = config.ServerConfig('http://example.com') + + def test_generic(self): + """Call ``create`` on a variety of entities.""" + entities_ = ( + entities.ConfigGroup(self.cfg), + entities.CompliancePolicies(self.cfg), + entities.DiscoveryRule(self.cfg), + entities.DiscoveredHost(self.cfg), + entities.Domain(self.cfg), + entities.Host(self.cfg), + entities.HostCollection(self.cfg), + entities.HostGroup(self.cfg), + entities.HTTPProxy(self.cfg), + entities.Location(self.cfg), + entities.Media(self.cfg), + entities.Organization(self.cfg), + entities.Realm(self.cfg), + entities.ScapContents(self.cfg), + entities.SmartProxy(self.cfg), + entities.TailoringFile(self.cfg), + entities.UserGroup(self.cfg), + entities.VirtWhoConfig(self.cfg), + ) + for entity in entities_: + with self.subTest(entity): + with mock.patch.object(entity, 'create_json') as create_json: + with mock.patch.object(type(entity), 'read') as read: + entity.create() + self.assertEqual(create_json.call_count, 1) + self.assertEqual(create_json.call_args[0], (None,)) + self.assertEqual(read.call_count, 1) + self.assertEqual(read.call_args[0], ()) + + +class CreatePayloadTestCase(TestCase): + """Tests for extensions of ``create_payload``. + + Several classes extend the ``create_payload`` method and make it do things + like rename attributes or wrap the submitted dict of data in a second hash. + It is possible to mess this up in a variety of ways. For example, an + extended method could try to rename an attribute that does not exist. + This class attempts to find such issues by creating an entity, calling + :meth:`nailgun.entity_mixins.EntityCreateMixin.create_payload` and + asserting that a ``dict`` is returned. 
+ + """ + + @classmethod + def setUpClass(cls): + """Set a server configuration at ``cls.cfg``.""" + cls.cfg = config.ServerConfig('http://example.com') + + def test_no_attributes(self): + """Instantiate an entity and call ``create_payload`` on it.""" + entities_ = [ + (entity, {}) + for entity in ( + entities.AbstractComputeResource, + entities.Architecture, + entities.ConfigGroup, + entities.ProvisioningTemplate, + entities.ReportTemplate, + entities.DiscoveredHost, + entities.DiscoveryRule, + entities.Domain, + entities.Environment, + entities.Filter, + entities.Host, + entities.HostCollection, + entities.HostGroup, + entities.HTTPProxy, + entities.JobTemplate, + entities.LifecycleEnvironment, + entities.Location, + entities.Media, + entities.OperatingSystem, + entities.Role, + entities.ScapContents, + entities.SmartVariable, + entities.Subnet, + entities.TailoringFile, + entities.User, + entities.UserGroup, + entities.VirtWhoConfig, + ) + ] + entities_.extend( + [ + (entities.ExternalUserGroup, {'usergroup': 1}), + (entities.Image, {'compute_resource': 1}), + (entities.SyncPlan, {'organization': 1}), + (entities.ContentViewFilterRule, {'content_view_filter': 1}), + ] + ) + for entity, params in entities_: + with self.subTest(): + self.assertIsInstance(entity(self.cfg, **params).create_payload(), dict) + + def test_external_usergroup_payload(self): + """Call ``create_payload`` on a :class:`nailgun.entities.ExternalUserGroup`.""" + payload = entities.ExternalUserGroup( + self.cfg, + usergroup=1, + ).create_payload() + self.assertEqual({'usergroup_id': 1}, payload) + + def test_sync_plan(self): + """Call ``create_payload`` on a :class:`nailgun.entities.SyncPlan`.""" + self.assertIsInstance( + entities.SyncPlan( + self.cfg, + organization=1, + sync_date=datetime.now(), + ).create_payload()['sync_date'], + type(''), # different for Python 2 and 3 + ) + + def test_host_collection(self): + """Create a :class:`nailgun.entities.HostCollection`.""" + HOST_ID = 1 + ORG_ID = 1 + entity_kwargs = { + 'name': gen_alpha(), + 'description': gen_alpha(), + 'max_hosts': gen_integer(min_value=1, max_value=10), + 'unlimited_hosts': False, + 'organization': entities.Organization(self.cfg, id=ORG_ID), + 'host': [entities.Host(self.cfg, id=HOST_ID)], + } + host_collection = entities.HostCollection(self.cfg, **entity_kwargs) + payload = host_collection.create_payload() + # host and organization are translated for payload + entity_kwargs.pop('organization') + entity_kwargs.pop('host') + entity_kwargs.update({'organization_id': ORG_ID, 'host_ids': [HOST_ID]}) + self.assertDictEqual(entity_kwargs, payload) + + def test_content_view_filter_rule(self): + """Create a :class:`nailgun.entities.ContentViewFilterRule`.""" + errata_kwargs = { + "id": 1, + "uuid": "1a321570-cd30-4622-abff-2290b47ef814", + "title": "Bird_Erratum", + "errata_id": "RHEA-2012:0003", + "issued": "2012-01-27", + "updated": "2012-01-27", + "severity": "", + "description": "Bird_Erratum", + "solution": "", + "summary": "", + "reboot_suggested": False, + "name": "Bird_Erratum", + "type": "security", + "cves": [], + "hosts_available_count": 0, + "hosts_applicable_count": 0, + "packages": ["stork-0.12-2.noarch"], + "module_streams": [ + { + "name": "duck", + "stream": "0", + "version": "201809302113907", + "context": "deadbeef", + "arch": "noarch", + "id": 1, + "packages": ["duck-0.8-1.noarch"], + } + ], + } + + with mock.patch.object(entities.Errata, 'read_json') as read_json: + read_json.return_value = errata_kwargs + payload = 
entities.ContentViewFilterRule( + self.cfg, + content_view_filter=1, + errata=1, + ).create_payload() + self.assertEqual("RHEA-2012:0003", payload['errata_id']) + + def test_image(self): + """Create a :class:`nailgun.entities.Image`.""" + payload = entities.Image( + self.cfg, + compute_resource=1, + ).create_payload() + self.assertEqual({'image': {'compute_resource_id': 1}}, payload) + + def test_media(self): + """Create a :class:`nailgun.entities.Media`.""" + payload = entities.Media(self.cfg, path_='foo').create_payload() + self.assertNotIn('path_', payload['medium']) + self.assertIn('path', payload['medium']) + + def test_discovery_rule(self): + """Create a :class:`nailgun.entities.DiscoveryRule`.""" + payload = entities.DiscoveryRule( + self.cfg, + search_='foo', + ).create_payload() + self.assertNotIn('search_', payload['discovery_rule']) + self.assertIn('search', payload['discovery_rule']) + + def test_override_value(self): + """Create a :class:`nailgun.entities.OverrideValue`.""" + payload = entities.OverrideValue( + self.cfg, + smart_class_parameter=1, + ).create_payload() + self.assertNotIn('smart_class_parameter_id', payload) + payload = entities.OverrideValue( + self.cfg, + smart_variable=1, + ).create_payload() + self.assertNotIn('smart_variable_id', payload) + + def test_job_template(self): + """Create a :class:`nailgun.entities.JobTemplate`.""" + payload = entities.JobTemplate( + self.cfg, + effective_user={'value': 'foo'}, + name='brick system', + template='rm -rf --no-preserve-root /', + ).create_payload() + self.assertNotIn('effective_user', payload) + self.assertIn('effective_user', payload['job_template']['ssh']) + + def test_subnet(self): + """Create a :class:`nailgun.entities.Subnet`.""" + payload = entities.Subnet( + self.cfg, + from_='10.0.0.1', + ).create_payload() + self.assertNotIn('from_', payload['subnet']) + self.assertIn('from', payload['subnet']) + + +class CreateMissingTestCase(TestCase): + """Tests for extensions of ``create_missing``.""" + + @classmethod + def setUpClass(cls): + """Set a server configuration at ``cls.cfg``.""" + cls.cfg = config.ServerConfig('http://example.com') + # Fields optionally populated by AuthSourceLDAP.create_missing() + cls.AS_LDAP_FIELDS = ( + 'account_password', + 'attr_firstname', + 'attr_lastname', + 'attr_login', + 'attr_mail', + ) + + def test_auth_source_ldap_v1(self): + """Test ``AuthSourceLDAP(onthefly_register=False).create_missing()``""" + entity = entities.AuthSourceLDAP(self.cfg, onthefly_register=False) + with mock.patch.object(EntityCreateMixin, 'create_missing'): + entity.create_missing() + self.assertTrue(set(self.AS_LDAP_FIELDS).isdisjoint(entity.get_values())) + + def test_auth_source_ldap_v2(self): + """Test ``AuthSourceLDAP(onthefly_register=True).create_missing()``.""" + entity = entities.AuthSourceLDAP(self.cfg, onthefly_register=True) + with mock.patch.object(EntityCreateMixin, 'create_missing'): + entity.create_missing() + self.assertTrue(set(self.AS_LDAP_FIELDS).issubset(entity.get_values())) + + def test_auth_source_ldap_v3(self): + """Does ``AuthSourceLDAP.create_missing`` overwrite fields?""" + attrs = {field: i for i, field in enumerate(self.AS_LDAP_FIELDS)} + attrs.update({'onthefly_register': True}) + entity = entities.AuthSourceLDAP(self.cfg, **attrs) + with mock.patch.object(EntityCreateMixin, 'create_missing'): + entity.create_missing() + for key, value in attrs.items(): + with self.subTest((key, value)): + self.assertEqual(getattr(entity, key), value) + + def test_report_template_v1(self): + 
"""Test ``ReportTemplate(name='testName')``.""" + entity = entities.ReportTemplate(self.cfg, name='testName') + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertEqual(entity.name, 'testName') + + def test_report_template_v2(self): + """Test ``ReportTemplate()``.""" + entity = entities.ReportTemplate(self.cfg) + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertNotEqual(entity.name, '') + + def test_report_template_v3(self): + """Test ``ReportTemplate(default=True)``.""" + entity = entities.ReportTemplate(self.cfg, default=True) + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertEqual( + _get_required_field_names(entity), + set(entity.get_values().keys()), + ) + + def test_report_template_v4(self): + """Test ``ReportTemplate(default=False)``.""" + entity = entities.ReportTemplate(self.cfg, default=False) + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertEqual( + _get_required_field_names(entity), + set(entity.get_values().keys()), + ) + + def test_provisioning_template_v1(self): + """Test ``ProvisioningTemplate(snippet=True)``.""" + entity = entities.ProvisioningTemplate(self.cfg, snippet=True) + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertEqual( + _get_required_field_names(entity), + set(entity.get_values().keys()), + ) + + def test_provisioning_template_v2(self): + """Test ``ProvisioningTemplate(snippet=False)``.""" + entity = entities.ProvisioningTemplate(self.cfg, snippet=False) + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertEqual( + _get_required_field_names(entity).union(['template_kind']), + set(entity.get_values().keys()), + ) + + def test_provisioning_template_v3(self): + """Test ``ProvisioningTemplate(snippet=False, template_kind=…)``.""" + tk_id = gen_integer() + entity = entities.ProvisioningTemplate( + self.cfg, + snippet=False, + template_kind=tk_id, + ) + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertEqual( + _get_required_field_names(entity).union(['template_kind']), + set(entity.get_values().keys()), + ) + self.assertEqual(entity.template_kind.id, tk_id) + + def test_domain_v1(self): + """Test ``Domain(name='UPPER')``.""" + entity = entities.Domain(self.cfg, name='UPPER') + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertEqual(entity.name, 'UPPER') + + def test_domain_v2(self): + """Test ``Domain()``.""" + entity = entities.Domain(self.cfg) + with mock.patch.object(EntityCreateMixin, 'create_raw'): + with mock.patch.object(EntityReadMixin, 'read_raw'): + entity.create_missing() + self.assertTrue(entity.name.islower()) + + def test_external_usergroup(self): + """Test ``ExternalUserGroup()`` """ + entity = entities.ExternalUserGroup(self.cfg, usergroup=1) + with mock.patch.object(EntityCreateMixin, 'create_missing'): + entity.create_missing() + 
self.assertTrue(entity.get_fields()['usergroup'].required) + + def test_host_v1(self): + """Test ``Host()``.""" + entity = entities.Host(self.cfg) + with mock.patch.object(EntityCreateMixin, 'create_json'): + with mock.patch.object(EntityReadMixin, 'read_json'): + with mock.patch.object(EntityReadMixin, 'read'): + entity.create_missing() + self.assertEqual( + set(entity.get_values().keys()), + _get_required_field_names(entity).union( + ( + 'architecture', + 'domain', + 'environment', + 'mac', + 'medium', + 'operatingsystem', + 'ptable', + 'root_pass', + ) + ), + ) + + def test_host_v2(self): + """Test ``Host()`` with providing all the optional entities unlinked""" + org = entities.Organization(self.cfg, id=1) + loc = entities.Location(self.cfg, id=1) + domain = entities.Domain( + self.cfg, + id=1, + location=[2], + organization=[2], + ) + env = entities.Environment( + self.cfg, + id=1, + location=[2], + organization=[2], + ) + arch = entities.Architecture(self.cfg, id=1) + ptable = entities.PartitionTable( + self.cfg, + id=1, + location=[2], + organization=[2], + ) + oper_sys = entities.OperatingSystem( + self.cfg, + id=1, + architecture=[2], + ptable=[2], + ) + media = entities.Media( + self.cfg, + id=1, + location=[2], + operatingsystem=[2], + organization=[2], + ) + entity = entities.Host( + self.cfg, + architecture=arch, + domain=domain, + environment=env, + location=loc, + medium=media, + operatingsystem=oper_sys, + organization=org, + ptable=ptable, + ) + with mock.patch.object(EntityCreateMixin, 'create_json'): + with mock.patch.object(EntityReadMixin, 'read_json'): + with mock.patch.object(EntityUpdateMixin, 'update_json'): + with mock.patch.object(EntityReadMixin, 'read'): + entity.create_missing() + for subentity in domain, env, media: + self.assertIn(loc.id, [loc_.id for loc_ in subentity.location]) + self.assertIn(org.id, [org_.id for org_ in subentity.organization]) + self.assertIn(arch.id, [arch_.id for arch_ in oper_sys.architecture]) + self.assertIn(ptable.id, [ptable_.id for ptable_ in oper_sys.ptable]) + self.assertIn(oper_sys.id, [os_.id for os_ in media.operatingsystem]) + + def test_host_v3(self): + """Test ``Host()`` providing optional entities with id only. + Check that additional read was called for that entities. 
+ """ + optional = { + 'domain': entities.Domain(self.cfg, id=1), + 'env': entities.Environment(self.cfg, id=1), + 'arch': entities.Architecture(self.cfg, id=1), + 'oper_sys': entities.OperatingSystem(self.cfg, id=1), + 'media': entities.Media(self.cfg, id=1), + } + entity = entities.Host( + self.cfg, + architecture=optional['arch'], + domain=optional['domain'], + environment=optional['env'], + medium=optional['media'], + operatingsystem=optional['oper_sys'], + ) + with mock.patch.object(EntityCreateMixin, 'create_json'): + with mock.patch.object(EntityReadMixin, 'read_json'): + with mock.patch.object(EntityUpdateMixin, 'update_json'): + with mock.patch.object(EntityReadMixin, 'read') as read: + entity.create_missing() + self.assertGreaterEqual(read.call_count, len(optional)) + + def test_lifecycle_environment_v1(self): + """Test ``LifecycleEnvironment(name='Library')``.""" + entity = entities.LifecycleEnvironment(self.cfg, name='Library') + with mock.patch.object(EntityCreateMixin, 'create_missing'): + with mock.patch.object(EntitySearchMixin, 'search') as search: + entity.create_missing() + self.assertEqual(search.call_count, 0) + + def test_lifecycle_environment_v2(self): + """Test ``LifecycleEnvironment(name='not Library')``.""" + entity = entities.LifecycleEnvironment( + self.cfg, + name='not Library', + organization=1, + ) + with mock.patch.object(EntityCreateMixin, 'create_missing'): + with mock.patch.object(EntitySearchMixin, 'search') as search: + search.return_value = [gen_integer()] + entity.create_missing() + self.assertEqual(search.call_count, 1) + self.assertEqual(entity.prior, search.return_value[0]) + + def test_lifecycle_environment_v3(self): + """What happens when the "Library" lifecycle env cannot be found?""" + entity = entities.LifecycleEnvironment( + self.cfg, + name='not Library', + organization=1, + ) + with mock.patch.object(EntityCreateMixin, 'create_missing'): + with mock.patch.object(EntitySearchMixin, 'search') as search: + search.return_value = [] + with self.assertRaises(entities.APIResponseError): + entity.create_missing() + + def test_repository_v1(self): + """Test ``Repository(content_type='docker')``.""" + entity = entities.Repository(self.cfg, content_type='docker') + with mock.patch.object(EntityCreateMixin, 'create_missing'): + entity.create_missing() + self.assertTrue(entity.get_fields()['docker_upstream_name'].required) + + def test_repository_v2(self): + """Test ``Repository(content_type='not docker')``.""" + entity = entities.Repository(self.cfg, content_type='not docker') + with mock.patch.object(EntityCreateMixin, 'create_missing'): + entity.create_missing() + self.assertFalse(entity.get_fields()['docker_upstream_name'].required) + + +class ReadTestCase(TestCase): + """Tests for :meth:`nailgun.entity_mixins.EntityReadMixin.read`.""" + + def setUp(self): + """Set a server configuration at ``self.cfg``.""" + self.cfg = config.ServerConfig('http://example.com') + + def test_entity_arg(self): + """Call ``read`` on entities that require parameters for instantiation. + + Some entities require extra parameters when being instantiated. As a + result, these entities must extend + :meth:`nailgun.entity_mixins.EntityReadMixin.read` by providing a value + for the ``entity`` argument. Assert that these entities pass their + server configuration objects to the child entities that they create and + pass in to the ``entity`` argument. 
+ + """ + for entity in ( + entities.ContentViewFilterRule( + self.cfg, + content_view_filter=2, + ), + entities.ContentViewComponent(self.cfg, composite_content_view=2, content_view=1), + entities.ExternalUserGroup(self.cfg, usergroup=1), + entities.Interface(self.cfg, host=2), + entities.Image(self.cfg, compute_resource=1), + entities.OperatingSystemParameter(self.cfg, operatingsystem=2), + entities.OSDefaultTemplate(self.cfg, operatingsystem=2), + entities.OverrideValue(self.cfg, smart_class_parameter=2), + entities.OverrideValue(self.cfg, smart_variable=2), + entities.Parameter(self.cfg, domain=2), + entities.Parameter(self.cfg, host=2), + entities.Parameter(self.cfg, hostgroup=2), + entities.Parameter(self.cfg, location=2), + entities.Parameter(self.cfg, operatingsystem=2), + entities.Parameter(self.cfg, organization=2), + entities.Parameter(self.cfg, subnet=2), + entities.RepositorySet(self.cfg, product=2), + entities.Snapshot(self.cfg, host=2), + entities.SSHKey(self.cfg, user=2), + entities.SyncPlan(self.cfg, organization=2), + ): + # We mock read_json() because it may be called by read(). + with mock.patch.object(EntityReadMixin, 'read_json'): + with mock.patch.object(EntityReadMixin, 'read') as read: + entity.read() + self.assertEqual(read.call_count, 1) + # read.call_args[0][0] is the `entity` argument to read() + self.assertEqual(read.call_args[0][0]._server_config, self.cfg) + + def test_attrs_arg_v1(self): + """Ensure ``read`` and ``read_json`` are both called once. + + This test is only appropriate for entities that override the ``read`` + method in order to fiddle with the ``attrs`` argument. + + """ + for entity in ( + # entities.DiscoveryRule, # see test_discovery_rule + # entities.HostGroup, # see HostGroupTestCase.test_read + # entities.Product, # See Product.test_read + # entities.UserGroup, # see test_attrs_arg_v2 + entities.ContentView, + entities.Domain, + entities.Filter, + entities.Host, + entities.Media, + entities.RHCIDeployment, + ): + with mock.patch.object(EntityReadMixin, 'read_json') as read_json: + with mock.patch.object(EntityReadMixin, 'read') as read: + with self.subTest(): + entity(self.cfg).read() + self.assertEqual(read_json.call_count, 1) + self.assertEqual(read.call_count, 1) + + def test_attrs_arg_v2(self): + """Ensure ``read``, ``read_json`` and ``client.put`` are called once. + + This test is only appropriate for entities that override the ``read`` + method in order to fiddle with the ``attrs`` argument. + + """ + # test_data is a single-use variable. We use it anyway for formatting + # purposes. + test_data = ((entities.UserGroup(self.cfg, id=1), {'admin': 'foo'}),) + for entity, server_response in test_data: + with mock.patch.object(EntityReadMixin, 'read_json') as read_json: + read_json.return_value = {} + with mock.patch.object(EntityReadMixin, 'read') as read: + with mock.patch.object(client, 'put') as put: + put.return_value.json.return_value = server_response + entity.read() + self.assertEqual(read_json.call_count, 1) + self.assertEqual(read.call_count, 1) + self.assertEqual(put.call_count, 1) + self.assertEqual(read.call_args[0][1], server_response) + + def test_entity_ids(self): + """Test cases where the server returns unusually named attributes. + + Assert that the returned attributes are renamed to be more regular + before calling ``read()``. + + """ + # test_data is a single-use variable. We use it anyway for formatting + # purposes. 
+
+ test_data = (
+ (
+ entities.Domain(self.cfg),
+ {'parameters': None},
+ {'domain_parameters_attributes': None},
+ ),
+ (
+ entities.Host(self.cfg),
+ {'parameters': None},
+ {'host_parameters_attributes': None},
+ ),
+ (
+ entities.Filter(self.cfg),
+ {'override?': None, 'unlimited?': None},
+ {'override': None, 'unlimited': None},
+ ),
+ )
+ for entity, attrs_before, attrs_after in test_data:
+ with self.subTest(entity):
+ with mock.patch.object(EntityReadMixin, 'read') as read:
+ entity.read(attrs=attrs_before)
+ self.assertEqual(read.call_args[0][1], attrs_after)
+
+ def test_ignore_arg_v1(self):
+ """Call ``read`` on a variety of entities.
+
+ Assert that the ``ignore`` argument is correctly passed on.
+
+ """
+ for entity, ignored_attrs in (
+ (entities.AzureRMComputeResource, {'secret_key'}),
+ (entities.Errata, {'content_view_version', 'environment', 'repository'}),
+ (entities.OVirtComputeResource, {'password'}),
+ (entities.SmartProxy, {'download_policy'}),
+ (entities.SmartClassParameters, {'hidden_value'}),
+ (entities.SmartVariable, {'hidden_value'}),
+ (
+ entities.Subnet,
+ {'discovery', 'remote_execution_proxy', 'subnet_parameters_attributes'},
+ ),
+ (entities.Subscription, {'organization'}),
+ (entities.Repository, {'organization', 'upstream_password'}),
+ (entities.User, {'password'}),
+ (entities.ScapContents, {'scap_file'}),
+ (entities.TailoringFile, {'scap_file'}),
+ (entities.VirtWhoConfig, {'hypervisor_password', 'http_proxy_id'}),
+ (entities.VMWareComputeResource, {'password'}),
+ (entities.DiscoveredHost, {'ip', 'mac', 'root_pass', 'hostgroup'}),
+ ):
+ with self.subTest(entity):
+ with mock.patch.object(EntityReadMixin, 'read') as read, mock.patch.object(
+ EntityReadMixin, 'read_json'
+ ):
+ entity(self.cfg).read()
+ # `call_args` is a two-tuple of (positional, keyword) args.
+ self.assertEqual(ignored_attrs, read.call_args[0][2])
+
+ def test_ignore_arg_v3(self):
+ """Call :meth:`nailgun.entities.AuthSourceLDAP.read`.
+
+ Assert that the entity ignores the 'account_password' field.
+
+ """
+ with mock.patch.object(EntityUpdateMixin, 'update_json') as u_json:
+ with mock.patch.object(EntityReadMixin, 'read') as read:
+ entities.AuthSourceLDAP(self.cfg).read()
+ self.assertEqual(u_json.call_count, 1)
+ self.assertEqual(read.call_count, 1)
+ self.assertEqual({'account_password'}, read.call_args[0][2])
+
+ def test_ignore_arg_v4(self):
+ """Call :meth:`nailgun.entities.User.read`.
+
+ Assert that the entity's predefined values of ``ignore`` are always
+ correctly passed on.
+
+ """
+ for input_ignore, actual_ignore in (
+ (None, {'password'}),
+ ({'password'}, {'password'}),
+ ({'email'}, {'email', 'password'}),
+ ({'email', 'password'}, {'email', 'password'}),
+ ):
+ with self.subTest(input_ignore):
+ with mock.patch.object(EntityReadMixin, 'read') as read:
+ entities.User(self.cfg).read(ignore=input_ignore)
+ # `call_args` is a two-tuple of (positional, keyword) args.
+ self.assertEqual(actual_ignore, read.call_args[0][2])
+
+ def test_interface_ignore_arg(self):
+ """Call :meth:`nailgun.entities.Interface.read`.
+
+ Assert that the entity's predefined values of ``ignore`` are always
+ correctly passed on.
+ """ + for input_type, actual_ignore in ( + ( + 'interface', + { + 'host', + 'username', + 'password', + 'provider', + 'mode', + 'bond_options', + 'attached_to', + 'tag', + 'attached_devices', + }, + ), + ('bmc', {'host', 'mode', 'bond_options', 'attached_to', 'tag', 'attached_devices'}), + ('bond', {'host', 'username', 'password', 'provider', 'attached_to', 'tag'}), + ( + 'bridge', + { + 'host', + 'username', + 'password', + 'provider', + 'mode', + 'bond_options', + 'attached_to', + 'tag', + }, + ), + ( + 'virtual', + { + 'host', + 'username', + 'password', + 'provider', + 'mode', + 'bond_options', + 'attached_devices', + }, + ), + ): + with self.subTest(input_type): + with mock.patch.object(EntityReadMixin, 'read') as read: + with mock.patch.object( + EntityReadMixin, + 'read_json', + return_value={'type': input_type}, + ): + entities.Interface(self.cfg, id=2, host=2, type=input_type).read() + # `call_args` is a two-tuple of (positional, keyword) args. + self.assertEqual(actual_ignore, read.call_args[0][2]) + + def test_parameter_ignore_arg(self): + """Call :meth:`nailgun.entities.Parameter.read`. + + Assert that entity`s predefined values of ``ignore`` are always + correctly passed on. + """ + parents = { + 'domain', + 'host', + 'hostgroup', + 'location', + 'operatingsystem', + 'organization', + 'subnet', + } + for parent in parents: + with self.subTest(parent): + with mock.patch.object(EntityReadMixin, 'read') as read: + with mock.patch.object( + EntityReadMixin, + 'read_json', + return_value={parent: 3}, + ): + entities.Parameter(self.cfg, id=2, **{parent: 3}).read() + # `call_args` is a two-tuple of (positional, keyword) args. + self.assertEqual(parents, read.call_args[0][2]) + + def test_snapshot_ignore_arg(self): + """Call :meth:`nailgun.entities.Snapshot.read`. + + Assert that entity`s predefined values of ``ignore`` are always + correctly passed on. + """ + with mock.patch.object(EntityReadMixin, 'read') as read: + with mock.patch.object( + EntityReadMixin, + 'read_json', + return_value={'host': 3}, + ): + entities.Snapshot(self.cfg, id=2, host=3).read() + # `call_args` is a two-tuple of (positional, keyword) args. + self.assertEqual({'host'}, read.call_args[0][2]) + + def test_host_with_interface(self): + """Call :meth:`nailgun.entities.Host.read`. + + Assert that host will have interfaces initialized and assigned + correctly. + """ + with mock.patch.object( + EntityReadMixin, + 'read', + return_value=entities.Host(self.cfg, id=2), + ): + with mock.patch.object( + EntityReadMixin, + 'read_json', + return_value={ + 'interfaces': [{'id': 2}, {'id': 3}], + 'parameters': None, + }, + ): + host = entities.Host(self.cfg, id=2).read() + self.assertTrue(hasattr(host, 'interface')) + self.assertTrue(isinstance(host.interface, list)) + for interface in host.interface: + self.assertTrue(isinstance(interface, entities.Interface)) + self.assertEqual({interface.id for interface in host.interface}, {2, 3}) + + def test_discovery_rule(self): + """Call :meth:`nailgun.entities.DiscoveryRule.read`. + + Ensure that the ``max_count`` attribute is fetched. 
+ + """ + with mock.patch.object(EntityUpdateMixin, 'update_json') as u_json: + u_json.return_value = {'max_count': 'max_count'} + with mock.patch.object(EntityReadMixin, 'read_json') as read_json: + read_json.return_value = {'id': 'id', 'search': 'search'} + with mock.patch.object(EntityReadMixin, 'read') as read: + entities.DiscoveryRule(self.cfg).read() + for mock_obj in (u_json, read_json, read): + self.assertEqual(mock_obj.call_count, 1) + self.assertEqual(u_json.call_args, mock.call([])) + + def test_product_with_sync_plan(self): + """Call :meth:`nailgun.entities.Product.read` for a product with sync + plan assigned. + + Ensure that the sync plan entity was correctly fetched. + + """ + sync_plan = entities.SyncPlan(self.cfg, id=1, organization=1) + product = entities.Product(self.cfg, id=1, organization=1) + with mock.patch.object(EntityReadMixin, 'read_json') as read_json: + with mock.patch.object(EntityReadMixin, 'read') as read: + read_json.return_value = { + 'sync_plan_id': 1, + 'sync_plan': {'name': 'test_sync_plan'}, + } + read.return_value = product + product = product.read() + self.assertTrue(hasattr(product, 'sync_plan')) + self.assertEqual(product.sync_plan.id, sync_plan.id) + + def test_hostgroup_ignore_root_pass(self): + """Call :meth:`nailgun.entities.HostGroup.read`. + + Assert that the entity ignores the ``root_pass`` field. + + """ + with mock.patch.object(EntityReadMixin, 'read') as read: + with mock.patch.object(EntityReadMixin, 'read_json'): + entities.HostGroup(self.cfg).read() + # `call_args` is a two-tuple of (positional, keyword) args. + self.assertIn('root_pass', read.call_args[0][2]) + + def test_http_proxy_ignore_arg(self): + """Call :meth:`nailgun.entities.HTTPProxy.read`. + Assert that the entity ignores the ``password, organization and location`` field. + """ + with mock.patch.object(EntityReadMixin, 'read') as read: + with mock.patch.object(EntityReadMixin, 'read_json'): + entities.HTTPProxy(self.cfg).read() + # `call_args` is a two-tuple of (positional, keyword) args. + self.assertIn('password', read.call_args[0][2]) + self.assertIn('organization', read.call_args[0][2]) + self.assertIn('location', read.call_args[0][2]) + + def test_usergroup_with_external_usergroup(self): + """Call :meth:`nailgun.entities.ExternalUserGroup.read` for a usergroup with external + usergroup assigned. + + Ensure that the external usergroup entity was correctly fetched. + + """ + with mock.patch.object(EntityReadMixin, 'read') as read: + with mock.patch.object(EntityReadMixin, 'read_json'): + ext_usergrp = entities.ExternalUserGroup(self.cfg, usergroup=1).read() + usergrp = ext_usergrp.read() + self.assertTrue(hasattr(usergrp, 'usergroup')) + self.assertIn('usergroup', read.call_args[0][2]) + + def test_subnet(self): + """Call :meth:`nailgun.entities.Subnet.read`. + + Ensure that the ``from_`` attribute is successfully set. + + """ + with mock.patch.object(EntityReadMixin, 'read_json') as read_json: + read_json.return_value = {'from': 'foo'} + with mock.patch.object(EntityReadMixin, 'read') as read: + entities.Subnet(self.cfg).read() + for mock_obj in (read_json, read): + self.assertEqual(mock_obj.call_count, 1) + self.assertIn('from_', read.call_args[0][1]) + + +class SearchTestCase(TestCase): + """Tests for + :meth:`nailgun.entity_mixins.EntitySearchMixin.search`. 
+
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ """Set a server configuration at ``cls.cfg``."""
+ cls.cfg = config.ServerConfig('http://example.com')
+
+ def test_product_with_sync_plan(self):
+ """Call :meth:`nailgun.entities.Product.search` for a product with sync
+ plan assigned.
+
+ Ensure that the sync plan entity was correctly fetched.
+
+ """
+ with mock.patch.object(EntitySearchMixin, 'search_json') as search_json:
+ # Sync plan set
+ search_json.return_value = {
+ 'results': [
+ {
+ 'id': 2,
+ 'name': 'test_product',
+ 'organization': {
+ 'id': 1,
+ 'label': 'Default_Organization',
+ 'name': 'Default Organization',
+ },
+ 'organization_id': 1,
+ 'sync_plan': {'id': 1, 'interval': 'hourly', 'name': 'sync1'},
+ 'sync_plan_id': 1,
+ }
+ ]
+ }
+ result = entities.Product(self.cfg, organization=1).search()
+ self.assertIsNotNone(result[0].sync_plan)
+ self.assertEqual(result[0].sync_plan.id, 1)
+ # Sync plan not set
+ search_json.return_value = {
+ 'results': [
+ {
+ 'id': 3,
+ 'name': 'test_product2',
+ 'organization': {
+ 'id': 1,
+ 'label': 'Default_Organization',
+ 'name': 'Default Organization',
+ },
+ 'organization_id': 1,
+ 'sync_plan': None,
+ 'sync_plan_id': None,
+ }
+ ]
+ }
+ result = entities.Product(self.cfg, organization=1).search()
+ self.assertIsNone(result[0].sync_plan)
+
+ def test_host_with_image(self):
+ """Call :meth:`nailgun.entities.Host.search` for a host with image
+ assigned.
+
+ Ensure that the image entity was correctly fetched.
+
+ """
+ with mock.patch.object(EntitySearchMixin, 'search_json') as search_json:
+ # Image is set
+ search_json.return_value = {
+ 'results': [
+ {
+ 'id': 2,
+ 'name': 'host1',
+ 'organization': {'id': 1, 'name': 'Default Organization'},
+ 'organization_id': 1,
+ 'image_name': 'rhel7_image',
+ 'image_file': '/usr/share/imagefile/xyz7.img',
+ 'image_id': 1,
+ }
+ ]
+ }
+ result = entities.Host(self.cfg, organization=1).search()
+ self.assertIsNotNone(result[0].image)
+ self.assertEqual(result[0].image.id, 1)
+ # Image not set
+ search_json.return_value = {
+ 'results': [
+ {
+ 'id': 3,
+ 'name': 'host2',
+ 'organization': {'id': 1, 'name': 'Default Organization'},
+ 'organization_id': 1,
+ 'image_name': None,
+ 'image_file': '',
+ 'image_id': None,
+ }
+ ]
+ }
+ result = entities.Host(self.cfg, organization=1).search()
+ self.assertIsNone(result[0].image)
+
+
+class SearchNormalizeTestCase(TestCase):
+ """Tests for
+ :meth:`nailgun.entity_mixins.EntitySearchMixin.search_normalize`.
+ """
+
+ @classmethod
+ def setUpClass(cls):
+ """Set a server configuration at ``cls.cfg``."""
+ cls.cfg = config.ServerConfig('http://example.com')
+
+ def test_snapshot(self):
+ """Test :meth:`nailgun.entities.Snapshot.search_normalize`.
+
+ Assert that ``host_id`` was added with the correct host's id to search
+ results.
+ """
+ results = [
+ {'id': 1, 'name': 'foo'},
+ {'id': 2, 'name': 'bar', 'description': 'This is bar'},
+ ]
+ with mock.patch.object(
+ EntitySearchMixin,
+ 'search_normalize',
+ ) as search_normalize:
+ entities.Snapshot(self.cfg, host=4).search_normalize(results)
+ for args in search_normalize.call_args[0][0]:
+ self.assertIn('host_id', args)
+ self.assertEqual(args['host_id'], 4)
+
+ def test_sshkey(self):
+ """Test :meth:`nailgun.entities.SSHKey.search_normalize`.
+
+ Assert that ``user_id`` was added with the correct user's id to search
+ results.
+ """ + results = [ + {'id': 1, 'login': 'foo'}, + {'id': 2, 'login': 'bar'}, + ] + with mock.patch.object( + EntitySearchMixin, + 'search_normalize', + ) as search_normalize: + entities.SSHKey(self.cfg, user=4).search_normalize(results) + for args in search_normalize.call_args[0][0]: + self.assertIn('user_id', args) + self.assertEqual(args['user_id'], 4) + + def test_interface(self): + """Test :meth:`nailgun.entities.Interface.search_normalize`. + + Assert that ``host_id`` was added with correct host's id to search + results. + """ + results = [ + {'id': 1, 'name': 'foo'}, + {'id': 2, 'name': 'bar'}, + ] + with mock.patch.object( + EntitySearchMixin, + 'search_normalize', + ) as search_normalize: + entities.Interface(self.cfg, host=3).search_normalize(results) + for args in search_normalize.call_args[0][0]: + self.assertIn('host_id', args) + self.assertEqual(args['host_id'], 3) + + def test_host_with_image(self): + """Call :meth:`nailgun.entities.Host.read` for a host with image + assigned. + + Ensure that the image entity was correctly fetched. + """ + image = entities.Image(self.cfg, id=1, compute_resource=1) + host = entities.Host(self.cfg, id=1) + with mock.patch.object(EntityReadMixin, 'read_json') as read_json: + with mock.patch.object(EntityReadMixin, 'read') as read: + # Image was set + read_json.return_value = { + 'image_id': 1, + 'compute_resource_id': 1, + 'parameters': {}, + } + read.return_value = host + host = host.read() + self.assertTrue(hasattr(host, 'image')) + self.assertEqual(host.image.id, image.id) + # Image wasn't set + read_json.return_value = { + 'parameters': {}, + } + read.return_value = host + host = host.read() + self.assertTrue(hasattr(host, 'image')) + self.assertIsNone(host.image) + + +class UpdateTestCase(TestCase): + """Tests for :meth:`nailgun.entity_mixins.EntityUpdateMixin.update`.""" + + @classmethod + def setUpClass(cls): + """Set a server configuration at ``cls.cfg``.""" + cls.cfg = config.ServerConfig('http://example.com') + + def test_generic(self): + """Call ``update`` on a variety of entities.""" + entities_ = ( + entities.AbstractComputeResource(self.cfg), + entities.Architecture(self.cfg), + entities.ContentCredential(self.cfg), + entities.ComputeProfile(self.cfg), + entities.ConfigGroup(self.cfg), + entities.DiscoveryRule(self.cfg), + entities.Domain(self.cfg), + entities.Environment(self.cfg), + entities.GPGKey(self.cfg), + entities.Host(self.cfg), + entities.HostCollection(self.cfg), + entities.HostGroup(self.cfg), + entities.HTTPProxy(self.cfg), + entities.LifecycleEnvironment(self.cfg), + entities.Location(self.cfg), + entities.Media(self.cfg), + entities.Organization(self.cfg), + entities.ScapContents(self.cfg), + entities.SmartProxy(self.cfg), + entities.TailoringFile(self.cfg), + entities.User(self.cfg), + entities.UserGroup(self.cfg), + ) + for entity in entities_: + with self.subTest(entity): + + # Call update() + with mock.patch.object(entity, 'update_json') as update_json: + with mock.patch.object(entity, 'read') as read: + self.assertEqual(entity.update(), read.return_value) + self.assertEqual(update_json.call_count, 1) + self.assertEqual(update_json.call_args[0], (None,)) + self.assertEqual(read.call_count, 1) + self.assertEqual(read.call_args[0], ()) + + # Call update(fields) + fields = gen_integer() + with mock.patch.object(entity, 'update_json') as update_json: + with mock.patch.object(entity, 'read') as read: + self.assertEqual( + entity.update(fields), + read.return_value, + ) + self.assertEqual(update_json.call_count, 1) + 
self.assertEqual(update_json.call_args[0], (fields,))
+ self.assertEqual(read.call_count, 1)
+ self.assertEqual(read.call_args[0], ())
+
+
+class SearchPayloadTestCase(TestCase):
+ """Tests for extensions of ``search_payload``."""
+
+ @classmethod
+ def setUpClass(cls):
+ """Set a server configuration at ``cls.cfg``."""
+ cls.cfg = config.ServerConfig('http://example.com')
+
+ def test_generic(self):
+ """Instantiate a variety of entities and call ``search_payload``."""
+ entities_ = [(entities.ContentViewFilterRule, {'content_view_filter': 1})]
+
+ for entity, params in entities_:
+ with self.subTest():
+ self.assertIsInstance(entity(self.cfg, **params).search_payload(), dict)
+
+ def test_content_view_filter_rule(self):
+ """The ``errata_id`` field should be the Errata ID when sent to the server,
+ not the DB ID.
+ """
+ errata_kwargs = {
+ "id": 1,
+ "uuid": "1a321570-cd30-4622-abff-2290b47ef814",
+ "title": "Bird_Erratum",
+ "errata_id": "RHEA-2012:0003",
+ "issued": "2012-01-27",
+ "updated": "2012-01-27",
+ "severity": "",
+ "description": "Bird_Erratum",
+ "solution": "",
+ "summary": "",
+ "reboot_suggested": False,
+ "name": "Bird_Erratum",
+ "type": "security",
+ "cves": [],
+ "hosts_available_count": 0,
+ "hosts_applicable_count": 0,
+ "packages": ["stork-0.12-2.noarch"],
+ "module_streams": [
+ {
+ "name": "duck",
+ "stream": "0",
+ "version": "201809302113907",
+ "context": "deadbeef",
+ "arch": "noarch",
+ "id": 1,
+ "packages": ["duck-0.8-1.noarch"],
+ }
+ ],
+ }
+
+ with mock.patch.object(entities.Errata, 'read_json') as read_json:
+ read_json.return_value = errata_kwargs
+ payload = entities.ContentViewFilterRule(
+ self.cfg,
+ content_view_filter=1,
+ errata=1,
+ ).search_payload()
+ self.assertEqual("RHEA-2012:0003", payload['errata_id'])
+
+
+class UpdatePayloadTestCase(TestCase):
+ """Tests for extensions of ``update_payload``."""
+
+ @classmethod
+ def setUpClass(cls):
+ """Set a server configuration at ``cls.cfg``."""
+ cls.cfg = config.ServerConfig('http://example.com')
+
+ def test_generic(self):
+ """Instantiate a variety of entities and call ``update_payload``."""
+ entities_payloads = [
+ (entities.AbstractComputeResource, {'compute_resource': {}}),
+ (entities.Filter, {'filter': {}}),
+ (entities.ProvisioningTemplate, {'provisioning_template': {}}),
+ (entities.ReportTemplate, {'report_template': {}}),
+ (entities.DiscoveredHost, {'discovered_host': {}}),
+ (entities.DiscoveryRule, {'discovery_rule': {}}),
+ (entities.Domain, {'domain': {}}),
+ (entities.Environment, {'environment': {}}),
+ (entities.Host, {'host': {}}),
+ (entities.HostGroup, {'hostgroup': {}}),
+ (entities.HTTPProxy, {'http_proxy': {}}),
+ (entities.Location, {'location': {}}),
+ (entities.Media, {'medium': {}}),
+ (entities.OperatingSystem, {'operatingsystem': {}}),
+ (entities.Organization, {'organization': {}}),
+ (entities.Role, {'role': {}}),
+ (entities.Setting, {'setting': {}}),
+ (entities.SmartProxy, {'smart_proxy': {}}),
+ (entities.SmartVariable, {'smart_variable': {}}),
+ (entities.Subnet, {'subnet': {}}),
+ (entities.User, {'user': {}}),
+ (entities.UserGroup, {'usergroup': {}}),
+ (entities.VirtWhoConfig, {'foreman_virt_who_configure_config': {}}),
+ ]
+ for entity, payload in entities_payloads:
+ with self.subTest((entity, payload)):
+ self.assertEqual(entity(self.cfg).update_payload(), payload)
+
+ def test_syncplan_sync_date(self):
+ """Test ``update_payload`` for different syncplan sync_date formats."""
+ date_string = '2015-07-20 20:54:38'
+ date_datetime = datetime.strptime(date_string,
+                                          '%Y-%m-%d %H:%M:%S')
+        kwargs_responses = [
+            (
+                {'organization': 1},
+                {'organization_id': 1},
+            ),
+            (
+                {'organization': 1, 'sync_date': date_string},
+                {'organization_id': 1, 'sync_date': date_string},
+            ),
+            (
+                {'organization': 1, 'sync_date': date_datetime},
+                {'organization_id': 1, 'sync_date': date_string},
+            ),
+        ]
+        for kwargs, payload in kwargs_responses:
+            with self.subTest((kwargs, payload)):
+                self.assertEqual(
+                    entities.SyncPlan(self.cfg, **kwargs).update_payload(),
+                    payload,
+                )
+
+    def test_content_view_filter_rule(self):
+        """The ``errata_id`` field should be 'translated' from DB ID to Errata ID."""
+        errata_kwargs = {
+            "id": 1,
+            "uuid": "1a321570-cd30-4622-abff-2290b47ef814",
+            "title": "Bird_Erratum",
+            "errata_id": "RHEA-2012:0003",
+            "issued": "2012-01-27",
+            "updated": "2012-01-27",
+            "severity": "",
+            "description": "Bird_Erratum",
+            "solution": "",
+            "summary": "",
+            "reboot_suggested": False,
+            "name": "Bird_Erratum",
+            "type": "security",
+            "cves": [],
+            "hosts_available_count": 0,
+            "hosts_applicable_count": 0,
+            "packages": ["stork-0.12-2.noarch"],
+            "module_streams": [
+                {
+                    "name": "duck",
+                    "stream": "0",
+                    "version": "201809302113907",
+                    "context": "deadbeef",
+                    "arch": "noarch",
+                    "id": 1,
+                    "packages": ["duck-0.8-1.noarch"],
+                }
+            ],
+        }
+
+        with mock.patch.object(entities.Errata, 'read_json') as read_json:
+            read_json.return_value = errata_kwargs
+            payload = entities.ContentViewFilterRule(
+                self.cfg,
+                content_view_filter=1,
+                errata=1,
+            ).update_payload()
+            self.assertEqual("RHEA-2012:0003", payload['errata_id'])
+
+    def test_discovery_rule_search(self):
+        """Check whether ``DiscoveryRule`` updates its ``search_`` field.
+
+        The field should be renamed from ``search_`` to ``search`` when
+        ``update_payload`` is called.
+
+        """
+        payload = entities.DiscoveryRule(
+            self.cfg,
+            search_='foo',
+        ).update_payload()
+        self.assertNotIn('search_', payload['discovery_rule'])
+        self.assertIn('search', payload['discovery_rule'])
+
+    def test_image(self):
+        """Check that ``Image`` wraps its fields in an ``image`` sub dict.
+
+        The ``compute_resource`` field should be sent as
+        ``compute_resource_id`` inside the ``image`` dict when
+        ``update_payload`` is called.
+
+        """
+        payload = entities.Image(
+            self.cfg,
+            compute_resource=1,
+        ).update_payload()
+        self.assertEqual({'image': {'compute_resource_id': 1}}, payload)
+
+    def test_media_path(self):
+        """Check whether ``Media`` updates its ``path_`` field.
+
+        The field should be renamed from ``path_`` to ``path`` when
+        ``update_payload`` is called.
+
+        """
+        payload = entities.Media(self.cfg, path_='foo').update_payload()
+        self.assertNotIn('path_', payload['medium'])
+        self.assertIn('path', payload['medium'])
+
+    def test_hostcollection_updatable_fields(self):
+        """Check that only updatable fields end up in the payload.
+
+        A changed ``name`` should be included, while ``organization`` should
+        not be sent even if it was changed.
+        """
+        org1 = entities.Organization(self.cfg, name='org1')
+        org2 = entities.Organization(self.cfg, name='org2')
+        host_collection = entities.HostCollection(self.cfg, name='oldname', organization=org1)
+        host_collection.name = 'newname'
+        host_collection.organization_id = org2
+        payload = host_collection.update_payload()
+        self.assertEqual(payload['name'], 'newname')
+        self.assertNotIn('organization', payload.keys())  # organization NOT changed
+        self.assertNotIn('organization_id', payload.keys())  # organization NOT changed
+
+    def test_job_template(self):
+        """Create a :class:`nailgun.entities.JobTemplate`."""
+        payload = entities.JobTemplate(
+            self.cfg,
+            effective_user={'value': 'foo'},
+            name='brick system',
+            template='rm -rf --no-preserve-root /',
+        ).update_payload()
+        self.assertNotIn('effective_user', payload)
+        self.assertIn('effective_user', payload['job_template']['ssh'])
+
+    def test_organization_rh_repo_url(self):
+        """Check whether ``Organization`` updates its ``redhat_repository_url`` field.
+
+        The field should be copied from
+        ``p['organization']['redhat_repository_url']`` to
+        ``p['redhat_repository_url']``
+        when ``update_payload`` is called.
+        """
+        payload = entities.Organization(
+            self.cfg,
+            redhat_repository_url=["https://cdn.redhat.com"],
+        ).update_payload()
+        self.assertIn('redhat_repository_url', payload)
+
+    def test_os_default_template(self):
+        """Check that ``os_default_template`` sends ``template_kind_id`` and
+        ``provisioning_template_id`` only wrapped in the
+        ``os_default_template`` sub dict. See: `Redmine #21169`_.
+
+        .. _Redmine #21169: http://projects.theforeman.org/issues/21169
+        """
+        payload = entities.OSDefaultTemplate(
+            self.cfg,
+            operatingsystem=entities.OperatingSystem(self.cfg, id=1),
+            template_kind=entities.TemplateKind(self.cfg, id=2),
+            provisioning_template=entities.ProvisioningTemplate(self.cfg, id=3),
+        ).update_payload()
+        self.assertNotIn('template_kind_id', payload)
+        self.assertNotIn('provisioning_template_id', payload)
+        self.assertIn('template_kind_id', payload['os_default_template'])
+        self.assertIn('provisioning_template_id', payload['os_default_template'])
+
+    def test_subnet_from(self):
+        """Check whether ``Subnet`` updates its ``from_`` field.
+
+        The field should be renamed from ``from_`` to ``from`` when
+        ``update_payload`` is called.
+
+        """
+        payload = entities.Subnet(
+            self.cfg,
+            from_='foo',
+        ).update_payload()
+        self.assertNotIn('from_', payload['subnet'])
+        self.assertIn('from', payload['subnet'])
+
+
+# 2. Tests for entity-specific methods. ---------------------------------- {{{1
+
+
+class GenericTestCase(TestCase):
+    """Generic tests for the helper methods on entities."""
+
+    @classmethod
+    def setUpClass(cls):
+        """Create test data as ``cls.methods_requests``.
+
+        ``methods_requests`` is a tuple of two-tuples, like so::
+
+            (
+                (entity_obj1.method, 'post'),
+                (entity_obj2.method, 'post'),
+                (entity_obj3.method1, 'get'),
+                (entity_obj3.method2, 'put'),
+            )
+
+        """
+        cfg = config.ServerConfig('http://example.com')
+        generic = {'server_config': cfg, 'id': 1}
+        external_usergroup = {'server_config': cfg, 'id': 1, 'usergroup': 2}
+        sync_plan = {'server_config': cfg, 'id': 1, 'organization': 2}
+        hostsubscription = {'server_config': cfg, 'host': 1}
+        cls.methods_requests = (
+            (entities.AbstractComputeResource(**generic).available_flavors, 'get'),
+            (entities.AbstractComputeResource(**generic).available_images, 'get'),
+            (entities.AbstractComputeResource(**generic).available_zones, 'get'),
+            (entities.AbstractComputeResource(**generic).available_networks, 'get'),
+            (entities.AbstractComputeResource(**generic).associate, 'put'),
+            (entities.AbstractComputeResource(**generic).images, 'get'),
+            (entities.ActivationKey(**generic).add_host_collection, 'post'),
+            (entities.ActivationKey(**generic).add_subscriptions, 'put'),
+            (entities.ActivationKey(**generic).remove_subscriptions, 'put'),
+            (entities.ActivationKey(**generic).subscriptions, 'get'),
+            (entities.ActivationKey(**generic).content_override, 'put'),
+            (entities.ActivationKey(**generic).product_content, 'get'),
+            (entities.ActivationKey(**generic).remove_host_collection, 'put'),
+            (entities.Capsule(**generic).content_add_lifecycle_environment, 'post'),
+            (entities.ArfReport(**generic).download_html, 'get'),
+            (entities.Capsule(**generic).content_get_sync, 'get'),
+            (entities.Capsule(**generic).content_lifecycle_environments, 'get'),
+            (entities.Capsule(**generic).content_sync, 'post'),
+            (entities.Role(**generic).clone, 'post'),
+            (entities.ProvisioningTemplate(**generic).build_pxe_default, 'post'),
+            (entities.ProvisioningTemplate(**generic).clone, 'post'),
+            (entities.ReportTemplate(**generic).clone, 'post'),
+            (entities.ContentView(**generic).copy, 'post'),
+            (entities.ContentView(**generic).publish, 'post'),
+            (entities.ContentViewVersion(**generic).incremental_update, 'post'),
+            (entities.ContentViewVersion(**generic).promote, 'post'),
+            (entities.DiscoveredHost(cfg).facts, 'post'),
+            (entities.DiscoveredHost(**generic).refresh_facts, 'put'),
+            (entities.DiscoveredHost(**generic).reboot, 'put'),
+            (entities.Environment(**generic).list_scparams, 'get'),
+            (entities.Errata(**generic).compare, 'get'),
+            (entities.ExternalUserGroup(**external_usergroup).refresh, 'put'),
+            (entities.ForemanTask(cfg).summary, 'get'),
+            (entities.Organization(**generic).download_debug_certificate, 'get'),
+            (entities.Host(**generic).add_puppetclass, 'post'),
+            (entities.Host(**generic).enc, 'get'),
+            (entities.Host(**generic).errata, 'get'),
+            (entities.Host(**generic).errata_apply, 'put'),
+            (entities.Host(**generic).get_facts, 'get'),
+            (entities.Host(**generic).install_content, 'put'),
+            (entities.Host(**generic).list_scparams, 'get'),
+            (entities.Host(**generic).list_smart_variables, 'get'),
+            (entities.Host(**generic).module_streams, 'get'),
+            (entities.Host(**generic).packages, 'get'),
+            (entities.Host(**generic).power, 'put'),
+            (entities.Host(**generic).upload_facts, 'post'),
+            (entities.Host(**generic).traces, 'get'),
+            (entities.Host(**generic).resolve_traces, 'put'),
+            (entities.Host(**generic).bulk_destroy, 'put'),
+            (entities.Host(**generic).bulk_traces, 'post'),
+            (entities.Host(**generic).bulk_resolve_traces, 'put'),
+            (entities.HostGroup(**generic).add_puppetclass, 'post'),
+
(entities.HostGroup(**generic).clone, 'post'), + (entities.HostGroup(**generic).list_scparams, 'get'), + (entities.HostGroup(**generic).list_smart_variables, 'get'), + (entities.HostSubscription(**hostsubscription).add_subscriptions, 'put'), + (entities.HostSubscription(**hostsubscription).remove_subscriptions, 'put'), + (entities.Product(**generic).sync, 'post'), + (entities.ProductBulkAction(**generic).destroy, 'put'), + (entities.ProductBulkAction(**generic).sync, 'put'), + (entities.ProductBulkAction(**generic).http_proxy, 'put'), + (entities.ProductBulkAction(**generic).sync_plan, 'put'), + (entities.PuppetClass(**generic).list_scparams, 'get'), + (entities.PuppetClass(**generic).list_smart_variables, 'get'), + (entities.RHCIDeployment(**generic).deploy, 'put'), + (entities.RecurringLogic(**generic).cancel, 'post'), + (entities.Repository(**generic).errata, 'get'), + (entities.Repository(**generic).packages, 'get'), + (entities.Repository(**generic).module_streams, 'get'), + (entities.Repository(**generic).remove_content, 'put'), + (entities.Repository(**generic).sync, 'post'), + (entities.ScapContents(**generic).xml, 'get'), + (entities.SmartProxy(**generic).import_puppetclasses, 'post'), + (entities.SmartProxy(**generic).refresh, 'put'), + (entities.SyncPlan(**sync_plan).add_products, 'put'), + (entities.SyncPlan(**sync_plan).remove_products, 'put'), + (entities.Template(**generic).imports, 'post'), + (entities.Template(**generic).exports, 'post'), + (entities.VirtWhoConfig(**generic).deploy_script, 'get'), + ) + repo_set = {'server_config': cfg, 'id': 1, 'product': 2} + snapshot = {'server_config': cfg, 'id': 'snapshot-1', 'host': 1} + cls.intelligent_methods_requests = ( + (entities.RepositorySet(**repo_set).available_repositories, 'get', {'product_id': 2}), + (entities.RepositorySet(**repo_set).disable, 'put', {'product_id': 2}), + (entities.RepositorySet(**repo_set).enable, 'put', {'product_id': 2}), + (entities.Snapshot(**snapshot).revert, 'put', {}), + ) + + def test_generic(self): + """Check that a variety of helper methods are sane. + + Assert that: + + * Each method has a correct signature. + * Each method calls `client.*` once. + * Each method passes the right arguments to `client.*`. + * Each method calls `entities._handle_response` once. + * The result of `_handle_response(…)` is the return value. + + """ + for method, request in self.methods_requests: + with self.subTest((method, request)): + self.assertEqual(inspect.getfullargspec(method), EXPECTED_ARGSPEC) + kwargs = {'kwarg': gen_integer()} + with mock.patch.object(entities, '_handle_response') as handlr: + with mock.patch.object(client, request) as client_request: + response = method(**kwargs) + self.assertEqual(client_request.call_count, 1) + self.assertEqual(len(client_request.call_args[0]), 1) + self.assertEqual(client_request.call_args[1], kwargs) + self.assertEqual(handlr.call_count, 1) + self.assertEqual(handlr.return_value, response) + + def test_intelligent(self): + """Check that intelligent methods that send additional data are sane. + + Assert that: + + * Each method calls `client.*` once. + * Each method passes the right arguments to `client.*`. + * Each method calls `entities._handle_response` once. + * The result of `_handle_response(…)` is the return value. 
+
+        """
+        for method, request, data in self.intelligent_methods_requests:
+            with self.subTest((method, request)):
+                kwargs = {'kwarg': gen_integer(), 'data': data}
+                with mock.patch.object(entities, '_handle_response') as handlr:
+                    with mock.patch.object(client, request) as client_request:
+                        response = method(**kwargs)
+                self.assertEqual(client_request.call_count, 1)
+                self.assertEqual(len(client_request.call_args[0]), 1)
+                self.assertEqual(client_request.call_args[1], kwargs)
+                self.assertEqual(handlr.call_count, 1)
+                self.assertEqual(handlr.return_value, response)
+
+
+class ForemanStatusTestCase(TestCase):
+    """Tests for :class:`nailgun.entities.ForemanStatus`."""
+
+    def setUp(self):
+        """Set a server configuration at ``self.cfg``."""
+        self.cfg = config.ServerConfig('http://example.com')
+        self.entity = entities.ForemanStatus(self.cfg)
+        self.read_json_patcher = mock.patch.object(self.entity, 'read_json')
+
+    def test_read(self):
+        """Ensure ``read`` and ``read_json`` are called once."""
+        read_json = self.read_json_patcher.start()
+        read_json.return_value = {
+            'result': 'ok',
+            'status': 200,
+            'version': '1.19.0',
+            'api_version': 2,
+        }
+        self.entity.read()
+        self.assertEqual(read_json.call_count, 1)
+        self.read_json_patcher.stop()
+
+
+class FileTestCase(TestCase):
+    """Tests for :class:`nailgun.entities.File`."""
+
+    def test_to_json(self):
+        """Check JSON serialisation of nested entities."""
+        file_kwargs = {
+            'id': 1,
+            'name': 'test_file.txt',
+            'path': 'test_file.txt',
+            'uuid': '3a013738-e5b8-43b2-81f5-3732b6e42776',
+            'checksum': ('16c946e116072838b213f622298b74baa75c52c8fee50a6230b4680e3c136fb1'),
+        }
+        cfg = config.ServerConfig(url='https://foo.bar', verify=False, auth=('foo', 'bar'))
+        repo_kwargs = {'id': 3, 'content_type': 'file'}
+        repo = entities.Repository(cfg, **repo_kwargs)
+        file = entities.File(cfg, repository=repo, **file_kwargs)
+        file_kwargs['repository'] = repo_kwargs
+        self.assertDictEqual(file_kwargs, json.loads(file.to_json()))
+
+
+class ForemanTaskTestCase(TestCase):
+    """Tests for :class:`nailgun.entities.ForemanTask`."""
+
+    def setUp(self):
+        """Set ``self.foreman_task``."""
+        self.foreman_task = entities.ForemanTask(
+            config.ServerConfig('http://example.com'),
+            id=gen_integer(min_value=1),
+        )
+
+    def test_poll(self):
+        """Call :meth:`nailgun.entities.ForemanTask.poll`."""
+        for kwargs in (
+            {},
+            {'poll_rate': gen_integer()},
+            {'timeout': gen_integer()},
+            {'poll_rate': gen_integer(), 'timeout': gen_integer()},
+        ):
+            with self.subTest(kwargs):
+                with mock.patch.object(entities, '_poll_task') as poll_task:
+                    self.foreman_task.poll(**kwargs)
+                self.assertEqual(poll_task.call_count, 1)
+                self.assertEqual(
+                    poll_task.call_args[0][2],
+                    kwargs.get('poll_rate', None),
+                )
+                self.assertEqual(
+                    poll_task.call_args[0][3],
+                    kwargs.get('timeout', None),
+                )
+
+
+class ContentUploadTestCase(TestCase):
+    """Tests for :class:`nailgun.entities.ContentUpload`."""
+
+    def setUp(self):
+        """Set ``self.content_upload``."""
+        server_config = config.ServerConfig('http://example.com')
+        repo = entities.Repository(
+            server_config,
+            id=gen_integer(min_value=1),
+        )
+        self.content_upload = entities.ContentUpload(server_config, repository=repo)
+
+    def test_content_upload_create(self):
+        """Test ``nailgun.entities.ContentUpload.create``.
+
+        Make the (mock) server return a "success" status. Make the same
+        assertions as for
+        :meth:`tests.test_entities.GenericTestCase.test_generic`.
+ + """ + with mock.patch.object(client, 'post') as post: + self.content_upload.create() + self.assertEqual(post.call_count, 1) + self.assertEqual(len(post.call_args[0]), 2) + + def test_content_upload_delete(self): + """Test ``nailgun.entities.ContentUpload.delete``. + + Make the (mock) server return a "success" status. Make the same + assertions as for + :meth:`tests.test_entities.GenericTestCase.test_generic`. +