diff --git "a/codeparrot-valid_1006.txt" "b/codeparrot-valid_1006.txt"
new file mode 100644
--- /dev/null
+++ "b/codeparrot-valid_1006.txt"
@@ -0,0 +1,10000 @@
+
+        @asyncio.coroutine
+        def connect():
+            t, p = yield from self.loop.connect_read_pipe(lambda: proto,
+                                                          master_read_obj)
+            self.assertIs(p, proto)
+            self.assertIs(t, proto.transport)
+            self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
+            self.assertEqual(0, proto.nbytes)
+
+        self.loop.run_until_complete(connect())
+
+        os.write(slave, b'1')
+        test_utils.run_until(self.loop, lambda: proto.nbytes)
+        self.assertEqual(1, proto.nbytes)
+
+        os.write(slave, b'2345')
+        test_utils.run_until(self.loop, lambda: proto.nbytes >= 5)
+        self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
+        self.assertEqual(5, proto.nbytes)
+
+        os.close(slave)
+        self.loop.run_until_complete(proto.done)
+        self.assertEqual(
+            ['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state)
+        # extra info is available
+        self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
+
+    @unittest.skipUnless(sys.platform != 'win32',
+                         "Don't support pipes for Windows")
+    def test_write_pipe(self):
+        rpipe, wpipe = os.pipe()
+        pipeobj = io.open(wpipe, 'wb', 1024)
+
+        proto = MyWritePipeProto(loop=self.loop)
+        connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
+        transport, p = self.loop.run_until_complete(connect)
+        self.assertIs(p, proto)
+        self.assertIs(transport, proto.transport)
+        self.assertEqual('CONNECTED', proto.state)
+
+        transport.write(b'1')
+
+        data = bytearray()
+        def reader(data):
+            chunk = os.read(rpipe, 1024)
+            data += chunk
+            return len(data)
+
+        test_utils.run_until(self.loop, lambda: reader(data) >= 1)
+        self.assertEqual(b'1', data)
+
+        transport.write(b'2345')
+        test_utils.run_until(self.loop, lambda: reader(data) >= 5)
+        self.assertEqual(b'12345', data)
+        self.assertEqual('CONNECTED', proto.state)
+
+        os.close(rpipe)
+
+        # extra info is available
+        self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
+
+        # close connection
+        proto.transport.close()
+        self.loop.run_until_complete(proto.done)
+        self.assertEqual('CLOSED', proto.state)
+
+    @unittest.skipUnless(sys.platform != 'win32',
+                         "Don't support pipes for Windows")
+    def test_write_pipe_disconnect_on_close(self):
+        rsock, wsock = test_utils.socketpair()
+        rsock.setblocking(False)
+        pipeobj = io.open(wsock.detach(), 'wb', 1024)
+
+        proto = MyWritePipeProto(loop=self.loop)
+        connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
+        transport, p = self.loop.run_until_complete(connect)
+        self.assertIs(p, proto)
+        self.assertIs(transport, proto.transport)
+        self.assertEqual('CONNECTED', proto.state)
+
+        transport.write(b'1')
+        data = self.loop.run_until_complete(self.loop.sock_recv(rsock, 1024))
+        self.assertEqual(b'1', data)
+
+        rsock.close()
+
+        self.loop.run_until_complete(proto.done)
+        self.assertEqual('CLOSED', proto.state)
+
+    @unittest.skipUnless(sys.platform != 'win32',
+                         "Don't support pipes for Windows")
+    # select, poll and kqueue don't support character devices (PTY) on Mac OS X
+    # older than 10.6 (Snow Leopard)
+    @support.requires_mac_ver(10, 6)
+    def test_write_pty(self):
+        master, slave = os.openpty()
+        slave_write_obj = io.open(slave, 'wb', 0)
+
+        proto = MyWritePipeProto(loop=self.loop)
+        connect = self.loop.connect_write_pipe(lambda: proto, slave_write_obj)
+        transport, p = self.loop.run_until_complete(connect)
+        self.assertIs(p, proto)
+        self.assertIs(transport, proto.transport)
+        self.assertEqual('CONNECTED', proto.state)
+
+        transport.write(b'1')
+
+        data = bytearray()
+        def reader(data):
+            chunk = os.read(master, 1024)
+            data += chunk
+            return len(data)
+
+        test_utils.run_until(self.loop, lambda: reader(data) >= 1,
+                             timeout=10)
+        self.assertEqual(b'1', data)
+
+        transport.write(b'2345')
+        test_utils.run_until(self.loop, lambda: reader(data) >= 5,
+                             timeout=10)
+        self.assertEqual(b'12345', data)
+        self.assertEqual('CONNECTED', proto.state)
+
+        os.close(master)
+
+        # extra info is available
+        self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
+
+        # close connection
+        proto.transport.close()
+        self.loop.run_until_complete(proto.done)
+        self.assertEqual('CLOSED', proto.state)
+
+    def test_prompt_cancellation(self):
+        r, w = test_utils.socketpair()
+        r.setblocking(False)
+        f = self.loop.sock_recv(r, 1)
+        ov = getattr(f, 'ov', None)
+        if ov is not None:
+            self.assertTrue(ov.pending)
+
+        @asyncio.coroutine
+        def main():
+            try:
+                self.loop.call_soon(f.cancel)
+                yield from f
+            except asyncio.CancelledError:
+                res = 'cancelled'
+            else:
+                res = None
+            finally:
+                self.loop.stop()
+            return res
+
+        start = time.monotonic()
+        t = asyncio.Task(main(), loop=self.loop)
+        self.loop.run_forever()
+        elapsed = time.monotonic() - start
+
+        self.assertLess(elapsed, 0.1)
+        self.assertEqual(t.result(), 'cancelled')
+        self.assertRaises(asyncio.CancelledError, f.result)
+        if ov is not None:
+            self.assertFalse(ov.pending)
+        self.loop._stop_serving(r)
+
+        r.close()
+        w.close()
+
+    def test_timeout_rounding(self):
+        def _run_once():
+            self.loop._run_once_counter += 1
+            orig_run_once()
+
+        orig_run_once = self.loop._run_once
+        self.loop._run_once_counter = 0
+        self.loop._run_once = _run_once
+
+        @asyncio.coroutine
+        def wait():
+            loop = self.loop
+            yield from asyncio.sleep(1e-2, loop=loop)
+            yield from asyncio.sleep(1e-4, loop=loop)
+            yield from asyncio.sleep(1e-6, loop=loop)
+            yield from asyncio.sleep(1e-8, loop=loop)
+            yield from asyncio.sleep(1e-10, loop=loop)
+
+        self.loop.run_until_complete(wait())
+        # The ideal number of calls is 12, but on some platforms the selector
+        # may sleep a little bit less than the timeout, depending on the
+        # resolution of the clock used by the kernel. Tolerate a few useless
+        # calls on these platforms.
+ self.assertLessEqual(self.loop._run_once_counter, 20, + {'clock_resolution': self.loop._clock_resolution, + 'selector': self.loop._selector.__class__.__name__}) + + def test_sock_connect_address(self): + addresses = [(socket.AF_INET, ('www.python.org', 80))] + if support.IPV6_ENABLED: + addresses.extend(( + (socket.AF_INET6, ('www.python.org', 80)), + (socket.AF_INET6, ('www.python.org', 80, 0, 0)), + )) + + for family, address in addresses: + for sock_type in (socket.SOCK_STREAM, socket.SOCK_DGRAM): + sock = socket.socket(family, sock_type) + with sock: + sock.setblocking(False) + connect = self.loop.sock_connect(sock, address) + with self.assertRaises(ValueError) as cm: + self.loop.run_until_complete(connect) + self.assertIn('address must be resolved', + str(cm.exception)) + + def test_remove_fds_after_closing(self): + loop = self.create_event_loop() + callback = lambda: None + r, w = test_utils.socketpair() + self.addCleanup(r.close) + self.addCleanup(w.close) + loop.add_reader(r, callback) + loop.add_writer(w, callback) + loop.close() + self.assertFalse(loop.remove_reader(r)) + self.assertFalse(loop.remove_writer(w)) + + def test_add_fds_after_closing(self): + loop = self.create_event_loop() + callback = lambda: None + r, w = test_utils.socketpair() + self.addCleanup(r.close) + self.addCleanup(w.close) + loop.close() + with self.assertRaises(RuntimeError): + loop.add_reader(r, callback) + with self.assertRaises(RuntimeError): + loop.add_writer(w, callback) + + def test_close_running_event_loop(self): + @asyncio.coroutine + def close_loop(loop): + self.loop.close() + + coro = close_loop(self.loop) + with self.assertRaises(RuntimeError): + self.loop.run_until_complete(coro) + + +class SubprocessTestsMixin: + + def check_terminated(self, returncode): + if sys.platform == 'win32': + self.assertIsInstance(returncode, int) + # expect 1 but sometimes get 0 + else: + self.assertEqual(-signal.SIGTERM, returncode) + + def check_killed(self, returncode): + if sys.platform == 'win32': + self.assertIsInstance(returncode, int) + # expect 1 but sometimes get 0 + else: + self.assertEqual(-signal.SIGKILL, returncode) + + def test_subprocess_exec(self): + prog = os.path.join(os.path.dirname(__file__), 'echo.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, prog) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + self.assertEqual('CONNECTED', proto.state) + + stdin = transp.get_pipe_transport(0) + stdin.write(b'Python The Winner') + self.loop.run_until_complete(proto.got_data[1].wait()) + transp.close() + self.loop.run_until_complete(proto.completed) + self.check_terminated(proto.returncode) + self.assertEqual(b'Python The Winner', proto.data[1]) + + def test_subprocess_interactive(self): + prog = os.path.join(os.path.dirname(__file__), 'echo.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, prog) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + self.assertEqual('CONNECTED', proto.state) + + try: + stdin = transp.get_pipe_transport(0) + stdin.write(b'Python ') + self.loop.run_until_complete(proto.got_data[1].wait()) + proto.got_data[1].clear() + self.assertEqual(b'Python ', proto.data[1]) + + stdin.write(b'The Winner') + 
self.loop.run_until_complete(proto.got_data[1].wait()) + self.assertEqual(b'Python The Winner', proto.data[1]) + finally: + transp.close() + + self.loop.run_until_complete(proto.completed) + self.check_terminated(proto.returncode) + + def test_subprocess_shell(self): + connect = self.loop.subprocess_shell( + functools.partial(MySubprocessProtocol, self.loop), + 'echo Python') + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + + transp.get_pipe_transport(0).close() + self.loop.run_until_complete(proto.completed) + self.assertEqual(0, proto.returncode) + self.assertTrue(all(f.done() for f in proto.disconnects.values())) + self.assertEqual(proto.data[1].rstrip(b'\r\n'), b'Python') + self.assertEqual(proto.data[2], b'') + + def test_subprocess_exitcode(self): + connect = self.loop.subprocess_shell( + functools.partial(MySubprocessProtocol, self.loop), + 'exit 7', stdin=None, stdout=None, stderr=None) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.completed) + self.assertEqual(7, proto.returncode) + + def test_subprocess_close_after_finish(self): + connect = self.loop.subprocess_shell( + functools.partial(MySubprocessProtocol, self.loop), + 'exit 7', stdin=None, stdout=None, stderr=None) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.assertIsNone(transp.get_pipe_transport(0)) + self.assertIsNone(transp.get_pipe_transport(1)) + self.assertIsNone(transp.get_pipe_transport(2)) + self.loop.run_until_complete(proto.completed) + self.assertEqual(7, proto.returncode) + self.assertIsNone(transp.close()) + + def test_subprocess_kill(self): + prog = os.path.join(os.path.dirname(__file__), 'echo.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, prog) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + + transp.kill() + self.loop.run_until_complete(proto.completed) + self.check_killed(proto.returncode) + + def test_subprocess_terminate(self): + prog = os.path.join(os.path.dirname(__file__), 'echo.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, prog) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + + transp.terminate() + self.loop.run_until_complete(proto.completed) + self.check_terminated(proto.returncode) + + @unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP") + def test_subprocess_send_signal(self): + prog = os.path.join(os.path.dirname(__file__), 'echo.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, prog) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + + transp.send_signal(signal.SIGHUP) + self.loop.run_until_complete(proto.completed) + self.assertEqual(-signal.SIGHUP, proto.returncode) + + def test_subprocess_stderr(self): + prog = os.path.join(os.path.dirname(__file__), 'echo2.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, 
prog) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + + stdin = transp.get_pipe_transport(0) + stdin.write(b'test') + + self.loop.run_until_complete(proto.completed) + + transp.close() + self.assertEqual(b'OUT:test', proto.data[1]) + self.assertTrue(proto.data[2].startswith(b'ERR:test'), proto.data[2]) + self.assertEqual(0, proto.returncode) + + def test_subprocess_stderr_redirect_to_stdout(self): + prog = os.path.join(os.path.dirname(__file__), 'echo2.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, prog, stderr=subprocess.STDOUT) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + + stdin = transp.get_pipe_transport(0) + self.assertIsNotNone(transp.get_pipe_transport(1)) + self.assertIsNone(transp.get_pipe_transport(2)) + + stdin.write(b'test') + self.loop.run_until_complete(proto.completed) + self.assertTrue(proto.data[1].startswith(b'OUT:testERR:test'), + proto.data[1]) + self.assertEqual(b'', proto.data[2]) + + transp.close() + self.assertEqual(0, proto.returncode) + + def test_subprocess_close_client_stream(self): + prog = os.path.join(os.path.dirname(__file__), 'echo3.py') + + connect = self.loop.subprocess_exec( + functools.partial(MySubprocessProtocol, self.loop), + sys.executable, prog) + transp, proto = self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.connected) + + stdin = transp.get_pipe_transport(0) + stdout = transp.get_pipe_transport(1) + stdin.write(b'test') + self.loop.run_until_complete(proto.got_data[1].wait()) + self.assertEqual(b'OUT:test', proto.data[1]) + + stdout.close() + self.loop.run_until_complete(proto.disconnects[1]) + stdin.write(b'xxx') + self.loop.run_until_complete(proto.got_data[2].wait()) + if sys.platform != 'win32': + self.assertEqual(b'ERR:BrokenPipeError', proto.data[2]) + else: + # After closing the read-end of a pipe, writing to the + # write-end using os.write() fails with errno==EINVAL and + # GetLastError()==ERROR_INVALID_NAME on Windows!?! (Using + # WriteFile() we get ERROR_BROKEN_PIPE as expected.) 
+ self.assertEqual(b'ERR:OSError', proto.data[2]) + transp.close() + self.loop.run_until_complete(proto.completed) + self.check_terminated(proto.returncode) + + def test_subprocess_wait_no_same_group(self): + # start the new process in a new session + connect = self.loop.subprocess_shell( + functools.partial(MySubprocessProtocol, self.loop), + 'exit 7', stdin=None, stdout=None, stderr=None, + start_new_session=True) + _, proto = yield self.loop.run_until_complete(connect) + self.assertIsInstance(proto, MySubprocessProtocol) + self.loop.run_until_complete(proto.completed) + self.assertEqual(7, proto.returncode) + + def test_subprocess_exec_invalid_args(self): + @asyncio.coroutine + def connect(**kwds): + yield from self.loop.subprocess_exec( + asyncio.SubprocessProtocol, + 'pwd', **kwds) + + with self.assertRaises(ValueError): + self.loop.run_until_complete(connect(universal_newlines=True)) + with self.assertRaises(ValueError): + self.loop.run_until_complete(connect(bufsize=4096)) + with self.assertRaises(ValueError): + self.loop.run_until_complete(connect(shell=True)) + + def test_subprocess_shell_invalid_args(self): + @asyncio.coroutine + def connect(cmd=None, **kwds): + if not cmd: + cmd = 'pwd' + yield from self.loop.subprocess_shell( + asyncio.SubprocessProtocol, + cmd, **kwds) + + with self.assertRaises(ValueError): + self.loop.run_until_complete(connect(['ls', '-l'])) + with self.assertRaises(ValueError): + self.loop.run_until_complete(connect(universal_newlines=True)) + with self.assertRaises(ValueError): + self.loop.run_until_complete(connect(bufsize=4096)) + with self.assertRaises(ValueError): + self.loop.run_until_complete(connect(shell=False)) + + +if sys.platform == 'win32': + + class SelectEventLoopTests(EventLoopTestsMixin, test_utils.TestCase): + + def create_event_loop(self): + return asyncio.SelectorEventLoop() + + class ProactorEventLoopTests(EventLoopTestsMixin, + SubprocessTestsMixin, + test_utils.TestCase): + + def create_event_loop(self): + return asyncio.ProactorEventLoop() + + def test_create_ssl_connection(self): + raise unittest.SkipTest("IocpEventLoop incompatible with SSL") + + def test_create_server_ssl(self): + raise unittest.SkipTest("IocpEventLoop incompatible with SSL") + + def test_create_server_ssl_verify_failed(self): + raise unittest.SkipTest("IocpEventLoop incompatible with SSL") + + def test_create_server_ssl_match_failed(self): + raise unittest.SkipTest("IocpEventLoop incompatible with SSL") + + def test_create_server_ssl_verified(self): + raise unittest.SkipTest("IocpEventLoop incompatible with SSL") + + def test_reader_callback(self): + raise unittest.SkipTest("IocpEventLoop does not have add_reader()") + + def test_reader_callback_cancel(self): + raise unittest.SkipTest("IocpEventLoop does not have add_reader()") + + def test_writer_callback(self): + raise unittest.SkipTest("IocpEventLoop does not have add_writer()") + + def test_writer_callback_cancel(self): + raise unittest.SkipTest("IocpEventLoop does not have add_writer()") + + def test_create_datagram_endpoint(self): + raise unittest.SkipTest( + "IocpEventLoop does not have create_datagram_endpoint()") + + def test_remove_fds_after_closing(self): + raise unittest.SkipTest("IocpEventLoop does not have add_reader()") +else: + from asyncio import selectors + + class UnixEventLoopTestsMixin(EventLoopTestsMixin): + def setUp(self): + super().setUp() + watcher = asyncio.SafeChildWatcher() + watcher.attach_loop(self.loop) + asyncio.set_child_watcher(watcher) + + def tearDown(self): + 
asyncio.set_child_watcher(None) + super().tearDown() + + if hasattr(selectors, 'KqueueSelector'): + class KqueueEventLoopTests(UnixEventLoopTestsMixin, + SubprocessTestsMixin, + test_utils.TestCase): + + def create_event_loop(self): + return asyncio.SelectorEventLoop( + selectors.KqueueSelector()) + + # kqueue doesn't support character devices (PTY) on Mac OS X older + # than 10.9 (Maverick) + @support.requires_mac_ver(10, 9) + # Issue #20667: KqueueEventLoopTests.test_read_pty_output() + # hangs on OpenBSD 5.5 + @unittest.skipIf(sys.platform.startswith('openbsd'), + 'test hangs on OpenBSD') + def test_read_pty_output(self): + super().test_read_pty_output() + + # kqueue doesn't support character devices (PTY) on Mac OS X older + # than 10.9 (Maverick) + @support.requires_mac_ver(10, 9) + def test_write_pty(self): + super().test_write_pty() + + if hasattr(selectors, 'EpollSelector'): + class EPollEventLoopTests(UnixEventLoopTestsMixin, + SubprocessTestsMixin, + test_utils.TestCase): + + def create_event_loop(self): + return asyncio.SelectorEventLoop(selectors.EpollSelector()) + + if hasattr(selectors, 'PollSelector'): + class PollEventLoopTests(UnixEventLoopTestsMixin, + SubprocessTestsMixin, + test_utils.TestCase): + + def create_event_loop(self): + return asyncio.SelectorEventLoop(selectors.PollSelector()) + + # Should always exist. + class SelectEventLoopTests(UnixEventLoopTestsMixin, + SubprocessTestsMixin, + test_utils.TestCase): + + def create_event_loop(self): + return asyncio.SelectorEventLoop(selectors.SelectSelector()) + + +def noop(*args): + pass + + +class HandleTests(test_utils.TestCase): + + def setUp(self): + self.loop = mock.Mock() + self.loop.get_debug.return_value = True + + def test_handle(self): + def callback(*args): + return args + + args = () + h = asyncio.Handle(callback, args, self.loop) + self.assertIs(h._callback, callback) + self.assertIs(h._args, args) + self.assertFalse(h._cancelled) + + h.cancel() + self.assertTrue(h._cancelled) + + def test_handle_from_handle(self): + def callback(*args): + return args + h1 = asyncio.Handle(callback, (), loop=self.loop) + self.assertRaises( + AssertionError, asyncio.Handle, h1, (), self.loop) + + def test_callback_with_exception(self): + def callback(): + raise ValueError() + + self.loop = mock.Mock() + self.loop.call_exception_handler = mock.Mock() + + h = asyncio.Handle(callback, (), self.loop) + h._run() + + self.loop.call_exception_handler.assert_called_with({ + 'message': test_utils.MockPattern('Exception in callback.*'), + 'exception': mock.ANY, + 'handle': h, + 'source_traceback': h._source_traceback, + }) + + def test_handle_weakref(self): + wd = weakref.WeakValueDictionary() + h = asyncio.Handle(lambda: None, (), self.loop) + wd['h'] = h # Would fail without __weakref__ slot. 
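+
+    # A minimal sketch of what the __weakref__ slot exercised above enables
+    # (illustrative only, not part of the original suite; 'loop' stands in
+    # for any event loop):
+    #
+    #     wd = weakref.WeakValueDictionary()
+    #     h = asyncio.Handle(lambda: None, (), loop)
+    #     wd['h'] = h            # storing needs Handle.__weakref__
+    #     del h                  # drop the last strong reference...
+    #     assert 'h' not in wd   # ...and CPython's refcounting clears the entry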
+
+    def test_handle_repr(self):
+        self.loop.get_debug.return_value = False
+
+        # simple function
+        h = asyncio.Handle(noop, (1, 2), self.loop)
+        filename, lineno = test_utils.get_function_source(noop)
+        self.assertEqual(repr(h),
+                        '<Handle noop(1, 2) at %s:%s>'
+                        % (filename, lineno))
+
+        # cancelled handle
+        h.cancel()
+        self.assertEqual(repr(h),
+                        '<Handle cancelled>')
+
+        # decorated function
+        cb = asyncio.coroutine(noop)
+        h = asyncio.Handle(cb, (), self.loop)
+        self.assertEqual(repr(h),
+                        '<Handle noop() at %s:%s>'
+                        % (filename, lineno))
+
+        # partial function
+        cb = functools.partial(noop, 1, 2)
+        h = asyncio.Handle(cb, (3,), self.loop)
+        regex = (r'^<Handle noop\(1, 2\)\(3\) at %s:%s>$'
+                 % (re.escape(filename), lineno))
+        self.assertRegex(repr(h), regex)
+
+        # partial method
+        if sys.version_info >= (3, 4):
+            method = HandleTests.test_handle_repr
+            cb = functools.partialmethod(method)
+            filename, lineno = test_utils.get_function_source(method)
+            h = asyncio.Handle(cb, (), self.loop)
+
+            cb_regex = r'<function HandleTests.test_handle_repr .*>'
+            cb_regex = (r'functools.partialmethod\(%s, , \)\(\)' % cb_regex)
+            regex = (r'^<Handle %s at %s:%s>$'
+                     % (cb_regex, re.escape(filename), lineno))
+            self.assertRegex(repr(h), regex)
+
+    def test_handle_repr_debug(self):
+        self.loop.get_debug.return_value = True
+
+        # simple function
+        create_filename = __file__
+        create_lineno = sys._getframe().f_lineno + 1
+        h = asyncio.Handle(noop, (1, 2), self.loop)
+        filename, lineno = test_utils.get_function_source(noop)
+        self.assertEqual(repr(h),
+                        '<Handle noop(1, 2) at %s:%s created at %s:%s>'
+                        % (filename, lineno, create_filename, create_lineno))
+
+        # cancelled handle
+        h.cancel()
+        self.assertEqual(repr(h),
+                        '<Handle cancelled noop(1, 2) at %s:%s created at %s:%s>'
+                        % (filename, lineno, create_filename, create_lineno))
+
+    def test_handle_source_traceback(self):
+        loop = asyncio.get_event_loop_policy().new_event_loop()
+        loop.set_debug(True)
+        self.set_event_loop(loop)
+
+        def check_source_traceback(h):
+            lineno = sys._getframe(1).f_lineno - 1
+            self.assertIsInstance(h._source_traceback, list)
+            self.assertEqual(h._source_traceback[-1][:3],
+                             (__file__,
+                              lineno,
+                              'test_handle_source_traceback'))
+
+        # call_soon
+        h = loop.call_soon(noop)
+        check_source_traceback(h)
+
+        # call_soon_threadsafe
+        h = loop.call_soon_threadsafe(noop)
+        check_source_traceback(h)
+
+        # call_later
+        h = loop.call_later(0, noop)
+        check_source_traceback(h)
+
+        # call_at
+        h = loop.call_at(0, noop)
+        check_source_traceback(h)
+
+
+class TimerTests(unittest.TestCase):
+
+    def setUp(self):
+        self.loop = mock.Mock()
+
+    def test_hash(self):
+        when = time.monotonic()
+        h = asyncio.TimerHandle(when, lambda: False, (),
+                                mock.Mock())
+        self.assertEqual(hash(h), hash(when))
+
+    def test_timer(self):
+        def callback(*args):
+            return args
+
+        args = (1, 2, 3)
+        when = time.monotonic()
+        h = asyncio.TimerHandle(when, callback, args, mock.Mock())
+        self.assertIs(h._callback, callback)
+        self.assertIs(h._args, args)
+        self.assertFalse(h._cancelled)
+
+        # cancel
+        h.cancel()
+        self.assertTrue(h._cancelled)
+        self.assertIsNone(h._callback)
+        self.assertIsNone(h._args)
+
+        # when cannot be None
+        self.assertRaises(AssertionError,
+                          asyncio.TimerHandle, None, callback, args,
+                          self.loop)
+
+    def test_timer_repr(self):
+        self.loop.get_debug.return_value = False
+
+        # simple function
+        h = asyncio.TimerHandle(123, noop, (), self.loop)
+        src = test_utils.get_function_source(noop)
+        self.assertEqual(repr(h),
+                        '<TimerHandle when=123 noop() at %s:%s>' % src)
+
+        # cancelled handle
+        h.cancel()
+        self.assertEqual(repr(h),
+                        '<TimerHandle cancelled when=123>')
+
+    def test_timer_repr_debug(self):
+        self.loop.get_debug.return_value = True
+
+        # simple function
+        create_filename = __file__
+        create_lineno = sys._getframe().f_lineno + 1
+        h = asyncio.TimerHandle(123, noop, (), self.loop)
+        filename, lineno = test_utils.get_function_source(noop)
+        self.assertEqual(repr(h),
+                        '<TimerHandle when=123 noop() at %s:%s created at %s:%s>'
+                        % (filename, lineno, create_filename, create_lineno))
+
+        # cancelled handle
+        h.cancel()
+        self.assertEqual(repr(h),
+                        '<TimerHandle cancelled when=123 noop() at %s:%s created at %s:%s>'
+                        % (filename, lineno, create_filename, create_lineno))
+
+
+    def test_timer_comparison(self):
+        def callback(*args):
+            return args
+
+        when = time.monotonic()
+
+        h1 = asyncio.TimerHandle(when, callback, (), self.loop)
+        h2 = asyncio.TimerHandle(when, callback, (), self.loop)
+        # TODO: Use assertLess etc.
+        self.assertFalse(h1 < h2)
+        self.assertFalse(h2 < h1)
+        self.assertTrue(h1 <= h2)
+        self.assertTrue(h2 <= h1)
+        self.assertFalse(h1 > h2)
+        self.assertFalse(h2 > h1)
+        self.assertTrue(h1 >= h2)
+        self.assertTrue(h2 >= h1)
+        self.assertTrue(h1 == h2)
+        self.assertFalse(h1 != h2)
+
+        h2.cancel()
+        self.assertFalse(h1 == h2)
+
+        h1 = asyncio.TimerHandle(when, callback, (), self.loop)
+        h2 = asyncio.TimerHandle(when + 10.0, callback, (), self.loop)
+        self.assertTrue(h1 < h2)
+        self.assertFalse(h2 < h1)
+        self.assertTrue(h1 <= h2)
+        self.assertFalse(h2 <= h1)
+        self.assertFalse(h1 > h2)
+        self.assertTrue(h2 > h1)
+        self.assertFalse(h1 >= h2)
+        self.assertTrue(h2 >= h1)
+        self.assertFalse(h1 == h2)
+        self.assertTrue(h1 != h2)
+
+        h3 = asyncio.Handle(callback, (), self.loop)
+        self.assertIs(NotImplemented, h1.__eq__(h3))
+        self.assertIs(NotImplemented, h1.__ne__(h3))
+
+
+class AbstractEventLoopTests(unittest.TestCase):
+
+    def test_not_implemented(self):
+        f = mock.Mock()
+        loop = asyncio.AbstractEventLoop()
+        self.assertRaises(
+            NotImplementedError, loop.run_forever)
+        self.assertRaises(
+            NotImplementedError, loop.run_until_complete, None)
+        self.assertRaises(
+            NotImplementedError, loop.stop)
+        self.assertRaises(
+            NotImplementedError, loop.is_running)
+        self.assertRaises(
+            NotImplementedError, loop.is_closed)
+        self.assertRaises(
+            NotImplementedError, loop.close)
+        self.assertRaises(
+            NotImplementedError, loop.create_task, None)
+        self.assertRaises(
+            NotImplementedError, loop.call_later, None, None)
+        self.assertRaises(
+            NotImplementedError, loop.call_at, f, f)
+        self.assertRaises(
+            NotImplementedError, loop.call_soon, None)
+        self.assertRaises(
+            NotImplementedError, loop.time)
+        self.assertRaises(
+            NotImplementedError, loop.call_soon_threadsafe, None)
+        self.assertRaises(
+            NotImplementedError, loop.run_in_executor, f, f)
+        self.assertRaises(
+            NotImplementedError, loop.set_default_executor, f)
+        self.assertRaises(
+            NotImplementedError, loop.getaddrinfo, 'localhost', 8080)
+        self.assertRaises(
+            NotImplementedError, loop.getnameinfo, ('localhost', 8080))
+        self.assertRaises(
+            NotImplementedError, loop.create_connection, f)
+        self.assertRaises(
+            NotImplementedError, loop.create_server, f)
+        self.assertRaises(
+            NotImplementedError, loop.create_datagram_endpoint, f)
+        self.assertRaises(
+            NotImplementedError, loop.add_reader, 1, f)
+        self.assertRaises(
+            NotImplementedError, loop.remove_reader, 1)
+        self.assertRaises(
+            NotImplementedError, loop.add_writer, 1, f)
+        self.assertRaises(
+            NotImplementedError, loop.remove_writer, 1)
+        self.assertRaises(
+            NotImplementedError, loop.sock_recv, f, 10)
+        self.assertRaises(
+            NotImplementedError, loop.sock_sendall, f, 10)
+        self.assertRaises(
+            NotImplementedError, loop.sock_connect, f, f)
+        self.assertRaises(
+            NotImplementedError, loop.sock_accept, f)
+        self.assertRaises(
+            NotImplementedError, loop.add_signal_handler, 1, f)
+        self.assertRaises(
+            NotImplementedError, loop.remove_signal_handler, 1)
+        
self.assertRaises( + NotImplementedError, loop.remove_signal_handler, 1) + self.assertRaises( + NotImplementedError, loop.connect_read_pipe, f, + mock.sentinel.pipe) + self.assertRaises( + NotImplementedError, loop.connect_write_pipe, f, + mock.sentinel.pipe) + self.assertRaises( + NotImplementedError, loop.subprocess_shell, f, + mock.sentinel) + self.assertRaises( + NotImplementedError, loop.subprocess_exec, f) + self.assertRaises( + NotImplementedError, loop.set_exception_handler, f) + self.assertRaises( + NotImplementedError, loop.default_exception_handler, f) + self.assertRaises( + NotImplementedError, loop.call_exception_handler, f) + self.assertRaises( + NotImplementedError, loop.get_debug) + self.assertRaises( + NotImplementedError, loop.set_debug, f) + + +class ProtocolsAbsTests(unittest.TestCase): + + def test_empty(self): + f = mock.Mock() + p = asyncio.Protocol() + self.assertIsNone(p.connection_made(f)) + self.assertIsNone(p.connection_lost(f)) + self.assertIsNone(p.data_received(f)) + self.assertIsNone(p.eof_received()) + + dp = asyncio.DatagramProtocol() + self.assertIsNone(dp.connection_made(f)) + self.assertIsNone(dp.connection_lost(f)) + self.assertIsNone(dp.error_received(f)) + self.assertIsNone(dp.datagram_received(f, f)) + + sp = asyncio.SubprocessProtocol() + self.assertIsNone(sp.connection_made(f)) + self.assertIsNone(sp.connection_lost(f)) + self.assertIsNone(sp.pipe_data_received(1, f)) + self.assertIsNone(sp.pipe_connection_lost(1, f)) + self.assertIsNone(sp.process_exited()) + + +class PolicyTests(unittest.TestCase): + + def test_event_loop_policy(self): + policy = asyncio.AbstractEventLoopPolicy() + self.assertRaises(NotImplementedError, policy.get_event_loop) + self.assertRaises(NotImplementedError, policy.set_event_loop, object()) + self.assertRaises(NotImplementedError, policy.new_event_loop) + self.assertRaises(NotImplementedError, policy.get_child_watcher) + self.assertRaises(NotImplementedError, policy.set_child_watcher, + object()) + + def test_get_event_loop(self): + policy = asyncio.DefaultEventLoopPolicy() + self.assertIsNone(policy._local._loop) + + loop = policy.get_event_loop() + self.assertIsInstance(loop, asyncio.AbstractEventLoop) + + self.assertIs(policy._local._loop, loop) + self.assertIs(loop, policy.get_event_loop()) + loop.close() + + def test_get_event_loop_calls_set_event_loop(self): + policy = asyncio.DefaultEventLoopPolicy() + + with mock.patch.object( + policy, "set_event_loop", + wraps=policy.set_event_loop) as m_set_event_loop: + + loop = policy.get_event_loop() + + # policy._local._loop must be set through .set_event_loop() + # (the unix DefaultEventLoopPolicy needs this call to attach + # the child watcher correctly) + m_set_event_loop.assert_called_with(loop) + + loop.close() + + def test_get_event_loop_after_set_none(self): + policy = asyncio.DefaultEventLoopPolicy() + policy.set_event_loop(None) + self.assertRaises(AssertionError, policy.get_event_loop) + + @mock.patch('asyncio.events.threading.current_thread') + def test_get_event_loop_thread(self, m_current_thread): + + def f(): + policy = asyncio.DefaultEventLoopPolicy() + self.assertRaises(AssertionError, policy.get_event_loop) + + th = threading.Thread(target=f) + th.start() + th.join() + + def test_new_event_loop(self): + policy = asyncio.DefaultEventLoopPolicy() + + loop = policy.new_event_loop() + self.assertIsInstance(loop, asyncio.AbstractEventLoop) + loop.close() + + def test_set_event_loop(self): + policy = asyncio.DefaultEventLoopPolicy() + old_loop = 
policy.get_event_loop() + + self.assertRaises(AssertionError, policy.set_event_loop, object()) + + loop = policy.new_event_loop() + policy.set_event_loop(loop) + self.assertIs(loop, policy.get_event_loop()) + self.assertIsNot(old_loop, policy.get_event_loop()) + loop.close() + old_loop.close() + + def test_get_event_loop_policy(self): + policy = asyncio.get_event_loop_policy() + self.assertIsInstance(policy, asyncio.AbstractEventLoopPolicy) + self.assertIs(policy, asyncio.get_event_loop_policy()) + + def test_set_event_loop_policy(self): + self.assertRaises( + AssertionError, asyncio.set_event_loop_policy, object()) + + old_policy = asyncio.get_event_loop_policy() + + policy = asyncio.DefaultEventLoopPolicy() + asyncio.set_event_loop_policy(policy) + self.assertIs(policy, asyncio.get_event_loop_policy()) + self.assertIsNot(policy, old_policy) + + +if __name__ == '__main__': + unittest.main() + +#!/usr/bin/env python + +""" +This script generated test_cases for test_distribution_version.py. + +To do so it outputs the relevant files from /etc/*release, the output of platform.dist() and the current ansible_facts regarding the distribution version. + +This assumes a working ansible version in the path. +""" + +import platform +import os.path +import subprocess +import json +import sys + +filelist = [ + '/etc/oracle-release', + '/etc/slackware-version', + '/etc/redhat-release', + '/etc/vmware-release', + '/etc/openwrt_release', + '/etc/system-release', + '/etc/alpine-release', + '/etc/release', + '/etc/arch-release', + '/etc/os-release', + '/etc/SuSE-release', + '/etc/gentoo-release', + '/etc/os-release', + '/etc/lsb-release', + '/etc/altlinux-release', + '/etc/os-release', + '/etc/coreos/update.conf', + '/usr/lib/os-release', +] + +fcont = {} + +for f in filelist: + if os.path.exists(f): + s = os.path.getsize(f) + if s > 0 and s < 10000: + with open(f) as fh: + fcont[f] = fh.read() + +dist = platform.dist() + + +facts = ['distribution', 'distribution_version', 'distribution_release', 'distribution_major_version', 'os_family'] + +try: + ansible_out = subprocess.check_output( + ['ansible', 'localhost', '-m', 'setup']) +except subprocess.CalledProcessError as e: + print("ERROR: ansible run failed, output was: \n") + print(e.output) + sys.exit(e.returncode) + +parsed = json.loads(ansible_out[ansible_out.index('{'):]) +ansible_facts = {} +for fact in facts: + try: + ansible_facts[fact] = parsed['ansible_facts']['ansible_' + fact] + except: + ansible_facts[fact] = "N/A" + +nicename = ansible_facts['distribution'] + ' ' + ansible_facts['distribution_version'] + +output = { + 'name': nicename, + 'input': fcont, + 'platform.dist': dist, + 'result': ansible_facts, +} + +print(json.dumps(output, indent=4)) + +# Copyright 2015 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Utility functions for summary creation.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import functools +import re + +from tensorflow.python.framework import dtypes +from tensorflow.python.framework import ops +from tensorflow.python.ops import standard_ops +from tensorflow.python.summary import summary + +__all__ = [ + 'summarize_tensor', + 'summarize_activation', + 'summarize_tensors', + 'summarize_collection', + 'summarize_variables', + 'summarize_weights', + 'summarize_biases', + 'summarize_activations', +] + +# TODO(wicke): add more unit tests for summarization functions. + + +def _add_scalar_summary(tensor, tag=None): + """Add a scalar summary operation for the tensor. + + Args: + tensor: The tensor to summarize. + tag: The tag to use, if None then use tensor's op's name. + + Returns: + The created histogram summary. + + Raises: + ValueError: If the tag is already in use or the rank is not 0. + """ + tensor.get_shape().assert_has_rank(0) + tag = tag or '%s_summary' % tensor.op.name + return summary.scalar(tag, tensor) + + +def _add_histogram_summary(tensor, tag=None): + """Add a summary operation for the histogram of a tensor. + + Args: + tensor: The tensor to summarize. + tag: The tag to use, if None then use tensor's op's name. + + Returns: + The created histogram summary. + + Raises: + ValueError: If the tag is already in use. + """ + tag = tag or '%s_summary' % tensor.op.name + return summary.histogram(tag, tensor) + + +def summarize_activation(op): + """Summarize an activation. + + This applies the given activation and adds useful summaries specific to the + activation. + + Args: + op: The tensor to summarize (assumed to be a layer activation). + Returns: + The summary op created to summarize `op`. + """ + if op.op.type in ('Relu', 'Softplus', 'Relu6'): + # Using inputs to avoid floating point equality and/or epsilons. + _add_scalar_summary( + standard_ops.reduce_mean( + standard_ops.to_float( + standard_ops.less(op.op.inputs[ + 0], standard_ops.cast(0.0, op.op.inputs[0].dtype)))), + '%s/zeros' % op.op.name) + if op.op.type == 'Relu6': + _add_scalar_summary( + standard_ops.reduce_mean( + standard_ops.to_float( + standard_ops.greater(op.op.inputs[ + 0], standard_ops.cast(6.0, op.op.inputs[0].dtype)))), + '%s/sixes' % op.op.name) + return _add_histogram_summary(op, '%s/activation' % op.op.name) + + +def summarize_tensor(tensor, tag=None): + """Summarize a tensor using a suitable summary type. + + This function adds a summary op for `tensor`. The type of summary depends on + the shape of `tensor`. For scalars, a `scalar_summary` is created, for all + other tensors, `histogram_summary` is used. + + Args: + tensor: The tensor to summarize + tag: The tag to use, if None then use tensor's op's name. + + Returns: + The summary op created or None for string tensors. + """ + # Skips string tensors and boolean tensors (not handled by the summaries). + if (tensor.dtype.is_compatible_with(dtypes.string) or + tensor.dtype.base_dtype == dtypes.bool): + return None + + if tensor.get_shape().ndims == 0: + # For scalars, use a scalar summary. + return _add_scalar_summary(tensor, tag) + else: + # We may land in here if the rank is still unknown. The histogram won't + # hurt if this ends up being a scalar. 
+ return _add_histogram_summary(tensor, tag) + + +def summarize_tensors(tensors, summarizer=summarize_tensor): + """Summarize a set of tensors.""" + return [summarizer(tensor) for tensor in tensors] + + +def summarize_collection(collection, + name_filter=None, + summarizer=summarize_tensor): + """Summarize a graph collection of tensors, possibly filtered by name.""" + tensors = [] + for op in ops.get_collection(collection): + if name_filter is None or re.match(name_filter, op.op.name): + tensors.append(op) + return summarize_tensors(tensors, summarizer) + + +# Utility functions for commonly used collections +summarize_variables = functools.partial(summarize_collection, + ops.GraphKeys.GLOBAL_VARIABLES) + +summarize_weights = functools.partial(summarize_collection, + ops.GraphKeys.WEIGHTS) + +summarize_biases = functools.partial(summarize_collection, ops.GraphKeys.BIASES) + + +def summarize_activations(name_filter=None, summarizer=summarize_activation): + """Summarize activations, using `summarize_activation` to summarize.""" + return summarize_collection(ops.GraphKeys.ACTIVATIONS, name_filter, + summarizer) + +#@PydevCodeAnalysisIgnore +import sys, os +import ctypes + +# find_library(name) returns the pathname of a library, or None. +if os.name == "nt": + def find_library(name): + # See MSDN for the REAL search order. + for directory in os.environ['PATH'].split(os.pathsep): + fname = os.path.join(directory, name) + if os.path.exists(fname): + return fname + if fname.lower().endswith(".dll"): + continue + fname = fname + ".dll" + if os.path.exists(fname): + return fname + return None + +if os.name == "ce": + # search path according to MSDN: + # - absolute path specified by filename + # - The .exe launch directory + # - the Windows directory + # - ROM dll files (where are they?) + # - OEM specified search path: HKLM\Loader\SystemPath + def find_library(name): + return name + +if os.name == "posix" and sys.platform == "darwin": + from ctypes.macholib.dyld import dyld_find as _dyld_find + def find_library(name): + possible = ['lib%s.dylib' % name, + '%s.dylib' % name, + '%s.framework/%s' % (name, name)] + for name in possible: + try: + return _dyld_find(name) + except ValueError: + continue + return None + +elif os.name == "posix": + # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump + import re, tempfile + + def _findLib_gcc(name): + expr = '[^\(\)\s]*lib%s\.[^\(\)\s]*' % name + cmd = 'if type gcc &>/dev/null; then CC=gcc; else CC=cc; fi;' \ + '$CC -Wl,-t -o /dev/null 2>&1 -l' + name + try: + fdout, outfile = tempfile.mkstemp() + fd = os.popen(cmd) + trace = fd.read() + err = fd.close() + finally: + try: + os.unlink(outfile) + except OSError, e: + import errno + if e.errno != errno.ENOENT: + raise + res = re.search(expr, trace) + if not res: + return None + return res.group(0) + + def _findLib_ld(name): + expr = '/[^\(\)\s]*lib%s\.[^\(\)\s]*' % name + res = re.search(expr, os.popen('/sbin/ldconfig -p 2>/dev/null').read()) + if not res: + # Hm, this works only for libs needed by the python executable. 
+            cmd = 'ldd %s 2>/dev/null' % sys.executable
+            res = re.search(expr, os.popen(cmd).read())
+            if not res:
+                return None
+        return res.group(0)
+
+    def _get_soname(f):
+        cmd = "objdump -p -j .dynamic 2>/dev/null " + f
+        res = re.search(r'\sSONAME\s+([^\s]+)', os.popen(cmd).read())
+        if not res:
+            return None
+        return res.group(1)
+
+    def find_library(name):
+        lib = _findLib_ld(name) or _findLib_gcc(name)
+        if not lib:
+            return None
+        return _get_soname(lib)
+
+################################################################
+# test code
+
+def test():
+    from ctypes import cdll
+    if os.name == "nt":
+        sys.stdout.write('%s\n' % (cdll.msvcrt,))
+        sys.stdout.write('%s\n' % (cdll.load("msvcrt"),))
+        sys.stdout.write('%s\n' % (find_library("msvcrt"),))
+
+    if os.name == "posix":
+        # find and load_version
+        sys.stdout.write('%s\n' % (find_library("m"),))
+        sys.stdout.write('%s\n' % (find_library("c"),))
+        sys.stdout.write('%s\n' % (find_library("bz2"),))
+
+        # getattr
+##        print_ cdll.m
+##        print_ cdll.bz2
+
+        # load
+        if sys.platform == "darwin":
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libm.dylib"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libcrypto.dylib"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libSystem.dylib"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("System.framework/System"),))
+        else:
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libm.so"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libcrypt.so"),))
+            sys.stdout.write('%s\n' % (find_library("crypt"),))
+
+if __name__ == "__main__":
+    test()
+
+# -*- coding: utf-8 -*-
+##############################################################################
+#
+#    OpenERP, Open Source Management Solution
+#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
+#
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU Affero General Public License as
+#    published by the Free Software Foundation, either version 3 of the
+#    License, or (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU Affero General Public License for more details.
+#
+#    You should have received a copy of the GNU Affero General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+{
+    'name': 'United States - Chart of accounts',
+    'version': '1.1',
+    'author': 'OpenERP SA',
+    'category': 'Localization/Account Charts',
+    'description': """
+United States - Chart of accounts.
+================================== + """, + 'website': 'http://www.openerp.com', + 'depends': ['account_chart', 'account_anglo_saxon'], + 'data': [ + 'l10n_us_account_type.xml', + 'account_chart_template.xml', + 'account.account.template.csv', + 'account_tax_code_template.xml', + 'account_tax_template.xml', + 'account_chart_template_after.xml', + 'l10n_us_wizard.xml' + ], + 'demo': [], + 'test': [], + 'installable': True, + 'auto_install': False, +} +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: + +import json + +from datetime import datetime + +from django.db.models import Q +from django.core.urlresolvers import reverse +from django.http import HttpResponse, Http404 +from django.shortcuts import redirect, render_to_response, get_object_or_404 +from django.template import RequestContext +from django.template.loader import render_to_string + +from django.contrib.sites.models import Site + +from .conf import settings +from .exceptions import InvalidSection +from .models import Post, FeedHit +from .signals import post_viewed, post_redirected + + +def blog_index(request, section=None): + if section: + try: + posts = Post.objects.section(section) + except InvalidSection: + raise Http404() + else: + posts = Post.objects.current() + + if request.GET.get("q"): + posts = posts.filter( + Q(title__icontains=request.GET.get("q")) | + Q(teaser_html__icontains=request.GET.get("q")) | + Q(content_html__icontains=request.GET.get("q")) + ) + if posts.count() == 1: + return redirect(posts.get().get_absolute_url()) + + return render_to_response("pinax/blog/blog_list.html", { + "posts": posts, + "section_slug": section, + "section_name": dict(Post.SECTION_CHOICES)[Post.section_idx(section)] if section else None, + "search_term": request.GET.get("q") + }, context_instance=RequestContext(request)) + + +def blog_post_detail(request, **kwargs): + + if "post_pk" in kwargs: + if request.user.is_authenticated() and request.user.is_staff: + queryset = Post.objects.all() + post = get_object_or_404(queryset, pk=kwargs["post_pk"]) + else: + raise Http404() + elif "post_secret_key" in kwargs: + post = get_object_or_404(Post, secret_key=kwargs["post_secret_key"]) + else: + queryset = Post.objects.current() + if "post_slug" in kwargs: + if not settings.PINAX_BLOG_SLUG_UNIQUE: + raise Http404() + post = get_object_or_404(queryset, slug=kwargs["post_slug"]) + else: + queryset = queryset.filter( + published__year=int(kwargs["year"]), + published__month=int(kwargs["month"]), + published__day=int(kwargs["day"]), + ) + post = get_object_or_404(queryset, slug=kwargs["slug"]) + if settings.PINAX_BLOG_SLUG_UNIQUE: + post_redirected.send(sender=post, post=post, request=request) + return redirect(post.get_absolute_url(), permanent=True) + post_viewed.send(sender=post, post=post, request=request) + + return render_to_response("pinax/blog/blog_post.html", { + "post": post, + }, context_instance=RequestContext(request)) + + +def serialize_request(request): + data = { + "path": request.path, + "META": { + "QUERY_STRING": request.META.get("QUERY_STRING"), + "REMOTE_ADDR": request.META.get("REMOTE_ADDR"), + } + } + for key in request.META: + if key.startswith("HTTP"): + data["META"][key] = request.META[key] + return json.dumps(data) + + +def blog_feed(request, section=None, feed_type=None): + + try: + posts = Post.objects.section(section) + except InvalidSection: + raise Http404() + + if section is None: + section = settings.PINAX_BLOG_ALL_SECTION_NAME + + if feed_type == "atom": + feed_template = 
"pinax/blog/atom_feed.xml" + feed_mimetype = "application/atom+xml" + elif feed_type == "rss": + feed_template = "pinax/blog/rss_feed.xml" + feed_mimetype = "application/rss+xml" + else: + raise Http404() + + current_site = Site.objects.get_current() + + feed_title = "%s Blog: %s" % (current_site.name, section[0].upper() + section[1:]) + + blog_url = "http://%s%s" % (current_site.domain, reverse("blog")) + + url_name, kwargs = "blog_feed", {"section": section, "feed_type": feed_type} + feed_url = "http://%s%s" % (current_site.domain, reverse(url_name, kwargs=kwargs)) + + if posts: + feed_updated = posts[0].published + else: + feed_updated = datetime(2009, 8, 1, 0, 0, 0) + + # create a feed hit + hit = FeedHit() + hit.request_data = serialize_request(request) + hit.save() + + feed = render_to_string(feed_template, { + "feed_id": feed_url, + "feed_title": feed_title, + "blog_url": blog_url, + "feed_url": feed_url, + "feed_updated": feed_updated, + "entries": posts, + "current_site": current_site, + }) + return HttpResponse(feed, content_type=feed_mimetype) + + +# Copyright (C) 2013 Google Inc., authors, and contributors +# Licensed under http://www.apache.org/licenses/LICENSE-2.0 +# Created By: +# Maintained By: + +"""Add log_events + +Revision ID: 3288290c842a +Revises: 4dceb701509f +Create Date: 2013-05-30 01:35:29.944450 + +""" + +# revision identifiers, used by Alembic. +revision = '3288290c842a' +down_revision = '4dceb701509f' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! ### + op.create_table('log_events', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('severity', sa.String(length=250), nullable=True), + sa.Column('whodunnit', sa.String(length=250), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! ### + op.drop_table('log_events') + ### end Alembic commands ### +class Token: + def __init__(self,pos=(0,0),type='symbol',val=None,items=None): + self.pos,self.type,self.val,self.items=pos,type,val,items + +def u_error(ctx,s,i): + y,x = i + line = s.split('\n')[y-1] + p = '' + if y < 10: p += ' ' + if y < 100: p += ' ' + r = p + str(y) + ": " + line + "\n" + r += " "+" "*x+"^" +'\n' + raise 'error: '+ctx+'\n'+r + +ISYMBOLS = '`-=[];,./~!@$%^&*()+{}:<>?' 
+SYMBOLS = [
+    'def','class','yield','return','pass','and','or','not','in','import',
+    'is','while','break','for','continue','if','else','elif','try',
+    'except','raise','True','False','None','global','del','from',
+    '-','+','*','**','/','%','<<','>>',
+    '-=','+=','*=','/=','=','==','!=','<','>',
+    '<=','>=','[',']','{','}','(',')','.',':',',',';','&','|','!',
+    ]
+B_BEGIN,B_END = ['[','(','{'],[']',')','}']
+
+class TData:
+    def __init__(self):
+        self.y,self.yi,self.nl = 1,0,True
+        self.res,self.indent,self.braces = [],[0],0
+    def add(self,t,v): self.res.append(Token(self.f,t,v))
+
+def clean(s):
+    s = s.replace('\r\n','\n')
+    s = s.replace('\r','\n')
+    return s
+
+def tokenize(s):
+    s = clean(s)
+    try: return do_tokenize(s)
+    except: u_error('tokenize',s,T.f)
+
+def do_tokenize(s):
+    global T
+    T,i,l = TData(),0,len(s)
+    T.f = (T.y,i-T.yi+1)
+    while i < l:
+        c = s[i]; T.f = (T.y,i-T.yi+1)
+        if T.nl: T.nl = False; i = do_indent(s,i,l)
+        elif c == '\n': i = do_nl(s,i,l)
+        elif c in ISYMBOLS: i = do_symbol(s,i,l)
+        elif c >= '0' and c <= '9': i = do_number(s,i,l)
+        elif (c >= 'a' and c <= 'z') or \
+            (c >= 'A' and c <= 'Z') or c == '_': i = do_name(s,i,l)
+        elif c=='"' or c=="'": i = do_string(s,i,l)
+        elif c=='#': i = do_comment(s,i,l)
+        elif c == '\\' and s[i+1] == '\n':
+            i += 2; T.y,T.yi = T.y+1,i
+        elif c == ' ' or c == '\t': i += 1
+        else: u_error('tokenize',s,T.f)
+    indent(0)
+    r = T.res; T = None
+    return r
+
+def do_nl(s,i,l):
+    if not T.braces:
+        T.add('nl',None)
+    i,T.nl = i+1,True
+    T.y,T.yi = T.y+1,i
+    return i
+
+def do_indent(s,i,l):
+    v = 0
+    while i < l:
+        c = s[i]
+        if c != ' ': break
+        i,v = i+1,v+1
+    if c != '\n' and c != '#' and not T.braces: indent(v)
+    return i
+
+def indent(v):
+    if v == T.indent[-1]: pass
+    elif v > T.indent[-1]:
+        T.indent.append(v)
+        T.add('indent',v)
+    elif v < T.indent[-1]:
+        n = T.indent.index(v)
+        while len(T.indent) > n+1:
+            v = T.indent.pop()
+            T.add('dedent',v)
+
+def do_symbol(s,i,l):
+    symbols = []
+    v,f,i = s[i],i,i+1
+    if v in SYMBOLS: symbols.append(v)
+    while i < l:
+        c = s[i]
+        if not c in ISYMBOLS: break
+        v,i = v+c,i+1
+        if v in SYMBOLS: symbols.append(v)
+    v = symbols.pop(); n = len(v); i = f+n
+    T.add('symbol',v)
+    if v in B_BEGIN: T.braces += 1
+    if v in B_END: T.braces -= 1
+    return i
+
+def do_number(s,i,l):
+    v,i = s[i],i+1
+    while i < l:
+        c = s[i]
+        if (c < '0' or c > '9') and (c < 'a' or c > 'f') and c != 'x': break
+        v,i = v+c,i+1
+    if c == '.':
+        v,i = v+c,i+1
+        while i < l:
+            c = s[i]
+            if c < '0' or c > '9': break
+            v,i = v+c,i+1
+    T.add('number',v)
+    return i
+
+def do_name(s,i,l):
+    v,i = s[i],i+1
+    while i < l:
+        c = s[i]
+        if (c < 'a' or c > 'z') and (c < 'A' or c > 'Z') and (c < '0' or c > '9') and c != '_': break
+        v,i = v+c,i+1
+    if v in SYMBOLS: T.add('symbol',v)
+    else: T.add('name',v)
+    return i
+
+def do_string(s,i,l):
+    v,q,i = '',s[i],i+1
+    if (l-i) >= 5 and s[i] == q and s[i+1] == q: # """
+        i += 2
+        while i < l-2:
+            c = s[i]
+            if c == q and s[i+1] == q and s[i+2] == q:
+                i += 3
+                T.add('string',v)
+                break
+            else:
+                v,i = v+c,i+1
+                if c == '\n': T.y,T.yi = T.y+1,i
+    else:
+        while i < l:
+            c = s[i]
+            if c == "\\":
+                i = i+1; c = s[i]
+                if c == "n": c = '\n'
+                elif c == "t": c = '\t'
+                elif c == "r": c = chr(13)
+                elif c == "0": c = chr(0)
+                v,i = v+c,i+1
+            elif c == q:
+                i += 1
+                T.add('string',v)
+                break
+            else:
+                v,i = v+c,i+1
+    return i
+
+def do_comment(s,i,l):
+    i += 1
+    while i < l:
+        c = s[i]
+        if c == '\n': break
+        i += 1
+    return i
+
+        #try: compile("try:\n\t1/0\n    \t1/0\nfinally:\n pass\n", '<string>', 'exec')
+        #except TabError: pass
+        #else: self.fail("TabError not raised")
+
+        self.raise_catch(SystemError, "SystemError")
+
+        self.raise_catch(SystemExit, "SystemExit")
+        self.assertRaises(SystemExit, sys.exit, 0)
+
+        self.raise_catch(TypeError, "TypeError")
+        try: [] + ()
+        except TypeError: pass
+
+        self.raise_catch(ValueError, "ValueError")
+        self.assertRaises(ValueError, chr, 10000)
+
+        self.raise_catch(ZeroDivisionError, "ZeroDivisionError")
+        try: x = 1 // 0
+        except ZeroDivisionError: pass
+
+        self.raise_catch(Exception, "Exception")
+        try: x = 1 // 0
+        except Exception, e: pass
+
+    def testSyntaxErrorMessage(self):
+        # make sure the right exception message is raised for each of
+        # these code fragments
+
+        def ckmsg(src, msg):
+            try:
+                compile(src, '<fragment>', 'exec')
+            except SyntaxError, e:
+                if e.msg != msg:
+                    self.fail("expected %s, got %s" % (msg, e.msg))
+            else:
+                self.fail("failed to get expected SyntaxError")
+
+        s = '''while 1:
+            try:
+                pass
+            finally:
+                continue'''
+
+        if not sys.platform.startswith('java'):
+            ckmsg(s, "'continue' not supported inside 'finally' clause")
+
+        s = '''if 1:
+        try:
+            continue
+        except:
+            pass'''
+
+        ckmsg(s, "'continue' 
not properly in loop") + ckmsg("continue\n", "'continue' not properly in loop") + + @cpython_only + def testSettingException(self): + # test that setting an exception at the C level works even if the + # exception object can't be constructed. + + class BadException: + def __init__(self_): + raise RuntimeError, "can't instantiate BadException" + + def test_capi1(): + import _testcapi + try: + _testcapi.raise_exception(BadException, 1) + except TypeError, err: + exc, err, tb = sys.exc_info() + co = tb.tb_frame.f_code + self.assertEqual(co.co_name, "test_capi1") + self.assertTrue(co.co_filename.endswith('test_exceptions'+os.extsep+'py')) + else: + self.fail("Expected exception") + + def test_capi2(): + import _testcapi + try: + _testcapi.raise_exception(BadException, 0) + except RuntimeError, err: + exc, err, tb = sys.exc_info() + co = tb.tb_frame.f_code + self.assertEqual(co.co_name, "__init__") + self.assertTrue(co.co_filename.endswith('test_exceptions'+os.extsep+'py')) + co2 = tb.tb_frame.f_back.f_code + self.assertEqual(co2.co_name, "test_capi2") + else: + self.fail("Expected exception") + + if not sys.platform.startswith('java'): + test_capi1() + test_capi2() + + def test_WindowsError(self): + try: + WindowsError + except NameError: + pass + else: + self.assertEqual(str(WindowsError(1001)), + "1001") + self.assertEqual(str(WindowsError(1001, "message")), + "[Error 1001] message") + self.assertEqual(WindowsError(1001, "message").errno, 22) + self.assertEqual(WindowsError(1001, "message").winerror, 1001) + + @ignore_deprecation_warnings + def testAttributes(self): + # test that exception attributes are happy + + exceptionList = [ + (BaseException, (), {'message' : '', 'args' : ()}), + (BaseException, (1, ), {'message' : 1, 'args' : (1,)}), + (BaseException, ('foo',), + {'message' : 'foo', 'args' : ('foo',)}), + (BaseException, ('foo', 1), + {'message' : '', 'args' : ('foo', 1)}), + (SystemExit, ('foo',), + {'message' : 'foo', 'args' : ('foo',), 'code' : 'foo'}), + (IOError, ('foo',), + {'message' : 'foo', 'args' : ('foo',), 'filename' : None, + 'errno' : None, 'strerror' : None}), + (IOError, ('foo', 'bar'), + {'message' : '', 'args' : ('foo', 'bar'), 'filename' : None, + 'errno' : 'foo', 'strerror' : 'bar'}), + (IOError, ('foo', 'bar', 'baz'), + {'message' : '', 'args' : ('foo', 'bar'), 'filename' : 'baz', + 'errno' : 'foo', 'strerror' : 'bar'}), + (IOError, ('foo', 'bar', 'baz', 'quux'), + {'message' : '', 'args' : ('foo', 'bar', 'baz', 'quux')}), + (EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'), + {'message' : '', 'args' : ('errnoStr', 'strErrorStr'), + 'strerror' : 'strErrorStr', 'errno' : 'errnoStr', + 'filename' : 'filenameStr'}), + (EnvironmentError, (1, 'strErrorStr', 'filenameStr'), + {'message' : '', 'args' : (1, 'strErrorStr'), 'errno' : 1, + 'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}), + (SyntaxError, (), {'message' : '', 'msg' : None, 'text' : None, + 'filename' : None, 'lineno' : None, 'offset' : None, + 'print_file_and_line' : None}), + (SyntaxError, ('msgStr',), + {'message' : 'msgStr', 'args' : ('msgStr',), 'text' : None, + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : None, 'lineno' : None, 'offset' : None}), + (SyntaxError, ('msgStr', ('filenameStr', 'linenoStr', 'offsetStr', + 'textStr')), + {'message' : '', 'offset' : 'offsetStr', 'text' : 'textStr', + 'args' : ('msgStr', ('filenameStr', 'linenoStr', + 'offsetStr', 'textStr')), + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : 'filenameStr', 'lineno' : 
'linenoStr'}), + (SyntaxError, ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr', + 'textStr', 'print_file_and_lineStr'), + {'message' : '', 'text' : None, + 'args' : ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr', + 'textStr', 'print_file_and_lineStr'), + 'print_file_and_line' : None, 'msg' : 'msgStr', + 'filename' : None, 'lineno' : None, 'offset' : None}), + (UnicodeError, (), {'message' : '', 'args' : (),}), + (UnicodeEncodeError, ('ascii', u'a', 0, 1, 'ordinal not in range'), + {'message' : '', 'args' : ('ascii', u'a', 0, 1, + 'ordinal not in range'), + 'encoding' : 'ascii', 'object' : u'a', + 'start' : 0, 'reason' : 'ordinal not in range'}), + (UnicodeDecodeError, ('ascii', '\xff', 0, 1, 'ordinal not in range'), + {'message' : '', 'args' : ('ascii', '\xff', 0, 1, + 'ordinal not in range'), + 'encoding' : 'ascii', 'object' : '\xff', + 'start' : 0, 'reason' : 'ordinal not in range'}), + (UnicodeTranslateError, (u"\u3042", 0, 1, "ouch"), + {'message' : '', 'args' : (u'\u3042', 0, 1, 'ouch'), + 'object' : u'\u3042', 'reason' : 'ouch', + 'start' : 0, 'end' : 1}), + ] + try: + exceptionList.append( + (WindowsError, (1, 'strErrorStr', 'filenameStr'), + {'message' : '', 'args' : (1, 'strErrorStr'), + 'strerror' : 'strErrorStr', 'winerror' : 1, + 'errno' : 22, 'filename' : 'filenameStr'}) + ) + except NameError: + pass + + for exc, args, expected in exceptionList: + try: + raise exc(*args) + except BaseException, e: + if type(e) is not exc: + raise + # Verify module name + self.assertEqual(type(e).__module__, 'exceptions') + # Verify no ref leaks in Exc_str() + s = str(e) + for checkArgName in expected: + self.assertEqual(repr(getattr(e, checkArgName)), + repr(expected[checkArgName]), + 'exception "%s", attribute "%s"' % + (repr(e), checkArgName)) + + # test for pickling support + for p in pickle, cPickle: + for protocol in range(p.HIGHEST_PROTOCOL + 1): + new = p.loads(p.dumps(e, protocol)) + for checkArgName in expected: + got = repr(getattr(new, checkArgName)) + want = repr(expected[checkArgName]) + self.assertEqual(got, want, + 'pickled "%r", attribute "%s"' % + (e, checkArgName)) + + + def testDeprecatedMessageAttribute(self): + # Accessing BaseException.message and relying on its value set by + # BaseException.__init__ triggers a deprecation warning. + exc = BaseException("foo") + with check_warnings(("BaseException.message has been deprecated " + "as of Python 2.6", DeprecationWarning)) as w: + self.assertEqual(exc.message, "foo") + self.assertEqual(len(w.warnings), 1) + + def testRegularMessageAttribute(self): + # Accessing BaseException.message after explicitly setting a value + # for it does not trigger a deprecation warning. + exc = BaseException("foo") + exc.message = "bar" + with check_warnings(quiet=True) as w: + self.assertEqual(exc.message, "bar") + self.assertEqual(len(w.warnings), 0) + # Deleting the message is supported, too. + del exc.message + with self.assertRaises(AttributeError): + exc.message + + @ignore_deprecation_warnings + def testPickleMessageAttribute(self): + # Pickling with message attribute must work, as well. + e = Exception("foo") + f = Exception("foo") + f.message = "bar" + for p in pickle, cPickle: + ep = p.loads(p.dumps(e)) + self.assertEqual(ep.message, "foo") + fp = p.loads(p.dumps(f)) + self.assertEqual(fp.message, "bar") + + @ignore_deprecation_warnings + def testSlicing(self): + # Test that you can slice an exception directly instead of requiring + # going through the 'args' attribute. 
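+        # (Illustrative aside, not part of the original test: in Python 2,
+        # BaseException still implements __getitem__/__getslice__, so for
+        # e = BaseException(1, 2, 3), e[0] == 1 and e[:2] == (1, 2), exactly
+        # mirroring e.args. Python 3 dropped this behaviour.)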
+ args = (1, 2, 3) + exc = BaseException(*args) + self.assertEqual(exc[:], args) + self.assertEqual(exc.args[:], args) + + def testKeywordArgs(self): + # test that builtin exception don't take keyword args, + # but user-defined subclasses can if they want + self.assertRaises(TypeError, BaseException, a=1) + + class DerivedException(BaseException): + def __init__(self, fancy_arg): + BaseException.__init__(self) + self.fancy_arg = fancy_arg + + x = DerivedException(fancy_arg=42) + self.assertEqual(x.fancy_arg, 42) + + def testInfiniteRecursion(self): + def f(): + return f() + self.assertRaises(RuntimeError, f) + + def g(): + try: + return g() + except ValueError: + return -1 + + # The test prints an unraisable recursion error when + # doing "except ValueError", this is because subclass + # checking has recursion checking too. + with captured_output("stderr"): + try: + g() + except RuntimeError: + pass + except: + self.fail("Should have raised KeyError") + else: + self.fail("Should have raised KeyError") + + def testUnicodeStrUsage(self): + # Make sure both instances and classes have a str and unicode + # representation. + self.assertTrue(str(Exception)) + self.assertTrue(unicode(Exception)) + self.assertTrue(str(Exception('a'))) + self.assertTrue(unicode(Exception(u'a'))) + self.assertTrue(unicode(Exception(u'\xe1'))) + + def testUnicodeChangeAttributes(self): + # See issue 7309. This was a crasher. + + u = UnicodeEncodeError('baz', u'xxxxx', 1, 5, 'foo') + self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: foo") + u.end = 2 + self.assertEqual(str(u), "'baz' codec can't encode character u'\\x78' in position 1: foo") + u.end = 5 + u.reason = 0x345345345345345345 + self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: 965230951443685724997") + u.encoding = 4000 + self.assertEqual(str(u), "'4000' codec can't encode characters in position 1-4: 965230951443685724997") + u.start = 1000 + self.assertEqual(str(u), "'4000' codec can't encode characters in position 1000-4: 965230951443685724997") + + u = UnicodeDecodeError('baz', 'xxxxx', 1, 5, 'foo') + self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: foo") + u.end = 2 + self.assertEqual(str(u), "'baz' codec can't decode byte 0x78 in position 1: foo") + u.end = 5 + u.reason = 0x345345345345345345 + self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: 965230951443685724997") + u.encoding = 4000 + self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1-4: 965230951443685724997") + u.start = 1000 + self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1000-4: 965230951443685724997") + + u = UnicodeTranslateError(u'xxxx', 1, 5, 'foo') + self.assertEqual(str(u), "can't translate characters in position 1-4: foo") + u.end = 2 + self.assertEqual(str(u), "can't translate character u'\\x78' in position 1: foo") + u.end = 5 + u.reason = 0x345345345345345345 + self.assertEqual(str(u), "can't translate characters in position 1-4: 965230951443685724997") + u.start = 1000 + self.assertEqual(str(u), "can't translate characters in position 1000-4: 965230951443685724997") + + def test_badisinstance(self): + # Bug #2542: if issubclass(e, MyException) raises an exception, + # it should be ignored + class Meta(type): + def __subclasscheck__(cls, subclass): + raise ValueError() + + class MyException(Exception): + __metaclass__ = Meta + pass + + with captured_output("stderr") as stderr: + try: + raise KeyError() + except MyException, e: + 
self.fail("exception should not be a MyException") + except KeyError: + pass + except: + self.fail("Should have raised KeyError") + else: + self.fail("Should have raised KeyError") + + with captured_output("stderr") as stderr: + def g(): + try: + return g() + except RuntimeError: + return sys.exc_info() + e, v, tb = g() + self.assertTrue(e is RuntimeError, e) + self.assertIn("maximum recursion depth exceeded", str(v)) + + def test_new_returns_invalid_instance(self): + # See issue #11627. + class MyException(Exception): + def __new__(cls, *args): + return object() + + with self.assertRaises(TypeError): + raise MyException + + def test_assert_with_tuple_arg(self): + try: + assert False, (3,) + except AssertionError as e: + self.assertEqual(str(e), "(3,)") + + +# Helper class used by TestSameStrAndUnicodeMsg +class ExcWithOverriddenStr(Exception): + """Subclass of Exception that accepts a keyword 'msg' arg that is + returned by __str__. 'msg' won't be included in self.args""" + def __init__(self, *args, **kwargs): + self.msg = kwargs.pop('msg') # msg should always be present + super(ExcWithOverriddenStr, self).__init__(*args, **kwargs) + def __str__(self): + return self.msg + + +class TestSameStrAndUnicodeMsg(unittest.TestCase): + """unicode(err) should return the same message of str(err). See #6108""" + + def check_same_msg(self, exc, msg): + """Helper function that checks if str(exc) == unicode(exc) == msg""" + self.assertEqual(str(exc), msg) + self.assertEqual(str(exc), unicode(exc)) + + def test_builtin_exceptions(self): + """Check same msg for built-in exceptions""" + # These exceptions implement a __str__ method that uses the args + # to create a better error message. unicode(e) should return the same + # message. + exceptions = [ + SyntaxError('invalid syntax', ('', 1, 3, '2+*3')), + IOError(2, 'No such file or directory'), + KeyError('both should have the same quotes'), + UnicodeDecodeError('ascii', '\xc3\xa0', 0, 1, + 'ordinal not in range(128)'), + UnicodeEncodeError('ascii', u'\u1234', 0, 1, + 'ordinal not in range(128)') + ] + for exception in exceptions: + self.assertEqual(str(exception), unicode(exception)) + + def test_0_args(self): + """Check same msg for Exception with 0 args""" + # str() and unicode() on an Exception with no args should return an + # empty string + self.check_same_msg(Exception(), '') + + def test_0_args_with_overridden___str__(self): + """Check same msg for exceptions with 0 args and overridden __str__""" + # str() and unicode() on an exception with overridden __str__ that + # returns an ascii-only string should return the same string + for msg in ('foo', u'foo'): + self.check_same_msg(ExcWithOverriddenStr(msg=msg), msg) + + # if __str__ returns a non-ascii unicode string str() should fail + # but unicode() should return the unicode string + e = ExcWithOverriddenStr(msg=u'f\xf6\xf6') # no args + self.assertRaises(UnicodeEncodeError, str, e) + self.assertEqual(unicode(e), u'f\xf6\xf6') + + def test_1_arg(self): + """Check same msg for Exceptions with 1 arg""" + for arg in ('foo', u'foo'): + self.check_same_msg(Exception(arg), arg) + + # if __str__ is not overridden and self.args[0] is a non-ascii unicode + # string, str() should try to return str(self.args[0]) and fail. + # unicode() should return unicode(self.args[0]) and succeed. 
+ e = Exception(u'f\xf6\xf6') + self.assertRaises(UnicodeEncodeError, str, e) + self.assertEqual(unicode(e), u'f\xf6\xf6') + + def test_1_arg_with_overridden___str__(self): + """Check same msg for exceptions with overridden __str__ and 1 arg""" + # when __str__ is overridden and __unicode__ is not implemented + # unicode(e) returns the same as unicode(e.__str__()). + for msg in ('foo', u'foo'): + self.check_same_msg(ExcWithOverriddenStr('arg', msg=msg), msg) + + # if __str__ returns a non-ascii unicode string, str() should fail + # but unicode() should succeed. + e = ExcWithOverriddenStr('arg', msg=u'f\xf6\xf6') # 1 arg + self.assertRaises(UnicodeEncodeError, str, e) + self.assertEqual(unicode(e), u'f\xf6\xf6') + + def test_many_args(self): + """Check same msg for Exceptions with many args""" + argslist = [ + (3, 'foo'), + (1, u'foo', 'bar'), + (4, u'f\xf6\xf6', u'bar', 'baz') + ] + # both str() and unicode() should return a repr() of the args + for args in argslist: + self.check_same_msg(Exception(*args), repr(args)) + + def test_many_args_with_overridden___str__(self): + """Check same msg for exceptions with overridden __str__ and many args""" + # if __str__ returns an ascii string / ascii unicode string + # both str() and unicode() should succeed + for msg in ('foo', u'foo'): + e = ExcWithOverriddenStr('arg1', u'arg2', u'f\xf6\xf6', msg=msg) + self.check_same_msg(e, msg) + + # if __str__ returns a non-ascii unicode string, str() should fail + # but unicode() should succeed + e = ExcWithOverriddenStr('arg1', u'f\xf6\xf6', u'arg3', # 3 args + msg=u'f\xf6\xf6') + self.assertRaises(UnicodeEncodeError, str, e) + self.assertEqual(unicode(e), u'f\xf6\xf6') + + @cpython_only + def test_exception_with_doc(self): + import _testcapi + doc2 = "This is a test docstring." + doc4 = "This is another test docstring." 
+ + self.assertRaises(SystemError, _testcapi.make_exception_with_doc, + "error1") + + # test basic usage of PyErr_NewException + error1 = _testcapi.make_exception_with_doc("_testcapi.error1") + self.assertIs(type(error1), type) + self.assertTrue(issubclass(error1, Exception)) + self.assertIsNone(error1.__doc__) + + # test with given docstring + error2 = _testcapi.make_exception_with_doc("_testcapi.error2", doc2) + self.assertEqual(error2.__doc__, doc2) + + # test with explicit base (without docstring) + error3 = _testcapi.make_exception_with_doc("_testcapi.error3", + base=error2) + self.assertTrue(issubclass(error3, error2)) + + # test with explicit base tuple + class C(object): + pass + error4 = _testcapi.make_exception_with_doc("_testcapi.error4", doc4, + (error3, C)) + self.assertTrue(issubclass(error4, error3)) + self.assertTrue(issubclass(error4, C)) + self.assertEqual(error4.__doc__, doc4) + + # test with explicit dictionary + error5 = _testcapi.make_exception_with_doc("_testcapi.error5", "", + error4, {'a': 1}) + self.assertTrue(issubclass(error5, error4)) + self.assertEqual(error5.a, 1) + self.assertEqual(error5.__doc__, "") + + +def test_main(): + run_unittest(ExceptionTests, TestSameStrAndUnicodeMsg) + +if __name__ == '__main__': + test_main() + +from insights.core.context import (ExecutionContextMeta, HostArchiveContext, + SerializedArchiveContext, SosArchiveContext) + + +def test_host_archive_context(): + files = ["/foo/junk", "/insights_commands"] + actual = HostArchiveContext.handles(files) + assert actual == ("/", HostArchiveContext), actual + + files = ["/foo/junk", "/insights_commands/things"] + actual = HostArchiveContext.handles(files) + assert actual == ("/", HostArchiveContext), actual + + files = ["/foo/junk", "/foo/junk/insights_commands/foobar.txt"] + actual = HostArchiveContext.handles(files) + assert actual == ("/foo/junk", HostArchiveContext), actual + + +def test_host_archive_context_unsupported(): + files = ["/foo/junk", "/not_insights_commands"] + actual = HostArchiveContext.handles(files) + assert actual == (None, None), actual + + files = ["/foo/junk", "/insights_commands_not"] + actual = HostArchiveContext.handles(files) + assert actual == (None, None), actual + + +def test_sos_archive_context_supported(): + files = ["/foo/junk", "/sos_commands"] + actual = SosArchiveContext.handles(files) + assert actual == ("/", SosArchiveContext), actual + + files = ["/foo/junk", "/sos_commands/things"] + actual = SosArchiveContext.handles(files) + assert actual == ("/", SosArchiveContext), actual + + files = ["/foo/junk", "/foo/junk/sos_commands/foobar.txt"] + actual = SosArchiveContext.handles(files) + assert actual == ("/foo/junk", SosArchiveContext), actual + + +def test_sos_archive_context_unsupported(): + files = ["/foo/junk", "/sos_commands_not"] + actual = SosArchiveContext.handles(files) + assert actual == (None, None), actual + + files = ["/foo/junk", "/not_sos_commands"] + actual = SosArchiveContext.handles(files) + assert actual == (None, None), actual + + +def test_serialize_archive_context_supported(): + files = ["/foo/junk", "/insights_archive.txt"] + actual = SerializedArchiveContext.handles(files) + assert actual == ("/", SerializedArchiveContext), actual + + +def test_serialized_archive_context_unsupported(): + files = ["/foo/junk", "/sos_commands_not"] + actual = SerializedArchiveContext.handles(files) + assert actual == (None, None), actual + + files = ["/foo/junk", "/insights_archive"] + actual = SerializedArchiveContext.handles(files) + assert 
actual == (None, None), actual
+
+
+def test_unrecognized():
+    files = ["/foo/junk", "/bar/junk"]
+    actual = ExecutionContextMeta.identify(files)
+    assert actual == (None, None), actual
+
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies build of an executable with C++ define specified by a gyp define, and
+the use of the environment during regeneration when the gyp file changes.
+"""
+
+import os
+import TestGyp
+
+env_stack = []
+
+
+def PushEnv():
+  env_copy = os.environ.copy()
+  env_stack.append(env_copy)
+
+def PopEnv():
+  # Restore the environment captured by the matching PushEnv() call.
+  os.environ.clear()
+  os.environ.update(env_stack.pop())
+
+# Regenerating build files when a gyp file changes is currently only supported
+# by the make generator.
+test = TestGyp.TestGyp(formats=['make'])
+
+try:
+  PushEnv()
+  os.environ['CXXFLAGS'] = '-O0'
+  test.run_gyp('cxxflags.gyp')
+finally:
+  # We restore the environ after calling gyp. When the auto-regeneration
+  # happens, the same define should be reused anyway.
+  PopEnv()
+
+test.build('cxxflags.gyp')
+
+expect = """\
+Using no optimization flag
+"""
+test.run_built_executable('cxxflags', stdout=expect)
+
+test.sleep()
+
+try:
+  PushEnv()
+  os.environ['CXXFLAGS'] = '-O2'
+  test.run_gyp('cxxflags.gyp')
+finally:
+  # We restore the environ after calling gyp. When the auto-regeneration
+  # happens, the same define should be reused anyway.
+  PopEnv()
+
+test.build('cxxflags.gyp')
+
+expect = """\
+Using an optimization flag
+"""
+test.run_built_executable('cxxflags', stdout=expect)
+
+test.pass_test()
+
+from chimera.core.version import _chimera_version_, _chimera_description_
+from chimera.core.constants import SYSTEM_CONFIG_DEFAULT_FILENAME
+from chimera.core.location import Location, InvalidLocationException
+
+from chimera.core.systemconfig import SystemConfig
+from chimera.core.manager import Manager
+from chimera.core.path import ChimeraPath
+
+from chimera.controllers.site.main import SiteController
+from chimera.core.exceptions import ObjectNotFoundException, printException
+from chimera.core.managerlocator import ManagerLocator, ManagerNotFoundException
+
+from chimera.util.enum import Enum
+
+import Pyro.errors
+
+import sys
+import optparse
+import os.path
+import threading
+import socket
+import time
+
+__all__ = ['ChimeraCLI',
+           'Action',
+           'Parameter',
+           'action',
+           'parameter']
+
+ParameterType = Enum(
+    "INSTRUMENT", "CONTROLLER", "BOOLEAN", "CHOICE", "INCLUDE_PATH", "CONSTANT")
+
+
+class Option (object):
+    name = None
+
+    short = None
+    long = None
+
+    type = None
+    default = None
+
+    choices = None
+
+    actionGroup = None
+
+    help = None
+    helpGroup = None
+    metavar = None
+
+    target = None
+    cls = None
+
+    # if ParameterType.INSTRUMENT: is this instrument required?
+    required = False
+
+    # if ParameterType.INSTRUMENT or ParameterType.CONTROLLER
+    location = None
+
+    const = None
+
+    def __init__(self, **kw):
+
+        for key, value in kw.items():
+
+            if hasattr(self, key):
+                setattr(self, key, value)
+            else:
+                raise TypeError("Invalid option '%s'." % key)
+
+        self.validate()
+
+    def validate(self):
+
+        self.name = self.name or getattr(self.target, '__name__', None)
+
+        if not self.name:
+            raise TypeError("Option must have a name")
+
+        self.long = self.long or self.name
+        self.help = self.help or getattr(self.target, '__doc__', None)
+
+        if self.short and self.short[0] != '-':
+            self.short = "-" + self.short
+
+        if self.long and self.long[0] != '-':
+            self.long = "--" + self.long
+
+        if self.name and self.name[0] == '-':
+            self.name = self.name[self.name.rindex('-') + 1:]
+
+        if self.help:
+            self.help = self.help.strip().replace("\n", " ")
+            if self.default:
+                self.help += " [default=%default]"
+
+        if self.metavar:
+            self.metavar = self.metavar.upper()
+        else:
+            self.metavar = self.name.upper()
+
+    def __str__(self):
+        s = ""
+        s += "<%s " % self.__class__.__name__
+        for name in dir(self):
+            attr = getattr(self, name)
+            if not name.startswith("_") and not hasattr(attr, '__call__'):
+                s += "%s=%s " % (name, attr)
+        s = s[:-1]
+        s += ">"
+        return s
+
+    def __repr__(self):
+        return self.__str__()
+
+
+class Action (Option):
+    pass
+
+
+class Parameter (Option):
+    pass
+
+
+def action(*args, **kwargs):
+    """
+    Defines a command line action with short name 'short' and long name
+    'long'. If 'short' is not given, the first letter of the method name
+    will be used (if possible); if 'long' is not given, the full method
+    name will be used.
+
+    Use 'type' if the action requires a direct parameter, like '--to 10';
+    in this case, the action should look like this:
+
+    # @action(long='to', type='int')
+    # def move_to (self, options):
+    #     inst.moveTo(options.to)
+
+    See L{Action} for information about valid keyword arguments.
+    """
+
+    def mark_action(func):
+        kwargs["target"] = func
+        act = Action(**kwargs)
+        func.__payload__ = act
+        return func
+
+    if len(args) > 0:
+        return mark_action(args[0])
+    else:
+        return mark_action
+
+
+def parameter(*args, **kwargs):
+    """
+    Defines a command line parameter with short name 'short' and long name
+    'long'. If 'short' is not given, the first letter of the method name
+    will be used (if possible); if 'long' is not given, the full method
+    name will be used. If 'type' is given, the parameter will be checked
+    to match 'type'. The default value, if any, should be passed in
+    'default'.
+
+    See L{Parameter} for information about valid keyword arguments.
+    """
+
+    def mark_param(func):
+        kwargs["target"] = func
+        param = Parameter(**kwargs)
+        func.__payload__ = param
+        return func
+
+    if len(args) > 0:
+        return mark_param(args[0])
+    else:
+        return mark_param
+
+
+class CLICheckers:
+
+    @staticmethod
+    def check_includepath(option, opt_str, value, parser):
+        if not value or not os.path.isdir(os.path.abspath(value)):
+            raise optparse.OptionValueError(
+                "Couldn't find %s include path." % value)
+        l = getattr(parser.values, "%s" % option.dest)
+        l.append(value)
+
+    @staticmethod
+    def check_location(option, opt_str, value, parser):
+        try:
+            l = Location(value)
+        except InvalidLocationException:
+            raise optparse.OptionValueError(
+                "%s isn't a valid location." % value)
+
+        setattr(parser.values, "%s" % option.dest, value)
+
+
+class CLIValues (object):
+
+    """
+    This class mimics the optparse.Values class, but adds an ordered list
+    to keep track of the order in which the command line parameters were
+    parsed. This is important for ChimeraCLI to keep its CLIs highly
+    usable.
+
+    For every option it parses, OptionParser will call setattr to store
+    the command line value; we just keep track of the order and ChimeraCLI
+    does the rest.
+ """ + + def __init__(self, defaults=None): + + if defaults: + for (attr, val) in defaults.items(): + setattr(self, attr, val) + + object.__setattr__(self, '__order__', []) + + def __setattr__(self, attr, value): + + object.__setattr__(self, attr, value) + + if hasattr(self, '__order__'): + order = object.__getattribute__(self, '__order__') + order.append(attr) + + +class ChimeraCLI (object): + + """ + Create a command line program with automatic parsing of actions + and parameters based on decorators. + + This class define common methods for a command line interface + (CLI) program. You should extends it and add methods with specific + decorators to create personalized CLI programs. + + This class defines a CLI program which accepts parameters (of any + kind) and do actions using those parameters. Only one action will + run for a given command line. if more than one action was asked, + only the first will run. + + The general form of the arguments that CLI accepts is given + below: + + cli-program (--action-1|--action-2|...|--action-n) + [--param-1=value1,--param-2=value-2|...|--param-n=value-n] + + Al parameters are optional, action code will check for required + parameters and shout if needed. + + At least one action is required, if none given, --help will be + fired. + + There are a few auto-generated options: + --help --quiet --verbose (default=True) --log=file + + To create actions, use 'action' decorator. If that action was + detected on the command line arguments, action method will be + called with an object containing all the parameters available. + + For example: + + @action(short='s', long='slew'): + def slew(self, options): + inst.slew(options.ra, options.dec) + + To define parameters, use parameter decorator or addParameter method. + The parameter method passed to the decorator will be called to validate + the parameter value given on the command line. Otherwise, no + validation, besides type checking, will be done. + + For example: + + self.addParameter(name='ra', help='Help for RA', type=string) + + or + + @parameter(long='ra', type=string) + def ra(self, value): + ''' + Help for RA + ''' + # validate + # return valid value or throw ValueError + + When you define a Parameter using @parameter decorator, + the name of the decorated function will be available in the options + dictionary passed to every action. Otherwise, you need to use name + keyword to define different names or to use with attribute based parameters + + Before run the selected action, ChimeraCLI runs the method + __start__, passing all the parameters and the action that would + run. After the action be runned, __stop__ would be called. 
+ + """ + + def __init__(self, prog, description, version, + port=None, verbosity=True, + instrument_path=True, controllers_path=True): + + self.parser = optparse.OptionParser(prog=prog, + description=_chimera_description_ + + " - " + description, + version="Chimera: %s\n%s: %s" % + (_chimera_version_, prog, version)) + + # hack to inject our exit funciton into the parser + def parser_exit(status=0, msg=None): + return self.exit(msg=msg, ret=status) + + self.parser.exit = parser_exit + + self.options = None + + self._actions = {} + self._parameters = {} + + self._helpGroups = {} + + self._aborting = False + + self._keepRemoteManager = True + + # shutdown event + self.died = threading.Event() + + # base actions and parameters + + if verbosity: + self.addParameters(dict(name="quiet", short="q", long="quiet", + type=ParameterType.BOOLEAN, default=True, + help="Don't display information while working."), + + dict(name="verbose", short="v", long="verbose", + type=ParameterType.BOOLEAN, default=False, + help="Display information while working")) + + self.addHelpGroup("LOCALMANAGER", "Client Configuration") + self.addParameters(dict(name="port", short="P", helpGroup="LOCALMANAGER", default=port or 9000, + help="Port to which the local Chimera instance will listen to."), + dict(name="config", default=SYSTEM_CONFIG_DEFAULT_FILENAME, + help="Chimera configuration file to use. default=%default", + helpGroup="LOCALMANAGER")) + + self.localManager = None + self._remoteManager = None + self.sysconfig = None + + self._needInstrumentsPath = instrument_path + self._needControllersPath = controllers_path + + def _print(self, *args, **kwargs): + sep = kwargs.pop("sep", " ") + end = kwargs.pop("end", "\n") + stream = kwargs.pop("file", sys.stdout) + + for arg in args: + stream.write(arg) + stream.write(sep) + + stream.write(end) + stream.flush() + + def out(self, *args, **kwargs): + self._print(*args, **kwargs) + + def err(self, *args, **kwargs): + kwargs["file"] = sys.stderr + self._print(*args, **kwargs) + + def addParameters(self, *params): + for param in params: + p = Parameter(**param) + self._parameters[p.name] = p + + def addActions(self, *actions): + for action in actions: + act = Action(**action) + self._actions[act.name] = act + + def addHelpGroup(self, name, shortdesc, longdesc=None): + self._helpGroups[name] = optparse.OptionGroup( + self.parser, shortdesc, longdesc) + + def addInstrument(self, **params): + params["type"] = ParameterType.INSTRUMENT + self.addParameters(params) + + if self._needInstrumentsPath: + if not "PATHS" in self._helpGroups: + self.addHelpGroup("PATHS", "Object Paths") + + self.addParameters(dict(name="inst_dir", + short="I", + long="instruments-dir", + helpGroup="PATHS", + type=ParameterType.INCLUDE_PATH, + default=ChimeraPath().instruments, + help="Append PATH to %s load path. " + "This option could be setted multiple " + "times to add multiple directories." % + params["name"].capitalize(), + metavar="PATH")) + self._needInstrumentsPath = False + + def addController(self, **params): + params["type"] = ParameterType.CONTROLLER + self.addParameters(params) + + if self._needControllersPath: + if not "PATHS" in self._helpGroups: + self.addHelpGroup("PATHS", "Object Paths") + + self.addParameters(dict(name="ctrl_dir", + short="C", + long="controllers-dir", + helpGroup="PATHS", + type=ParameterType.INCLUDE_PATH, + default=ChimeraPath().controllers, + help="Append PATH to controllers load path. 
" + "This option could be setted multiple " + "times to add multiple directories.", + metavar="PATH")) + self._needControllersPath = False + + def exit(self, msg=None, ret=1): + self.__stop__(self.options) + + if msg: + self.err(msg) + + self.died.set() + + sys.exit(ret) + + def run(self, cmdlineArgs): + t = threading.Thread(target=self._run, args=(cmdlineArgs,)) + t.setDaemon(True) + t.start() + + def _run(self, cmdlineArgs): + + # create parser from defined actions and parameters + self._createParser() + + # run the parser + self.options, args = self.parser.parse_args( + cmdlineArgs, values=CLIValues( + defaults=self.parser.get_default_values().__dict__)) + + # check which actions should run and if there is any conflict + actions = self._getActions(self.options) + + if not actions: + self.exit( + "Please select one action or --help for more information.") + + # for each defined parameter, run validation code + self._validateParameters(self.options) + + # setup objects + self._setupObjects(self.options) + + self.__start__(self.options, args) + + # run actions + for action in actions: + if not self._runAction(action, self.options): + self.exit(ret=1) + + self.__stop__(self.options) + + self.died.set() + + def wait(self, abort=True): + try: + while not self.died.isSet(): + time.sleep(0.1) + except KeyboardInterrupt: + if abort: + self.abort() + + def _startSystem(self, options): + + try: + self.sysconfig = SystemConfig.fromFile(options.config) + self.localManager = Manager( + self.sysconfig.chimera["host"], getattr(options, 'port', 9000)) + self._remoteManager = ManagerLocator.locate( + self.sysconfig.chimera["host"], self.sysconfig.chimera["port"]) + except ManagerNotFoundException: + # FIXME: better way to start Chimera + site = SiteController(wait=False) + site.startup() + + self._keepRemoteManager = False + self._remoteManager = ManagerLocator.locate( + self.sysconfig.chimera["host"], self.sysconfig.chimera["port"]) + + def _belongsTo(self, meHost, mePort, location): + + if not location: + return False + + meName = socket.gethostbyname(meHost) + return (location.host is None or location.host in (meHost, meName)) and \ + (location.port is None or location.port == mePort) + + def _setupObjects(self, options): + + # CLI requested objects + instruments = dict( + [(x.name, x) for x in self._parameters.values() if x.type == ParameterType.INSTRUMENT]) + controllers = dict( + [(x.name, x) for x in self._parameters.values() if x.type == ParameterType.CONTROLLER]) + + # starts a local Manager (not using sysconfig) or a full sysconfig + # backed if needed. + self._startSystem(self.options) + + # create locations + for inst in instruments.values() + controllers.values(): + + # use user instrument if given + if inst.default != getattr(options, inst.name): + try: + inst.location = Location(getattr(options, inst.name)) + except InvalidLocationException: + self.exit( + "Invalid location: %s. See --help for more information" % + getattr(options, inst.name)) + + else: + # no instrument selected, ask remote Chimera instance for the + # newest + if self._remoteManager: + insts = self._remoteManager.getResourcesByClass(inst.cls) + if insts: + # get the older + inst.location = insts[0] + + if not inst.location and inst.required: + self.exit("Couldn't find %s configuration. 
" + "Edit %s or see --help for more information" % + (inst.name.capitalize(), + os.path.abspath(options.config))) + + for inst in instruments.values() + controllers.values(): + + inst_proxy = None + + try: + inst_proxy = self._remoteManager.getProxy(inst.location) + except ObjectNotFoundException: + if inst.required == True: + self.exit( + "Couldn't find %s. (see --help for more information)" % inst.name.capitalize()) + + # save values in CLI object (which users are supposed to inherites + # from). + setattr(self, inst.name, inst_proxy) + + def __start__(self, options, args): + pass + + def __stop__(self, options): + if self.localManager: + self.localManager.shutdown() + + try: + if self._remoteManager and not self._keepRemoteManager: + self._remoteManager.shutdown() + except Pyro.errors.ConnectionClosedError: + pass + + def _createParser(self): + + for name in dir(self): + attr = getattr(self, name) + + if isinstance(attr, Action) or hasattr(attr, '__payload__'): + + try: + # decorated methods + payload = getattr(attr, '__payload__') + except AttributeError: + # pure attribute + payload = attr + + if type(payload) == Action: + self._actions[payload.name] = payload + elif type(payload) == Parameter: + self._parameters[payload.name] = payload + + for action in self._actions.values(): + + if not action.actionGroup: + action.actionGroup = action.name + + if action.type: + kind = "store" + else: + kind = "store_true" + + group = self._helpGroups.get(action.helpGroup, self.parser) + + if action.short: + group.add_option(action.short, action.long, + action=kind, type=action.type, dest=action.name, + help=action.help, metavar=action.metavar) + else: + group.add_option(action.long, dest=action.name, + action=kind, type=action.type, + help=action.help, metavar=action.metavar) + + for param in self._parameters.values(): + + if not param.type: + param.type = "string" + + group = self._helpGroups.get(param.helpGroup, self.parser) + + option_action = "store" + option_callback = None + option_choices = None + option_const = None + option_type = param.type or None + + if param.type in (ParameterType.INSTRUMENT, ParameterType.CONTROLLER): + option_type = "string" + option_action = "callback" + option_callback = CLICheckers.check_location + + if param.type == ParameterType.BOOLEAN: + option_action = "store_true" + option_type = None + + if param.type == ParameterType.CONSTANT: + option_action = "store_const" + option_type = None + option_const = param.const + + if param.type == ParameterType.INCLUDE_PATH: + option_action = "callback" + option_type = "string" + option_callback = CLICheckers.check_includepath + + if param.type == ParameterType.CHOICE: + option_action = "store" + option_type = "choice" + option_choices = param.choices + + option_kwargs = dict(action=option_action, + dest=param.name, + help=param.help, metavar=param.metavar) + + if option_callback: + option_kwargs["callback"] = option_callback + + if option_type: + option_kwargs["type"] = option_type + + if option_choices: + option_kwargs["choices"] = option_choices + + if option_const: + option_kwargs["const"] = option_const + + if param.short: + group.add_option(param.short, param.long, **option_kwargs) + else: + group.add_option(param.long, **option_kwargs) + + for group in self._helpGroups.values(): + self.parser.add_option_group(group) + + defaults = {} + + for action in self._actions.values(): + if action.default is not None: + defaults[action.name] = action.default + + for param in self._parameters.values(): + if param.default is not 
None: + defaults[param.name] = param.default + + self.parser.set_defaults(**defaults) + + def _getActions(self, options): + + # actions in command line (and run) order + actions = [self._actions[action] + for action in self.options.__order__ if action in self._actions] + + # add default actions + # FIXME: there is no way to disable a default action? + actions.extend( + [action for action in self._actions.values() if action.default == True]) + + if not actions: + return [] + + for action in actions: + for other in actions: + if action != other and action.actionGroup == other.actionGroup: + self.exit("Cannot use %s and %s at the same time." % + (action.long, other.long)) + + # remove duplicates + uniqueActions = [] + + for action in actions: + if action in uniqueActions: + continue + uniqueActions.append(action) + + return uniqueActions + + def _validateParameters(self, options): + + paramValues = [getattr(options, param) + for param in self._parameters.keys()] + + for name, value in zip(self._parameters.keys(), paramValues): + param = self._parameters[name] + + try: + # to signal invalid values, use self.exit or throws a + # ValueError exception if None returned, just copy passed value + if param.target is not None: + newValue = getattr(self, param.target.__name__)(value) + setattr(options, name, newValue or value) + except ValueError, e: + self.exit("Invalid value for %s: %s" % (name, e)) + + def _runAction(self, action, options): + + try: + if action.target is not None: + method = getattr(self, action.target.__name__) + method(options) + except Exception, e: + self.err("Something wrong with '%s' action." % (action.name)) + printException(e) + return False + + return True + + def abort(self): + + if self._aborting is False: + self._aborting = True + else: + return + + if hasattr(self, '__abort__'): + abort = getattr(self, '__abort__') + if hasattr(abort, '__call__'): + t = threading.Thread(target=abort) + t.start() + try: + t.join() + except KeyboardInterrupt: + pass + + self.exit(ret=2) + + def isAborting(self): + return self._aborting + +# -*- coding: utf-8 -*- +# +# documentation build configuration file, created by +# sphinx-quickstart on Thu Jul 23 19:40:08 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. +import sys +import os, subprocess +import shlex +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) +libpath = os.path.join(curr_path, '../wrapper/') +sys.path.insert(0, os.path.join(curr_path, '../wrapper/')) +sys.path.insert(0, curr_path) +from sphinx_util import MarkdownParser, AutoStructify + +# -- General configuration ------------------------------------------------ + +# General information about the project. +project = u'rabit' +copyright = u'2015, rabit developers' +author = u'rabit developers' +github_doc_root = 'https://github.com/dmlc/rabit/tree/master/doc/' + +# add markdown parser +MarkdownParser.github_doc_root = github_doc_root +source_parsers = { + '.md': MarkdownParser, +} +# Version information. 
+import rabit + +version = rabit.__version__ +release = rabit.__version__ + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinx.ext.mathjax', + 'breathe', +] + +# Use breathe to include doxygen documents +breathe_projects = {'rabit' : 'doxygen/xml/'} +breathe_default_project = 'rabit' + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = ['.rst', '.md'] + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# html_theme = 'alabaster' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Output file base name for HTML help builder. +htmlhelp_basename = project + 'doc' + +# -- Options for LaTeX output --------------------------------------------- +latex_elements = { +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [
+    (master_doc, 'rabit.tex', project,
+     author, 'manual'),
+]
+
+# hook for doxygen
+def run_doxygen(folder):
+    """Run the doxygen make command in the designated folder."""
+    try:
+        retcode = subprocess.call("cd %s; make doxygen" % folder, shell=True)
+        if retcode < 0:
+            sys.stderr.write("doxygen terminated by signal %s" % (-retcode))
+    except OSError as e:
+        sys.stderr.write("doxygen execution failed: %s" % e)
+
+
+def run_build_lib(folder):
+    """Run make in the designated folder and stage the doxygen HTML output
+    under _build/html/doxygen."""
+    try:
+        retcode = subprocess.call("cd %s; make" % folder, shell=True)
+        retcode = subprocess.call("rm -rf _build/html/doxygen", shell=True)
+        retcode = subprocess.call("mkdir _build", shell=True)
+        retcode = subprocess.call("mkdir _build/html", shell=True)
+        retcode = subprocess.call("cp -rf doxygen/html _build/html/doxygen", shell=True)
+        if retcode < 0:
+            sys.stderr.write("build terminated by signal %s" % (-retcode))
+    except OSError as e:
+        sys.stderr.write("build execution failed: %s" % e)
+
+
+def generate_doxygen_xml(app):
+    """Run the doxygen make commands if we're on the ReadTheDocs server"""
+    read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
+    if read_the_docs_build:
+        run_doxygen('..')
+        sys.stderr.write('Check if shared lib exists\n')
+        run_build_lib('..')
+        sys.stderr.write('The wrapper path: %s\n' % str(os.listdir('../wrapper')))
+        rabit._loadlib()
+
+
+def setup(app):
+    # Add hook for building doxygen xml when needed
+    app.connect("builder-inited", generate_doxygen_xml)
+    app.add_config_value('recommonmark_config', {
+        'url_resolver': lambda url: github_doc_root + url,
+    }, True)
+    app.add_transform(AutoStructify)
+
+#!/usr/bin/env python
+
+# icing.py
+#
+# Created by Eric Sumner on 2010-10-29.
+# Copyright 2010 Haleret Productions. All rights reserved.
+#
+# See the file named "COPYING" in this directory for licensing information.
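+#
+# A rough sketch of the expansion this preprocessor performs (spacing is
+# approximate; the syntax summary follows the import below):
+#
+#     id nums = @[1, 2];
+#
+# becomes something like
+#
+#     id nums = [NSMutableArray arrayWithObjects: [NSNull maybe:1],[NSNull maybe: 2], nil];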
+ +import sys + +# Syntactic sugar for common object types: +# +# NSNumber: @42 +# NSMutableArray: @[a, b, c] +# NSMutableDictionary: @{ k1 => v1, k2 => v2, k3 => v3 } +# +# You must #import "NSArray.h" to use the array or dictionary syntax + +# Implementation notes: +# - Files that don't use the new syntax should come out completely unchanged +# - Expansions should always span the same number of lines as their source, +# so that line numbers stay in sync +# - Should output as much as we can on unexpected end-of-file +# - Translations should be as direct as possible, and easy to understand + +out = sys.stdout +source = sys.stdin +matched_tokens = {'[':']', '{':'}', '(':')'} + +# Represents the input file; probably shouldn't slurp the whole file, +# but it was easy +class charstream: + def __init__(self, source): + self.source = source + + def next(self): + if self.source: + rtn = self.source[0] + self.source = self.source[1:] + return rtn + else: raise StopIteration + + def push(self, c): + self.source = c+self.source + + def __iter__(self): + return self + +source = charstream(source.read()) + +# Pass over a single- or double-quoted string without doing macro expansion +def read_string(delim, source): + out = delim + escaped = False + for c in source: + out += c + if escaped: + escaped = False + else: + if c == '\\': escaped = True + if c == delim: break + return out + +# Pass over comments without doing macro expansion +# Does not support nested /* style comments +def read_comment(delim, source): + out = delim + comment_type = source.next() + out += comment_type + if comment_type == '/': + for c in source: + out += c + if c in '\r\n': break + return out + elif comment_type == '*': + for c in source: + out += c + if out.endswith('*/'): break + return out + else: + source.push(comment_type) + return delim + +# Dispatch to the proper generator after seeing an @ +def read_directive(delim, source): + out = delim + source.next() + directive_type = out[-1] + if directive_type == '[': + return parse_array_constant(out, source) + elif directive_type == '{': + return parse_dict_constant(out, source) + elif directive_type in '-0123456789': + return parse_number_constant(out, source) + else: + source.push(directive_type) + return delim + +# Read (and macroexpand) input until one of the end tokens is reached at the +# current nesting level (of quotes, parens, braces, comments, etc) +def read_argument(source, *end): + out = '' + for c in source: + if c in '"\'': out += read_string(c, source) + elif c in '/': out += read_comment(c, source) + elif c in '@': out += read_directive(c, source) + elif c in matched_tokens: + text, delim = read_argument(source, matched_tokens[c]) + out += text + if delim: out += delim + else: + out += c + for e in end: + if out.endswith(e): return out[:-len(e)], e + return out, None + +# Generate NSMutableArray constructor for @[a, b, c] +def parse_array_constant(out, source): + out = '[NSMutableArray arrayWithObjects: ' + args = [] + while True: + arg, delim = read_argument(source, ',', ']') + args.append('[NSNull maybe:%s]' % arg) + if delim == ']': break + if not delim: return out + ','.join(args) + if len(args) and args[-1].strip(): + args += [' '] + return out + ','.join(args) + 'nil]' + +# Generate NSMutableDictionary constructor for @{ k1 => v1, k2 => v2 } +def parse_dict_constant(out, source): + out = '[NSMutableDictionary dictionaryWithObjectsAndKeys: ' + args = [] + last_delim = ',' + while True: + key, delim = read_argument(source, '=>', '}') + if delim == '}': + if 
key.strip(): + args += ['', key, ' '] + else: + args += [key] + break + val, delim = read_argument(source, ',', '}') + args += ['[NSNull maybe:%s]' % val, key] + if delim == '}': + args += [' '] + break + if not delim: return out + ','.join(args) + return out + ','.join(args) + 'nil]' + +# Generate NSNumber constructor for @4.2e+12 +def parse_number_constant(out, source): + out = out[1:] + for c in source: + if c.isalnum() or c == '.' or (out[-1] in 'eE' and c in '+-'): + out += c + else: + source.push(c) + break + return '[NSNumber numberWithDouble: %s ]' % out + +# Process the input file +for c in source: + if c in '"\'': out.write(read_string(c, source)) + elif c in '/': out.write(read_comment(c, source)) + elif c in '@': out.write(read_directive(c, source)) + else: out.write(c) + +# Test interfaces to fortran blas. +# +# The tests are more of interface than they are of the underlying blas. +# Only very small matrices checked -- N=3 or so. +# +# !! Complex calculations really aren't checked that carefully. +# !! Only real valued complex numbers are used in tests. + +from __future__ import division, print_function, absolute_import + +from numpy import float32, float64, complex64, complex128, arange, array, \ + zeros, shape, transpose, newaxis, common_type, conjugate +from scipy.linalg import _fblas as fblas + +from scipy._lib.six import xrange + +from numpy.testing import TestCase, run_module_suite, assert_array_equal, \ + assert_allclose, assert_array_almost_equal, assert_ + + +# decimal accuracy to require between Python and LAPACK/BLAS calculations +accuracy = 5 + +# Since numpy.dot likely uses the same blas, use this routine +# to check. + + +def matrixmultiply(a, b): + if len(b.shape) == 1: + b_is_vector = True + b = b[:,newaxis] + else: + b_is_vector = False + assert_(a.shape[1] == b.shape[0]) + c = zeros((a.shape[0], b.shape[1]), common_type(a, b)) + for i in xrange(a.shape[0]): + for j in xrange(b.shape[1]): + s = 0 + for k in xrange(a.shape[1]): + s += a[i,k] * b[k, j] + c[i,j] = s + if b_is_vector: + c = c.reshape((a.shape[0],)) + return c + +################################################## +### Test blas ?axpy + + +class BaseAxpy(object): + ''' Mixin class for axpy tests ''' + + def test_default_a(self): + x = arange(3.,dtype=self.dtype) + y = arange(3.,dtype=x.dtype) + real_y = x*1.+y + y = self.blas_func(x,y) + assert_array_equal(real_y,y) + + def test_simple(self): + x = arange(3.,dtype=self.dtype) + y = arange(3.,dtype=x.dtype) + real_y = x*3.+y + y = self.blas_func(x,y,a=3.) + assert_array_equal(real_y,y) + + def test_x_stride(self): + x = arange(6.,dtype=self.dtype) + y = zeros(3,x.dtype) + y = arange(3.,dtype=x.dtype) + real_y = x[::2]*3.+y + y = self.blas_func(x,y,a=3.,n=3,incx=2) + assert_array_equal(real_y,y) + + def test_y_stride(self): + x = arange(3.,dtype=self.dtype) + y = zeros(6,x.dtype) + real_y = x*3.+y[::2] + y = self.blas_func(x,y,a=3.,n=3,incy=2) + assert_array_equal(real_y,y[::2]) + + def test_x_and_y_stride(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + real_y = x[::4]*3.+y[::2] + y = self.blas_func(x,y,a=3.,n=3,incx=4,incy=2) + assert_array_equal(real_y,y[::2]) + + def test_x_bad_size(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + try: + self.blas_func(x,y,n=4,incx=5) + except: # what kind of error should be caught? 
+ return + # should catch error and never get here + assert_(0) + + def test_y_bad_size(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + try: + self.blas_func(x,y,n=3,incy=5) + except: # what kind of error should be caught? + return + # should catch error and never get here + assert_(0) + +try: + class TestSaxpy(TestCase, BaseAxpy): + blas_func = fblas.saxpy + dtype = float32 +except AttributeError: + class TestSaxpy: + pass + + +class TestDaxpy(TestCase, BaseAxpy): + blas_func = fblas.daxpy + dtype = float64 + +try: + class TestCaxpy(TestCase, BaseAxpy): + blas_func = fblas.caxpy + dtype = complex64 +except AttributeError: + class TestCaxpy: + pass + + +class TestZaxpy(TestCase, BaseAxpy): + blas_func = fblas.zaxpy + dtype = complex128 + + +################################################## +### Test blas ?scal + +class BaseScal(object): + ''' Mixin class for scal testing ''' + + def test_simple(self): + x = arange(3.,dtype=self.dtype) + real_x = x*3. + x = self.blas_func(3.,x) + assert_array_equal(real_x,x) + + def test_x_stride(self): + x = arange(6.,dtype=self.dtype) + real_x = x.copy() + real_x[::2] = x[::2]*array(3.,self.dtype) + x = self.blas_func(3.,x,n=3,incx=2) + assert_array_equal(real_x,x) + + def test_x_bad_size(self): + x = arange(12.,dtype=self.dtype) + try: + self.blas_func(2.,x,n=4,incx=5) + except: # what kind of error should be caught? + return + # should catch error and never get here + assert_(0) + +try: + class TestSscal(TestCase, BaseScal): + blas_func = fblas.sscal + dtype = float32 +except AttributeError: + class TestSscal: + pass + + +class TestDscal(TestCase, BaseScal): + blas_func = fblas.dscal + dtype = float64 + +try: + class TestCscal(TestCase, BaseScal): + blas_func = fblas.cscal + dtype = complex64 +except AttributeError: + class TestCscal: + pass + + +class TestZscal(TestCase, BaseScal): + blas_func = fblas.zscal + dtype = complex128 + + +################################################## +### Test blas ?copy + +class BaseCopy(object): + ''' Mixin class for copy testing ''' + + def test_simple(self): + x = arange(3.,dtype=self.dtype) + y = zeros(shape(x),x.dtype) + y = self.blas_func(x,y) + assert_array_equal(x,y) + + def test_x_stride(self): + x = arange(6.,dtype=self.dtype) + y = zeros(3,x.dtype) + y = self.blas_func(x,y,n=3,incx=2) + assert_array_equal(x[::2],y) + + def test_y_stride(self): + x = arange(3.,dtype=self.dtype) + y = zeros(6,x.dtype) + y = self.blas_func(x,y,n=3,incy=2) + assert_array_equal(x,y[::2]) + + def test_x_and_y_stride(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + y = self.blas_func(x,y,n=3,incx=4,incy=2) + assert_array_equal(x[::4],y[::2]) + + def test_x_bad_size(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + try: + self.blas_func(x,y,n=4,incx=5) + except: # what kind of error should be caught? + return + # should catch error and never get here + assert_(0) + + def test_y_bad_size(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + try: + self.blas_func(x,y,n=3,incy=5) + except: # what kind of error should be caught? + return + # should catch error and never get here + assert_(0) + + # def test_y_bad_type(self): + ## Hmmm. Should this work? What should be the output. 
+ # x = arange(3.,dtype=self.dtype) + # y = zeros(shape(x)) + # self.blas_func(x,y) + # assert_array_equal(x,y) + +try: + class TestScopy(TestCase, BaseCopy): + blas_func = fblas.scopy + dtype = float32 +except AttributeError: + class TestScopy: + pass + + +class TestDcopy(TestCase, BaseCopy): + blas_func = fblas.dcopy + dtype = float64 + +try: + class TestCcopy(TestCase, BaseCopy): + blas_func = fblas.ccopy + dtype = complex64 +except AttributeError: + class TestCcopy: + pass + + +class TestZcopy(TestCase, BaseCopy): + blas_func = fblas.zcopy + dtype = complex128 + + +################################################## +### Test blas ?swap + +class BaseSwap(object): + ''' Mixin class for swap tests ''' + + def test_simple(self): + x = arange(3.,dtype=self.dtype) + y = zeros(shape(x),x.dtype) + desired_x = y.copy() + desired_y = x.copy() + x, y = self.blas_func(x,y) + assert_array_equal(desired_x,x) + assert_array_equal(desired_y,y) + + def test_x_stride(self): + x = arange(6.,dtype=self.dtype) + y = zeros(3,x.dtype) + desired_x = y.copy() + desired_y = x.copy()[::2] + x, y = self.blas_func(x,y,n=3,incx=2) + assert_array_equal(desired_x,x[::2]) + assert_array_equal(desired_y,y) + + def test_y_stride(self): + x = arange(3.,dtype=self.dtype) + y = zeros(6,x.dtype) + desired_x = y.copy()[::2] + desired_y = x.copy() + x, y = self.blas_func(x,y,n=3,incy=2) + assert_array_equal(desired_x,x) + assert_array_equal(desired_y,y[::2]) + + def test_x_and_y_stride(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + desired_x = y.copy()[::2] + desired_y = x.copy()[::4] + x, y = self.blas_func(x,y,n=3,incx=4,incy=2) + assert_array_equal(desired_x,x[::4]) + assert_array_equal(desired_y,y[::2]) + + def test_x_bad_size(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + try: + self.blas_func(x,y,n=4,incx=5) + except: # what kind of error should be caught? + return + # should catch error and never get here + assert_(0) + + def test_y_bad_size(self): + x = arange(12.,dtype=self.dtype) + y = zeros(6,x.dtype) + try: + self.blas_func(x,y,n=3,incy=5) + except: # what kind of error should be caught? + return + # should catch error and never get here + assert_(0) + +try: + class TestSswap(TestCase, BaseSwap): + blas_func = fblas.sswap + dtype = float32 +except AttributeError: + class TestSswap: + pass + + +class TestDswap(TestCase, BaseSwap): + blas_func = fblas.dswap + dtype = float64 + +try: + class TestCswap(TestCase, BaseSwap): + blas_func = fblas.cswap + dtype = complex64 +except AttributeError: + class TestCswap: + pass + + +class TestZswap(TestCase, BaseSwap): + blas_func = fblas.zswap + dtype = complex128 + +################################################## +### Test blas ?gemv +### This will be a mess to test all cases. 
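+# Reading aid only, not part of the original suite: a pure-Python sketch of
+# what the ?gemv routines are expected to compute, assuming the conventional
+# BLAS definition y := alpha*op(A)*x + beta*y with op selected by `trans`
+# (0: identity, 1: transpose, 2: conjugate transpose). It reuses the
+# matrixmultiply() helper defined near the top of this file.
+def gemv_reference(alpha, a, x, beta, y, trans=0):
+    if trans == 1:
+        a = transpose(a)
+    elif trans == 2:
+        a = transpose(conjugate(a))
+    return alpha*matrixmultiply(a, x) + beta*y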
+ + +class BaseGemv(object): + ''' Mixin class for gemv tests ''' + + def get_data(self,x_stride=1,y_stride=1): + mult = array(1, dtype=self.dtype) + if self.dtype in [complex64, complex128]: + mult = array(1+1j, dtype=self.dtype) + from numpy.random import normal, seed + seed(1234) + alpha = array(1., dtype=self.dtype) * mult + beta = array(1.,dtype=self.dtype) * mult + a = normal(0.,1.,(3,3)).astype(self.dtype) * mult + x = arange(shape(a)[0]*x_stride,dtype=self.dtype) * mult + y = arange(shape(a)[1]*y_stride,dtype=self.dtype) * mult + return alpha,beta,a,x,y + + def test_simple(self): + alpha,beta,a,x,y = self.get_data() + desired_y = alpha*matrixmultiply(a,x)+beta*y + y = self.blas_func(alpha,a,x,beta,y) + assert_array_almost_equal(desired_y,y) + + def test_default_beta_y(self): + alpha,beta,a,x,y = self.get_data() + desired_y = matrixmultiply(a,x) + y = self.blas_func(1,a,x) + assert_array_almost_equal(desired_y,y) + + def test_simple_transpose(self): + alpha,beta,a,x,y = self.get_data() + desired_y = alpha*matrixmultiply(transpose(a),x)+beta*y + y = self.blas_func(alpha,a,x,beta,y,trans=1) + assert_array_almost_equal(desired_y,y) + + def test_simple_transpose_conj(self): + alpha,beta,a,x,y = self.get_data() + desired_y = alpha*matrixmultiply(transpose(conjugate(a)),x)+beta*y + y = self.blas_func(alpha,a,x,beta,y,trans=2) + assert_array_almost_equal(desired_y,y) + + def test_x_stride(self): + alpha,beta,a,x,y = self.get_data(x_stride=2) + desired_y = alpha*matrixmultiply(a,x[::2])+beta*y + y = self.blas_func(alpha,a,x,beta,y,incx=2) + assert_array_almost_equal(desired_y,y) + + def test_x_stride_transpose(self): + alpha,beta,a,x,y = self.get_data(x_stride=2) + desired_y = alpha*matrixmultiply(transpose(a),x[::2])+beta*y + y = self.blas_func(alpha,a,x,beta,y,trans=1,incx=2) + assert_array_almost_equal(desired_y, y) + + def test_x_stride_assert(self): + # What is the use of this test? + alpha,beta,a,x,y = self.get_data(x_stride=2) + try: + y = self.blas_func(1,a,x,1,y,trans=0,incx=3) + assert_(0) + except: + pass + try: + y = self.blas_func(1,a,x,1,y,trans=1,incx=3) + assert_(0) + except: + pass + + def test_y_stride(self): + alpha,beta,a,x,y = self.get_data(y_stride=2) + desired_y = y.copy() + desired_y[::2] = alpha*matrixmultiply(a,x)+beta*y[::2] + y = self.blas_func(alpha,a,x,beta,y,incy=2) + assert_array_almost_equal(desired_y,y) + + def test_y_stride_transpose(self): + alpha,beta,a,x,y = self.get_data(y_stride=2) + desired_y = y.copy() + desired_y[::2] = alpha*matrixmultiply(transpose(a),x)+beta*y[::2] + y = self.blas_func(alpha,a,x,beta,y,trans=1,incy=2) + assert_array_almost_equal(desired_y,y) + + def test_y_stride_assert(self): + # What is the use of this test? 
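+        # incy=3 is inconsistent with a y of length 6 and n=3 (the last
+        # element would sit at index 6), so the wrapper is expected to
+        # raise; only the fact that *some* exception occurs is checked,
+        # hence the bare excepts below.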
+ alpha,beta,a,x,y = self.get_data(y_stride=2) + try: + y = self.blas_func(1,a,x,1,y,trans=0,incy=3) + assert_(0) + except: + pass + try: + y = self.blas_func(1,a,x,1,y,trans=1,incy=3) + assert_(0) + except: + pass + +try: + class TestSgemv(TestCase, BaseGemv): + blas_func = fblas.sgemv + dtype = float32 + + def test_sgemv_on_osx(self): + from itertools import product + import sys + import numpy as np + + if sys.platform != 'darwin': + return + + def aligned_array(shape, align, dtype, order='C'): + # Make array shape `shape` with aligned at `align` bytes + d = dtype() + # Make array of correct size with `align` extra bytes + N = np.prod(shape) + tmp = np.zeros(N * d.nbytes + align, dtype=np.uint8) + address = tmp.__array_interface__["data"][0] + # Find offset into array giving desired alignment + for offset in range(align): + if (address + offset) % align == 0: + break + tmp = tmp[offset:offset+N*d.nbytes].view(dtype=dtype) + return tmp.reshape(shape, order=order) + + def as_aligned(arr, align, dtype, order='C'): + # Copy `arr` into an aligned array with same shape + aligned = aligned_array(arr.shape, align, dtype, order) + aligned[:] = arr[:] + return aligned + + def assert_dot_close(A, X, desired): + assert_allclose(self.blas_func(1.0,A,X), desired, + rtol=1e-5, atol=1e-7) + + testdata = product((15,32), (10000,), (200,89), ('C','F')) + for align, m, n, a_order in testdata: + A_d = np.random.rand(m, n) + X_d = np.random.rand(n) + desired = np.dot(A_d, X_d) + # Calculation with aligned single precision + A_f = as_aligned(A_d, align, np.float32, order=a_order) + X_f = as_aligned(X_d, align, np.float32, order=a_order) + assert_dot_close(A_f, X_f, desired) + +except AttributeError: + class TestSgemv: + pass + + +class TestDgemv(TestCase, BaseGemv): + blas_func = fblas.dgemv + dtype = float64 + +try: + class TestCgemv(TestCase, BaseGemv): + blas_func = fblas.cgemv + dtype = complex64 +except AttributeError: + class TestCgemv: + pass + + +class TestZgemv(TestCase, BaseGemv): + blas_func = fblas.zgemv + dtype = complex128 + +""" +################################################## +### Test blas ?ger +### This will be a mess to test all cases. + +class BaseGer(TestCase): + def get_data(self,x_stride=1,y_stride=1): + from numpy.random import normal, seed + seed(1234) + alpha = array(1., dtype = self.dtype) + a = normal(0.,1.,(3,3)).astype(self.dtype) + x = arange(shape(a)[0]*x_stride,dtype=self.dtype) + y = arange(shape(a)[1]*y_stride,dtype=self.dtype) + return alpha,a,x,y + def test_simple(self): + alpha,a,x,y = self.get_data() + # tranpose takes care of Fortran vs. 
C(and Python) memory layout + desired_a = alpha*transpose(x[:,newaxis]*y) + a + self.blas_func(x,y,a) + assert_array_almost_equal(desired_a,a) + def test_x_stride(self): + alpha,a,x,y = self.get_data(x_stride=2) + desired_a = alpha*transpose(x[::2,newaxis]*y) + a + self.blas_func(x,y,a,incx=2) + assert_array_almost_equal(desired_a,a) + def test_x_stride_assert(self): + alpha,a,x,y = self.get_data(x_stride=2) + try: + self.blas_func(x,y,a,incx=3) + assert(0) + except: + pass + def test_y_stride(self): + alpha,a,x,y = self.get_data(y_stride=2) + desired_a = alpha*transpose(x[:,newaxis]*y[::2]) + a + self.blas_func(x,y,a,incy=2) + assert_array_almost_equal(desired_a,a) + + def test_y_stride_assert(self): + alpha,a,x,y = self.get_data(y_stride=2) + try: + self.blas_func(a,x,y,incy=3) + assert(0) + except: + pass + +class TestSger(BaseGer): + blas_func = fblas.sger + dtype = float32 +class TestDger(BaseGer): + blas_func = fblas.dger + dtype = float64 +""" +################################################## +### Test blas ?gerc +### This will be a mess to test all cases. + +""" +class BaseGerComplex(BaseGer): + def get_data(self,x_stride=1,y_stride=1): + from numpy.random import normal, seed + seed(1234) + alpha = array(1+1j, dtype = self.dtype) + a = normal(0.,1.,(3,3)).astype(self.dtype) + a = a + normal(0.,1.,(3,3)) * array(1j, dtype = self.dtype) + x = normal(0.,1.,shape(a)[0]*x_stride).astype(self.dtype) + x = x + x * array(1j, dtype = self.dtype) + y = normal(0.,1.,shape(a)[1]*y_stride).astype(self.dtype) + y = y + y * array(1j, dtype = self.dtype) + return alpha,a,x,y + def test_simple(self): + alpha,a,x,y = self.get_data() + # tranpose takes care of Fortran vs. C(and Python) memory layout + a = a * array(0.,dtype = self.dtype) + #desired_a = alpha*transpose(x[:,newaxis]*self.transform(y)) + a + desired_a = alpha*transpose(x[:,newaxis]*y) + a + #self.blas_func(x,y,a,alpha = alpha) + fblas.cgeru(x,y,a,alpha = alpha) + assert_array_almost_equal(desired_a,a) + + #def test_x_stride(self): + # alpha,a,x,y = self.get_data(x_stride=2) + # desired_a = alpha*transpose(x[::2,newaxis]*self.transform(y)) + a + # self.blas_func(x,y,a,incx=2) + # assert_array_almost_equal(desired_a,a) + #def test_y_stride(self): + # alpha,a,x,y = self.get_data(y_stride=2) + # desired_a = alpha*transpose(x[:,newaxis]*self.transform(y[::2])) + a + # self.blas_func(x,y,a,incy=2) + # assert_array_almost_equal(desired_a,a) + +class TestCgeru(BaseGerComplex): + blas_func = fblas.cgeru + dtype = complex64 + def transform(self,x): + return x +class TestZgeru(BaseGerComplex): + blas_func = fblas.zgeru + dtype = complex128 + def transform(self,x): + return x + +class TestCgerc(BaseGerComplex): + blas_func = fblas.cgerc + dtype = complex64 + def transform(self,x): + return conjugate(x) + +class TestZgerc(BaseGerComplex): + blas_func = fblas.zgerc + dtype = complex128 + def transform(self,x): + return conjugate(x) +""" + +if __name__ == "__main__": + run_module_suite() + + +""" +opcode module - potentially shared between dis and other modules which +operate on bytecodes (e.g. peephole optimizers). +""" + +__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs", + "haslocal", "hascompare", "hasfree", "opname", "opmap", + "HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs"] + +# It's a chicken-and-egg I'm afraid: +# We're imported before _opcode's made. +# With exception unheeded +# (stack_effect is not needed) +# Both our chickens and eggs are allayed. 
+# --Larry Hastings, 2013/11/23 + +try: + from _opcode import stack_effect + __all__.append('stack_effect') +except ImportError: + pass + +cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is', + 'is not', 'exception match', 'BAD') + +hasconst = [] +hasname = [] +hasjrel = [] +hasjabs = [] +haslocal = [] +hascompare = [] +hasfree = [] +hasnargs = [] + +opmap = {} +opname = [''] * 256 +for op in range(256): opname[op] = '<%r>' % (op,) +del op + +def def_op(name, op): + opname[op] = name + opmap[name] = op + +def name_op(name, op): + def_op(name, op) + hasname.append(op) + +def jrel_op(name, op): + def_op(name, op) + hasjrel.append(op) + +def jabs_op(name, op): + def_op(name, op) + hasjabs.append(op) + +# Instruction opcodes for compiled code +# Blank lines correspond to available opcodes + +def_op('POP_TOP', 1) +def_op('ROT_TWO', 2) +def_op('ROT_THREE', 3) +def_op('DUP_TOP', 4) +def_op('DUP_TOP_TWO', 5) + +def_op('NOP', 9) +def_op('UNARY_POSITIVE', 10) +def_op('UNARY_NEGATIVE', 11) +def_op('UNARY_NOT', 12) + +def_op('UNARY_INVERT', 15) + +def_op('BINARY_POWER', 19) +def_op('BINARY_MULTIPLY', 20) + +def_op('BINARY_MODULO', 22) +def_op('BINARY_ADD', 23) +def_op('BINARY_SUBTRACT', 24) +def_op('BINARY_SUBSCR', 25) +def_op('BINARY_FLOOR_DIVIDE', 26) +def_op('BINARY_TRUE_DIVIDE', 27) +def_op('INPLACE_FLOOR_DIVIDE', 28) +def_op('INPLACE_TRUE_DIVIDE', 29) + +def_op('STORE_MAP', 54) +def_op('INPLACE_ADD', 55) +def_op('INPLACE_SUBTRACT', 56) +def_op('INPLACE_MULTIPLY', 57) + +def_op('INPLACE_MODULO', 59) +def_op('STORE_SUBSCR', 60) +def_op('DELETE_SUBSCR', 61) +def_op('BINARY_LSHIFT', 62) +def_op('BINARY_RSHIFT', 63) +def_op('BINARY_AND', 64) +def_op('BINARY_XOR', 65) +def_op('BINARY_OR', 66) +def_op('INPLACE_POWER', 67) +def_op('GET_ITER', 68) + +def_op('PRINT_EXPR', 70) +def_op('LOAD_BUILD_CLASS', 71) +def_op('YIELD_FROM', 72) + +def_op('INPLACE_LSHIFT', 75) +def_op('INPLACE_RSHIFT', 76) +def_op('INPLACE_AND', 77) +def_op('INPLACE_XOR', 78) +def_op('INPLACE_OR', 79) +def_op('BREAK_LOOP', 80) +def_op('WITH_CLEANUP', 81) + +def_op('RETURN_VALUE', 83) +def_op('IMPORT_STAR', 84) + +def_op('YIELD_VALUE', 86) +def_op('POP_BLOCK', 87) +def_op('END_FINALLY', 88) +def_op('POP_EXCEPT', 89) + +HAVE_ARGUMENT = 90 # Opcodes from here have an argument: + +name_op('STORE_NAME', 90) # Index in name list +name_op('DELETE_NAME', 91) # "" +def_op('UNPACK_SEQUENCE', 92) # Number of tuple items +jrel_op('FOR_ITER', 93) +def_op('UNPACK_EX', 94) +name_op('STORE_ATTR', 95) # Index in name list +name_op('DELETE_ATTR', 96) # "" +name_op('STORE_GLOBAL', 97) # "" +name_op('DELETE_GLOBAL', 98) # "" +def_op('LOAD_CONST', 100) # Index in const list +hasconst.append(100) +name_op('LOAD_NAME', 101) # Index in name list +def_op('BUILD_TUPLE', 102) # Number of tuple items +def_op('BUILD_LIST', 103) # Number of list items +def_op('BUILD_SET', 104) # Number of set items +def_op('BUILD_MAP', 105) # Number of dict entries (upto 255) +name_op('LOAD_ATTR', 106) # Index in name list +def_op('COMPARE_OP', 107) # Comparison operator +hascompare.append(107) +name_op('IMPORT_NAME', 108) # Index in name list +name_op('IMPORT_FROM', 109) # Index in name list + +jrel_op('JUMP_FORWARD', 110) # Number of bytes to skip +jabs_op('JUMP_IF_FALSE_OR_POP', 111) # Target byte offset from beginning of code +jabs_op('JUMP_IF_TRUE_OR_POP', 112) # "" +jabs_op('JUMP_ABSOLUTE', 113) # "" +jabs_op('POP_JUMP_IF_FALSE', 114) # "" +jabs_op('POP_JUMP_IF_TRUE', 115) # "" + +name_op('LOAD_GLOBAL', 116) # Index in name list + +jabs_op('CONTINUE_LOOP', 119) # 
Target address +jrel_op('SETUP_LOOP', 120) # Distance to target address +jrel_op('SETUP_EXCEPT', 121) # "" +jrel_op('SETUP_FINALLY', 122) # "" + +def_op('LOAD_FAST', 124) # Local variable number +haslocal.append(124) +def_op('STORE_FAST', 125) # Local variable number +haslocal.append(125) +def_op('DELETE_FAST', 126) # Local variable number +haslocal.append(126) + +def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3) +def_op('CALL_FUNCTION', 131) # #args + (#kwargs << 8) +hasnargs.append(131) +def_op('MAKE_FUNCTION', 132) # Number of args with default values +def_op('BUILD_SLICE', 133) # Number of items +def_op('MAKE_CLOSURE', 134) +def_op('LOAD_CLOSURE', 135) +hasfree.append(135) +def_op('LOAD_DEREF', 136) +hasfree.append(136) +def_op('STORE_DEREF', 137) +hasfree.append(137) +def_op('DELETE_DEREF', 138) +hasfree.append(138) + +def_op('CALL_FUNCTION_VAR', 140) # #args + (#kwargs << 8) +hasnargs.append(140) +def_op('CALL_FUNCTION_KW', 141) # #args + (#kwargs << 8) +hasnargs.append(141) +def_op('CALL_FUNCTION_VAR_KW', 142) # #args + (#kwargs << 8) +hasnargs.append(142) + +jrel_op('SETUP_WITH', 143) + +def_op('LIST_APPEND', 145) +def_op('SET_ADD', 146) +def_op('MAP_ADD', 147) + +def_op('LOAD_CLASSDEREF', 148) +hasfree.append(148) + +def_op('EXTENDED_ARG', 144) +EXTENDED_ARG = 144 + +del def_op, name_op, jrel_op, jabs_op + +from ctypes import * +import unittest +import struct + +def valid_ranges(*types): + # given a sequence of numeric types, collect their _type_ + # attribute, which is a single format character compatible with + # the struct module, use the struct module to calculate the + # minimum and maximum value allowed for this format. + # Returns a list of (min, max) values. + result = [] + for t in types: + fmt = t._type_ + size = struct.calcsize(fmt) + a = struct.unpack(fmt, ("\x00"*32)[:size])[0] + b = struct.unpack(fmt, ("\xFF"*32)[:size])[0] + c = struct.unpack(fmt, ("\x7F"+"\x00"*32)[:size])[0] + d = struct.unpack(fmt, ("\x80"+"\xFF"*32)[:size])[0] + result.append((min(a, b, c, d), max(a, b, c, d))) + return result + +ArgType = type(byref(c_int(0))) + +unsigned_types = [c_ubyte, c_ushort, c_uint, c_ulong] +signed_types = [c_byte, c_short, c_int, c_long, c_longlong] + +bool_types = [] + +float_types = [c_double, c_float] + +try: + c_ulonglong + c_longlong +except NameError: + pass +else: + unsigned_types.append(c_ulonglong) + signed_types.append(c_longlong) + +try: + c_bool +except NameError: + pass +else: + bool_types.append(c_bool) + +unsigned_ranges = valid_ranges(*unsigned_types) +signed_ranges = valid_ranges(*signed_types) +bool_values = [True, False, 0, 1, -1, 5000, 'test', [], [1]] + +################################################################ + +class NumberTestCase(unittest.TestCase): + + def test_default_init(self): + # default values are set to zero + for t in signed_types + unsigned_types + float_types: + self.assertEqual(t().value, 0) + + def test_unsigned_values(self): + # the value given to the constructor is available + # as the 'value' attribute + for t, (l, h) in zip(unsigned_types, unsigned_ranges): + self.assertEqual(t(l).value, l) + self.assertEqual(t(h).value, h) + + def test_signed_values(self): + # see above + for t, (l, h) in zip(signed_types, signed_ranges): + self.assertEqual(t(l).value, l) + self.assertEqual(t(h).value, h) + + def test_bool_values(self): + from operator import truth + for t, v in zip(bool_types, bool_values): + self.assertEqual(t(v).value, truth(v)) + + def test_typeerror(self): + # Only numbers are allowed in the 
contructor, + # otherwise TypeError is raised + for t in signed_types + unsigned_types + float_types: + self.assertRaises(TypeError, t, "") + self.assertRaises(TypeError, t, None) + +## def test_valid_ranges(self): +## # invalid values of the correct type +## # raise ValueError (not OverflowError) +## for t, (l, h) in zip(unsigned_types, unsigned_ranges): +## self.assertRaises(ValueError, t, l-1) +## self.assertRaises(ValueError, t, h+1) + + def test_from_param(self): + # the from_param class method attribute always + # returns PyCArgObject instances + for t in signed_types + unsigned_types + float_types: + self.assertEqual(ArgType, type(t.from_param(0))) + + def test_byref(self): + # calling byref returns also a PyCArgObject instance + for t in signed_types + unsigned_types + float_types + bool_types: + parm = byref(t()) + self.assertEqual(ArgType, type(parm)) + + + def test_floats(self): + # c_float and c_double can be created from + # Python int, long and float + class FloatLike(object): + def __float__(self): + return 2.0 + f = FloatLike() + for t in float_types: + self.assertEqual(t(2.0).value, 2.0) + self.assertEqual(t(2).value, 2.0) + self.assertEqual(t(2L).value, 2.0) + self.assertEqual(t(f).value, 2.0) + + def test_integers(self): + class FloatLike(object): + def __float__(self): + return 2.0 + f = FloatLike() + class IntLike(object): + def __int__(self): + return 2 + i = IntLike() + # integers cannot be constructed from floats, + # but from integer-like objects + for t in signed_types + unsigned_types: + self.assertRaises(TypeError, t, 3.14) + self.assertRaises(TypeError, t, f) + self.assertEqual(t(i).value, 2) + + def test_sizes(self): + for t in signed_types + unsigned_types + float_types + bool_types: + try: + size = struct.calcsize(t._type_) + except struct.error: + continue + # sizeof of the type... + self.assertEqual(sizeof(t), size) + # and sizeof of an instance + self.assertEqual(sizeof(t()), size) + + def test_alignments(self): + for t in signed_types + unsigned_types + float_types: + code = t._type_ # the typecode + align = struct.calcsize("c%c" % code) - struct.calcsize(code) + + # alignment of the type... + self.assertEqual((code, alignment(t)), + (code, align)) + # and alignment of an instance + self.assertEqual((code, alignment(t())), + (code, align)) + + def test_int_from_address(self): + from array import array + for t in signed_types + unsigned_types: + # the array module doesn't support all format codes + # (no 'q' or 'Q') + try: + array(t._type_) + except ValueError: + continue + a = array(t._type_, [100]) + + # v now is an integer at an 'external' memory location + v = t.from_address(a.buffer_info()[0]) + self.assertEqual(v.value, a[0]) + self.assertEqual(type(v), t) + + # changing the value at the memory location changes v's value also + a[0] = 42 + self.assertEqual(v.value, a[0]) + + + def test_float_from_address(self): + from array import array + for t in float_types: + a = array(t._type_, [3.14]) + v = t.from_address(a.buffer_info()[0]) + self.assertEqual(v.value, a[0]) + self.assertTrue(type(v) is t) + a[0] = 2.3456e17 + self.assertEqual(v.value, a[0]) + self.assertTrue(type(v) is t) + + def test_char_from_address(self): + from ctypes import c_char + from array import array + + a = array('c', 'x') + v = c_char.from_address(a.buffer_info()[0]) + self.assertEqual(v.value, a[0]) + self.assertTrue(type(v) is c_char) + + a[0] = '?' 
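+        # writing through the array object must be visible through the
+        # c_char view as well, since both refer to the same memory address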
+ self.assertEqual(v.value, a[0]) + + # array does not support c_bool / 't' + # def test_bool_from_address(self): + # from ctypes import c_bool + # from array import array + # a = array(c_bool._type_, [True]) + # v = t.from_address(a.buffer_info()[0]) + # self.assertEqual(v.value, a[0]) + # self.assertEqual(type(v) is t) + # a[0] = False + # self.assertEqual(v.value, a[0]) + # self.assertEqual(type(v) is t) + + def test_init(self): + # c_int() can be initialized from Python's int, and c_int. + # Not from c_long or so, which seems strange, abc should + # probably be changed: + self.assertRaises(TypeError, c_int, c_long(42)) + + def test_float_overflow(self): + import sys + big_int = int(sys.float_info.max) * 2 + for t in float_types + [c_longdouble]: + self.assertRaises(OverflowError, t, big_int) + if (hasattr(t, "__ctype_be__")): + self.assertRaises(OverflowError, t.__ctype_be__, big_int) + if (hasattr(t, "__ctype_le__")): + self.assertRaises(OverflowError, t.__ctype_le__, big_int) + +## def test_perf(self): +## check_perf() + +from ctypes import _SimpleCData +class c_int_S(_SimpleCData): + _type_ = "i" + __slots__ = [] + +def run_test(rep, msg, func, arg=None): +## items = [None] * rep + items = range(rep) + from time import clock + if arg is not None: + start = clock() + for i in items: + func(arg); func(arg); func(arg); func(arg); func(arg) + stop = clock() + else: + start = clock() + for i in items: + func(); func(); func(); func(); func() + stop = clock() + print "%15s: %.2f us" % (msg, ((stop-start)*1e6/5/rep)) + +def check_perf(): + # Construct 5 objects + from ctypes import c_int + + REP = 200000 + + run_test(REP, "int()", int) + run_test(REP, "int(999)", int) + run_test(REP, "c_int()", c_int) + run_test(REP, "c_int(999)", c_int) + run_test(REP, "c_int_S()", c_int_S) + run_test(REP, "c_int_S(999)", c_int_S) + +# Python 2.3 -OO, win2k, P4 700 MHz: +# +# int(): 0.87 us +# int(999): 0.87 us +# c_int(): 3.35 us +# c_int(999): 3.34 us +# c_int_S(): 3.23 us +# c_int_S(999): 3.24 us + +# Python 2.2 -OO, win2k, P4 700 MHz: +# +# int(): 0.89 us +# int(999): 0.89 us +# c_int(): 9.99 us +# c_int(999): 10.02 us +# c_int_S(): 9.87 us +# c_int_S(999): 9.85 us + +if __name__ == '__main__': +## check_perf() + unittest.main() + +# -*- coding: utf-8 -*- +# Generated by Django 1.9.7 on 2016-06-10 19:56 +from __future__ import unicode_literals + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('auth', '0007_alter_validators_add_error_messages'), + ] + + operations = [ + migrations.CreateModel( + name='User', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('password', models.CharField(max_length=128, verbose_name='password')), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), + ('username', models.CharField(max_length=20, unique=True, validators=[django.core.validators.RegexValidator('^[0-9a-zA-Z]*$', message='Only alphanumeric characters are allowed.')])), + ('email', models.EmailField(max_length=255, verbose_name='email address')), + ('first_name', models.CharField(blank=True, max_length=30, null=True)), + ('last_name', models.CharField(blank=True, max_length=50, null=True)), + ('date_joined', 
models.DateTimeField(auto_now_add=True)), + ('is_active', models.BooleanField(default=True)), + ('is_staff', models.BooleanField(default=False)), + ('tipo', models.CharField(blank=True, choices=[('Administrador', 'Administrador'), ('Empleado', 'Empleado')], max_length=50, null=True)), + ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), + ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), + ], + options={ + 'abstract': False, + }, + ), + ] + +#!/usr/bin/env python +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Get rietveld stats about the review you done, or forgot to do. + +Example: + - my_reviews.py -r me@chromium.org -Q for stats for last quarter. +""" +import datetime +import math +import optparse +import os +import sys + +import rietveld + + +def username(email): + """Keeps the username of an email address.""" + return email.split('@', 1)[0] + + +def to_datetime(string): + """Load UTC time as a string into a datetime object.""" + try: + # Format is 2011-07-05 01:26:12.084316 + return datetime.datetime.strptime( + string.split('.', 1)[0], '%Y-%m-%d %H:%M:%S') + except ValueError: + return datetime.datetime.strptime(string, '%Y-%m-%d') + + +def to_time(seconds): + """Convert a number of seconds into human readable compact string.""" + prefix = '' + if seconds < 0: + prefix = '-' + seconds *= -1 + minutes = math.floor(seconds / 60) + seconds -= minutes * 60 + hours = math.floor(minutes / 60) + minutes -= hours * 60 + days = math.floor(hours / 24) + hours -= days * 24 + out = [] + if days > 0: + out.append('%dd' % days) + if hours > 0 or days > 0: + out.append('%02dh' % hours) + if minutes > 0 or hours > 0 or days > 0: + out.append('%02dm' % minutes) + if seconds > 0 and not out: + # Skip seconds unless there's only seconds. + out.append('%02ds' % seconds) + return prefix + ''.join(out) + + +class Stats(object): + def __init__(self): + self.total = 0 + self.actually_reviewed = 0 + self.latencies = [] + self.lgtms = 0 + self.multiple_lgtms = 0 + self.drive_by = 0 + self.not_requested = 0 + self.self_review = 0 + + self.percent_lgtm = 0. + self.percent_drive_by = 0. + self.percent_not_requested = 0. + self.days = 0 + + @property + def average_latency(self): + if not self.latencies: + return 0 + return sum(self.latencies) / float(len(self.latencies)) + + @property + def median_latency(self): + if not self.latencies: + return 0 + length = len(self.latencies) + latencies = sorted(self.latencies) + if (length & 1) == 0: + return (latencies[length/2] + latencies[length/2+1]) / 2. + else: + return latencies[length/2] + + @property + def percent_done(self): + if not self.total: + return 0 + return self.actually_reviewed * 100. / self.total + + @property + def review_per_day(self): + if not self.days: + return 0 + return self.total * 1. / self.days + + @property + def review_done_per_day(self): + if not self.days: + return 0 + return self.actually_reviewed * 1. / self.days + + def finalize(self, first_day, last_day): + if self.actually_reviewed: + self.percent_lgtm = (self.lgtms * 100. 
/ self.actually_reviewed) + self.percent_drive_by = (self.drive_by * 100. / self.actually_reviewed) + self.percent_not_requested = ( + self.not_requested * 100. / self.actually_reviewed) + if first_day and last_day: + self.days = (to_datetime(last_day) - to_datetime(first_day)).days + 1 + + +def _process_issue_lgtms(issue, reviewer, stats): + """Calculates LGTMs stats.""" + stats.actually_reviewed += 1 + reviewer_lgtms = len([ + msg for msg in issue['messages'] + if msg['approval'] and msg['sender'] == reviewer]) + if reviewer_lgtms > 1: + stats.multiple_lgtms += 1 + return ' X ' + if reviewer_lgtms: + stats.lgtms += 1 + return ' x ' + else: + return ' o ' + + +def _process_issue_latency(issue, reviewer, stats): + """Calculates latency for an issue that was actually reviewed.""" + from_owner = [ + msg for msg in issue['messages'] if msg['sender'] == issue['owner_email'] + ] + if not from_owner: + # Probably requested by email. + stats.not_requested += 1 + return '' + + first_msg_from_owner = None + latency = None + received = False + for index, msg in enumerate(issue['messages']): + if not first_msg_from_owner and msg['sender'] == issue['owner_email']: + first_msg_from_owner = msg + if index and not received and msg['sender'] == reviewer: + # Not first email, reviewer never received one, reviewer sent a mesage. + stats.drive_by += 1 + return '' + received |= reviewer in msg['recipients'] + + if first_msg_from_owner and msg['sender'] == reviewer: + delta = msg['date'] - first_msg_from_owner['date'] + latency = delta.seconds + delta.days * 24 * 3600 + break + + if latency is None: + stats.not_requested += 1 + return '' + if latency > 0: + stats.latencies.append(latency) + else: + stats.not_requested += 1 + return to_time(latency) + + +def _process_issue(issue): + """Preprocesses the issue to simplify the remaining code.""" + issue['owner_email'] = username(issue['owner_email']) + issue['reviewers'] = set(username(r) for r in issue['reviewers']) + # By default, hide commit-bot. + issue['reviewers'] -= set(['commit-bot']) + for msg in issue['messages']: + msg['sender'] = username(msg['sender']) + msg['recipients'] = [username(r) for r in msg['recipients']] + # Convert all times to datetime instances. + msg['date'] = to_datetime(msg['date']) + issue['messages'].sort(key=lambda x: x['date']) + + +def print_issue(issue, reviewer, stats): + """Process an issue and prints stats about it.""" + stats.total += 1 + _process_issue(issue) + if issue['owner_email'] == reviewer: + stats.self_review += 1 + latency = '' + reviewed = '' + elif any(msg['sender'] == reviewer for msg in issue['messages']): + reviewed = _process_issue_lgtms(issue, reviewer, stats) + latency = _process_issue_latency(issue, reviewer, stats) + else: + latency = 'N/A' + reviewed = '' + + # More information is available, print issue.keys() to see them. + print '%7d %10s %3s %14s %-15s %s' % ( + issue['issue'], + issue['created'][:10], + reviewed, + latency, + issue['owner_email'], + ', '.join(sorted(issue['reviewers']))) + + +def print_reviews(reviewer, created_after, created_before, instance_url): + """Prints issues |reviewer| received and potentially reviewed.""" + remote = rietveld.Rietveld(instance_url, None, None) + + # The stats we gather. Feel free to send me a CL to get more stats. + stats = Stats() + + last_issue = None + first_day = None + last_day = None + + # Column sizes need to match print_issue() output. 
+ print >> sys.stderr, ( + 'Issue Creation Did Latency Owner Reviewers') + + # See def search() in rietveld.py to see all the filters you can use. + for issue in remote.search( + reviewer=reviewer, + created_after=created_after, + created_before=created_before, + with_messages=True): + last_issue = issue + if not first_day: + first_day = issue['created'][:10] + print_issue(issue, username(reviewer), stats) + if last_issue: + last_day = last_issue['created'][:10] + stats.finalize(first_day, last_day) + + print >> sys.stderr, ( + '%s reviewed %d issues out of %d (%1.1f%%). %d were self-review.' % + (reviewer, stats.actually_reviewed, stats.total, stats.percent_done, + stats.self_review)) + print >> sys.stderr, ( + '%4.1f review request/day during %3d days (%4.1f r/d done).' % ( + stats.review_per_day, stats.days, stats.review_done_per_day)) + print >> sys.stderr, ( + '%4d were drive-bys (%5.1f%% of reviews done).' % ( + stats.drive_by, stats.percent_drive_by)) + print >> sys.stderr, ( + '%4d were requested over IM or irc (%5.1f%% of reviews done).' % ( + stats.not_requested, stats.percent_not_requested)) + print >> sys.stderr, ( + ('%4d issues LGTM\'d (%5.1f%% of reviews done),' + ' gave multiple LGTMs on %d issues.') % ( + stats.lgtms, stats.percent_lgtm, stats.multiple_lgtms)) + print >> sys.stderr, ( + 'Average latency from request to first comment is %s.' % + to_time(stats.average_latency)) + print >> sys.stderr, ( + 'Median latency from request to first comment is %s.' % + to_time(stats.median_latency)) + + +def print_count(reviewer, created_after, created_before, instance_url): + remote = rietveld.Rietveld(instance_url, None, None) + print len(list(remote.search( + reviewer=reviewer, + created_after=created_after, + created_before=created_before, + keys_only=True))) + + +def get_previous_quarter(today): + """There are four quarters, 01-03, 04-06, 07-09, 10-12. + + If today is in the last month of a quarter, assume it's the current quarter + that is requested. + """ + end_year = today.year + end_month = today.month - (today.month % 3) + 1 + if end_month <= 0: + end_year -= 1 + end_month += 12 + if end_month > 12: + end_year += 1 + end_month -= 12 + end = '%d-%02d-01' % (end_year, end_month) + begin_year = end_year + begin_month = end_month - 3 + if begin_month <= 0: + begin_year -= 1 + begin_month += 12 + begin = '%d-%02d-01' % (begin_year, begin_month) + return begin, end + + +def main(): + # Silence upload.py. + rietveld.upload.verbosity = 0 + today = datetime.date.today() + begin, end = get_previous_quarter(today) + parser = optparse.OptionParser(description=sys.modules[__name__].__doc__) + parser.add_option( + '--count', action='store_true', + help='Just count instead of printing individual issues') + parser.add_option( + '-r', '--reviewer', metavar='', + default=os.environ.get('EMAIL_ADDRESS'), + help='Filter on issue reviewer, default=%default') + parser.add_option( + '-b', '--begin', metavar='', + help='Filter issues created after the date') + parser.add_option( + '-e', '--end', metavar='', + help='Filter issues created before the date') + parser.add_option( + '-Q', '--last_quarter', action='store_true', + help='Use last quarter\'s dates, e.g. 
%s to %s' % ( + begin, end)) + parser.add_option( + '-i', '--instance_url', metavar='', + default='http://codereview.chromium.org', + help='Host to use, default is %default') + # Remove description formatting + parser.format_description = ( + lambda _: parser.description) # pylint: disable=E1101 + options, args = parser.parse_args() + if args: + parser.error('Args unsupported') + if not options.reviewer: + parser.error('$EMAIL_ADDRESS is not set, please use -r') + print >> sys.stderr, 'Searching for reviews by %s' % options.reviewer + if options.last_quarter: + options.begin = begin + options.end = end + print >> sys.stderr, 'Using range %s to %s' % ( + options.begin, options.end) + if options.count: + print_count( + options.reviewer, + options.begin, + options.end, + options.instance_url) + else: + print_reviews( + options.reviewer, + options.begin, + options.end, + options.instance_url) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) + +import unittest, time, sys, random +sys.path.extend(['.','..','../..','py']) +import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e +import getpass + +class Basic(unittest.TestCase): + def tearDown(self): + h2o.check_sandbox_for_errors() + + @classmethod + def setUpClass(cls): + # assume we're at 0xdata with it's hdfs namenode + h2o.init(1, use_hdfs=True, hdfs_version='hdp2.1', hdfs_name_node='172.16.2.186') + + @classmethod + def tearDownClass(cls): + h2o.tear_down_cloud() + + def test_hdfs_hdp2_1_fvec(self): + print "\nLoad a list of files from HDFS, parse and do 1 RF tree" + print "\nYou can try running as hduser/hduser if fail" + # larger set in my local dir + # fails because classes aren't integers + # "allstate_claim_prediction_train_set.zip", + csvFilenameAll = [ + # "3G_poker_shuffle" + ("and-testing.data", 60), + ### "arcene2_train.both", + ### "arcene_train.both", + ### "bestbuy_test.csv", + ("covtype.data", 60), + ("covtype4x.shuffle.data", 60), + # "four_billion_rows.csv", + ("hhp.unbalanced.012.data.gz", 60), + ("hhp.unbalanced.data.gz", 60), + ("leads.csv", 60), + # ("covtype.169x.data", 1200), + ("prostate_long_1G.csv", 200), + ("airlines_all.csv", 1200), + ] + + # pick 8 randomly! + if (1==0): + csvFilenameList = random.sample(csvFilenameAll,8) + # Alternatively: do the list in order! Note the order is easy to hard + else: + csvFilenameList = csvFilenameAll + + # pop open a browser on the cloud + # h2b.browseTheCloud() + + trial = 0 + print "try importing /tmp2" + d = h2i.import_only(path="tmp2/*", schema='hdfs', timeoutSecs=1000) + for (csvFilename, timeoutSecs) in csvFilenameList: + # creates csvFilename.hex from file in hdfs dir + print "Loading", csvFilename, 'from HDFS' + start = time.time() + hex_key = "a.hex" + csvPathname = "datasets/" + csvFilename + parseResult = h2i.import_parse(path=csvPathname, schema='hdfs', hex_key=hex_key, timeoutSecs=1000) + print "hdfs parse of", csvPathname, "took", time.time() - start, 'secs' + + start = time.time() + print "Saving", csvFilename, 'to HDFS' + print "Using /tmp2 to avoid the '.' 
prefixed files in /tmp2 (kills import)" + print "Unique per-user to avoid permission issues" + username = getpass.getuser() + csvPathname = "tmp2/a%s.%s.csv" % (trial, username) + # reuse the file name to avoid running out of space + csvPathname = "tmp2/a%s.%s.csv" % ('_h2o_export_files', username) + + path = "hdfs://"+ h2o.nodes[0].hdfs_name_node + "/" + csvPathname + h2o.nodes[0].export_files(src_key=hex_key, path=path, force=1, timeoutSecs=timeoutSecs) + print "export_files of", hex_key, "to", path, "took", time.time() - start, 'secs' + trial += 1 + + print "Re-Loading", csvFilename, 'from HDFS' + start = time.time() + hex_key = "a2.hex" + time.sleep(2) + d = h2i.import_only(path=csvPathname, schema='hdfs', timeoutSecs=1000) + print h2o.dump_json(d) + parseResult = h2i.import_parse(path=csvPathname, schema='hdfs', hex_key=hex_key, timeoutSecs=1000) + print "hdfs re-parse of", csvPathname, "took", time.time() - start, 'secs' + + # currently fails + # print "This comparison test only works because na's are treated as 0's. bug fix might change that na-> na" + # execExpr = "sum(%s!=%s)" % ("a.hex", "a2.hex") + # resultExec, result = h2e.exec_expr(execExpr=execExpr, timeoutSecs=30) + # self.assertEqual(result, 0.0, msg="a.hex and a2.hex weren't the same (NA treated as 0) %s" % result) + + + +if __name__ == '__main__': + h2o.unit_main() + +#!/usr/bin/python +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . +# + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + +DOCUMENTATION = ''' +--- +module: ce_vrf +version_added: "2.4" +short_description: Manages VPN instance on HUAWEI CloudEngine switches. +description: + - Manages VPN instance of HUAWEI CloudEngine switches. +author: Yang yang (@CloudEngine-Ansible) +notes: + - If I(state=absent), the route will be removed, regardless of the + non-required options. +options: + vrf: + description: + - VPN instance, the length of vrf name is 1 - 31, i.e. "test", but can not be C(_public_). + required: true + description: + description: + - Description of the vrf, the string length is 1 - 242 . + required: false + default: null + state: + description: + - Manage the state of the resource. 
+ required: false + choices: ['present','absent'] + default: present +''' + +EXAMPLES = ''' +- name: vrf module test + hosts: cloudengine + connection: local + gather_facts: no + vars: + cli: + host: "{{ inventory_hostname }}" + port: "{{ ansible_ssh_port }}" + username: "{{ username }}" + password: "{{ password }}" + transport: cli + + tasks: + + - name: Config a vpn install named vpna, description is test + ce_vrf: + vrf: vpna + description: test + state: present + provider: "{{ cli }}" + - name: Delete a vpn install named vpna + ce_vrf: + vrf: vpna + state: absent + provider: "{{ cli }}" +''' +RETURN = ''' +proposed: + description: k/v pairs of parameters passed into module + returned: always + type: dict + sample: {"vrf": "vpna", + "description": "test", + "state": "present"} +existing: + description: k/v pairs of existing switchport + returned: always + type: dict + sample: {} +end_state: + description: k/v pairs of switchport after module execution + returned: always + type: dict + sample: {"vrf": "vpna", + "description": "test", + "present": "present"} +updates: + description: command list sent to the device + returned: always + type: list + sample: ["ip vpn-instance vpna", + "description test"] +changed: + description: check to see if a change was made on the device + returned: always + type: boolean + sample: true +''' + +from xml.etree import ElementTree +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.ce import get_nc_config, set_nc_config, ce_argument_spec + + +CE_NC_GET_VRF = """ + + + + + + + + + + + + +""" + +CE_NC_CREATE_VRF = """ + + + + + %s + %s + + + + +""" + +CE_NC_DELETE_VRF = """ + + + + + %s + %s + + + + +""" + + +def build_config_xml(xmlstr): + """build_config_xml""" + + return ' ' + xmlstr + ' ' + + +class Vrf(object): + """Manange vpn instance""" + + def __init__(self, argument_spec, ): + self.spec = argument_spec + self.module = None + self.init_module() + + # vpn instance info + self.vrf = self.module.params['vrf'] + self.description = self.module.params['description'] + self.state = self.module.params['state'] + + # state + self.changed = False + self.updates_cmd = list() + self.results = dict() + self.proposed = dict() + self.existing = dict() + self.end_state = dict() + + def init_module(self): + """init_module""" + + self.module = AnsibleModule( + argument_spec=self.spec, supports_check_mode=True) + + def check_response(self, xml_str, xml_name): + """Check if response message is already succeed.""" + + if "" not in xml_str: + self.module.fail_json(msg='Error: %s failed.' 
% xml_name) + + def set_update_cmd(self): + """ set update command""" + if not self.changed: + return + if self.state == "present": + self.updates_cmd.append('ip vpn-instance %s' % (self.vrf)) + if self.description: + self.updates_cmd.append('description %s' % (self.description)) + else: + self.updates_cmd.append('undo ip vpn-instance %s' % (self.vrf)) + + def get_vrf(self): + """ check if vrf is need to change""" + + getxmlstr = CE_NC_GET_VRF + xml_str = get_nc_config(self.module, getxmlstr) + xml_str = xml_str.replace('\r', '').replace('\n', '').\ + replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\ + replace('xmlns="http://www.huawei.com/netconf/vrp"', "") + + root = ElementTree.fromstring(xml_str) + vpn_instances = root.findall( + "data/l3vpn/l3vpncomm/l3vpnInstances/l3vpnInstance") + if vpn_instances: + for vpn_instance in vpn_instances: + if vpn_instance.find('vrfName').text == self.vrf: + if vpn_instance.find('vrfDescription').text == self.description: + if self.state == "present": + return False + else: + return True + else: + return True + return self.state == "present" + else: + return self.state == "present" + + def check_params(self): + """Check all input params""" + + # vrf and description check + if self.vrf == '_public_': + self.module.fail_json( + msg='Error: The vrf name _public_ is reserved.') + if len(self.vrf) < 1 or len(self.vrf) > 31: + self.module.fail_json( + msg='Error: The vrf name length must between 1 and 242.') + if self.description: + if len(self.description) < 1 or len(self.description) > 242: + self.module.fail_json( + msg='Error: The vrf description length must between 1 and 242.') + + def operate_vrf(self): + """config/delete vrf""" + if not self.changed: + return + if self.state == "present": + if self.description is None: + configxmlstr = CE_NC_CREATE_VRF % (self.vrf, '') + else: + configxmlstr = CE_NC_CREATE_VRF % (self.vrf, self.description) + else: + configxmlstr = CE_NC_DELETE_VRF % (self.vrf, self.description) + + conf_str = build_config_xml(configxmlstr) + + recv_xml = set_nc_config(self.module, conf_str) + self.check_response(recv_xml, "OPERATE_VRF") + + def get_proposed(self): + """get_proposed""" + + if self.state == 'present': + self.proposed['vrf'] = self.vrf + if self.description: + self.proposed['description'] = self.description + + else: + self.proposed = dict() + self.proposed['state'] = self.state + + def get_existing(self): + """get_existing""" + + change = self.get_vrf() + if change: + if self.state == 'present': + self.existing = dict() + else: + self.existing['vrf'] = self.vrf + if self.description: + self.existing['description'] = self.description + self.changed = True + else: + if self.state == 'absent': + self.existing = dict() + else: + self.existing['vrf'] = self.vrf + if self.description: + self.existing['description'] = self.description + self.changed = False + + def get_end_state(self): + """get_end_state""" + + change = self.get_vrf() + if not change: + if self.state == 'present': + self.end_state['vrf'] = self.vrf + if self.description: + self.end_state['description'] = self.description + else: + self.end_state = dict() + else: + if self.state == 'present': + self.end_state = dict() + else: + self.end_state['vrf'] = self.vrf + if self.description: + self.end_state['description'] = self.description + + def work(self): + """worker""" + + self.check_params() + self.get_existing() + self.get_proposed() + self.operate_vrf() + self.set_update_cmd() + self.get_end_state() + self.results['changed'] = self.changed + 
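+        # expose the gathered state under the documented
+        # proposed/existing/end_state keys (see RETURN above)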
self.results['proposed'] = self.proposed + self.results['existing'] = self.existing + self.results['end_state'] = self.end_state + if self.changed: + self.results['updates'] = self.updates_cmd + else: + self.results['updates'] = list() + + self.module.exit_json(**self.results) + + +def main(): + """main""" + + argument_spec = dict( + vrf=dict(required=True, type='str'), + description=dict(required=False, type='str'), + state=dict(choices=['absent', 'present'], + default='present', required=False), + ) + argument_spec.update(ce_argument_spec) + interface = Vrf(argument_spec) + interface.work() + + +if __name__ == '__main__': + main() + +import collections +import json +import re +from datetime import datetime + +from coala_utils.decorators import get_public_members +from coalib.settings.FunctionMetadata import FunctionMetadata + + +def create_json_encoder(**kwargs): + class JSONEncoder(json.JSONEncoder): + + @classmethod + def _filter_params(cls, op, nop): + params = set(op) | set(nop) + return {key: kwargs[key] for key in set(kwargs) & (params)} + + def default(self, obj): + if hasattr(obj, '__json__'): + fdata = FunctionMetadata.from_function(obj.__json__) + params = self._filter_params( + fdata.optional_params, fdata.non_optional_params) + return obj.__json__(**params) + elif isinstance(obj, collections.Iterable): + return list(obj) + elif isinstance(obj, datetime): + return obj.isoformat() + elif hasattr(obj, '__getitem__') and hasattr(obj, 'keys'): + return dict(obj) + elif hasattr(obj, '__dict__'): + return {member: getattr(obj, member) + for member in get_public_members(obj)} + elif isinstance(obj, re._pattern_type): + return obj.pattern + + return json.JSONEncoder.default(self, obj) + return JSONEncoder + +# Copyright 2013: Mirantis Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +"""Contains the Rally objects.""" + +from rally.common.objects.credential import Credential # noqa +from rally.common.objects.deploy import Deployment # noqa +from rally.common.objects.task import Subtask # noqa +from rally.common.objects.task import Task # noqa +from rally.common.objects.task import Workload # noqa +from rally.common.objects.verification import Verification # noqa +from rally.common.objects.verifier import Verifier # noqa + +import sys +import re + +from .. 
utils import common as uc + + +def print_line(sample, region, location, type_var, removed, + added, abnormal, normal, ratio, min_cov, min_exclu, + variant, target, info, var_seq, ref_seq): + line = "\t".join([sample, region, location, type_var, removed, + added, abnormal, normal, ratio, min_cov, min_exclu, + variant, target, info, var_seq, ref_seq]) + sys.stdout.write(line + "\n") + + +def print_vcf_header(): + header = '##fileformat=VCFv4.1\n' + header += '##INFO=LOC lines: + for line in open(arg_ref, "r"): + line = line.strip() + nt = [] + # Parse attributes + if line[0] == '>': + # sanity check + loc = line.split(" ")[0] + if ":" not in loc or "-" not in loc: + sys.exit('ERROR: Fasta entries do not contain a correctly ' + + 'formatted location: {}\n'.format(loc)) + + # look up attributes in fasta file + line = line.replace(">", "location=", 1) + attr = {x.split("=")[0].strip(): x.split("=")[1].strip() for x in line.split("|")} + exon = attr["location"] + chro, pos = exon.split(":") + refstart, refstop = pos.split("-") + + # get nt coordinates on the genome + if 'strand' not in list(attr.keys()): + attr['strand'] = '+' + sys.stderr.write("WARNING: Strand is assumed to be '+' \n") + strand = attr["strand"] + for i in range(int(refstart), int(refstop) + 1): + nt += [i] + nt = nt[::-1] if strand == "-" else nt + nts.extend(nt) + + return nts, chro, strand + + +def create_report(args): + + # Find correct extremities of a mutation + sys.setrecursionlimit(10000) + + def get_extremities(va, p, rs): + if p - 1 > 0 and rs[p - 1] == va[-1]: + return get_extremities(rs[p - 1] + va[:-1], p - 1, rs) + return p - 1 + + if args.format == "vcf" and args.info == "cluster": + # Note: could salvage that option if we get the fill ref from vs_ref entries + sys.exit("ERROR: -f vcf and -i cluster options are incompatible") + + variants = {} + samples = {} + data = {} + vcf = True if args.format == 'vcf' else False + table = True if args.format == 'table' else False + + (nts, chro, strand) = init_ref_seq(args.target) + + if vcf: + print_vcf_header() + elif not table: + print_line("Sample", "Region", "Location", "Type", "Removed", + "Added", "Abnormal", "Normal", "rVAF", "Min_coverage", + "Exclu_min_cov", "Variant", "Target", "Info", "Variant_sequence", + "Reference_sequence") + + for line in args.infile: + # filter header + if line[0] == "#": + # sys.stderr.write("Filtred: " + line) + continue + + tok = line.strip("\n").split("\t") + + # filter on info column + if not re.search(args.info, line) or tok[0] == "Database" or len(tok) <= 1: + # sys.stderr.write("Filtered: " + line) + continue + + samp = tok[0] + query = tok[1] + ratio = tok[4] + alt_exp = tok[5] + ref_exp = tok[9] + min_cov = tok[6] + start_off = tok[7] + alt_seq = tok[8] + refSeq = tok[10] + info = tok[11] + + min_exclu = "" + variant = (tok[2], tok[3]) + ref_seq = refSeq.upper() + + if args.exclu != "" and alt_seq != "": + res = uc.get_cov(args.exclu, alt_seq) + min_exclu = str(res[2]) + + if int(min_cov) < args.min_cov: + continue + + # case: entries with no mutations + if variant[0] == 'Reference': + mod = "" + if strand == "-": + region = "{}:{}-{}".format(chro, nts[-1], nts[0]) + else: + region = "{}:{}-{}".format(chro, nts[0], nts[-1]) + if not vcf and not table: + print_line(samp, region, '-', variant[0], '0', '0', + '0.0', alt_exp, tok[4], min_cov, min_exclu, '-', + query, tok[-1], "", "") + continue + elif vcf: + continue + + # case: there is a mutation + else: + start, mod, stop = variant[1].split(":") + delet, insert = mod.split("/") + + 
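+            # variant[1] is "start:deleted/inserted:stop"; the lengths of
+            # the two halves give the removed and added base counts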
added = str(len(insert)) + removed = str(len(delet)) + + # start and end positions in 0-based coordinates + pos = int(start) - 1 + pos -= int(start_off) + end = int(stop) - 2 # one to go back to last position, the other for 0-base + end -= int(start_off) + + if strand == "+": + start_pos = nts[pos] + end_pos = nts[end] + elif strand == "-": + start_pos = nts[end] + end_pos = nts[pos] + + region = "{}:{}-{}".format(chro, start_pos, end_pos + 1) + + ref_var = delet.upper() + alt_var = insert.upper() + loc_var = start_pos + end_var = end_pos + + if len(delet) == 0 and len(insert) != 0: + if strand == "+": + start_pos = nts[pos] + end_pos = nts[end + 1] # insertions end at last position + elif strand == "-": + start_pos = nts[end + 1] + end_pos = nts[pos] + region = "{}:{}-{}".format(chro, start_pos, end_pos + 1) + + var = insert.upper() + ibef = get_extremities(var, pos, ref_seq) # include current position + before = ref_seq[ibef:pos] + iaft = get_extremities(var[::-1], len(ref_seq)-pos, ref_seq[::-1]) + after = ref_seq[::-1][iaft:len(ref_seq)-pos][::-1] + iaft = len(ref_seq) - iaft - 1 + ref_var = before + after + alt_var = before + var + after + loc_var = nts[iaft] if strand == "-" else nts[ibef] + end_var = nts[iaft-len(ref_var)+1] if strand == "-" else nts[ibef+len(ref_var)-1] + + if loc_var + len(ref_var) - 1 != end_var and vcf: + sys.stderr.write("NOTE: Mutation overlaps 2 exons or more, VCF output is disabled \n") + continue + + # Reinterpret mutations for small ITDs + # careful, going upstream may put us outside the reference. + upstream = alt_seq[pos-len(insert):pos] + match = 0 + if pos-len(insert) >= 0: + for i in range(0, len(insert)): + if insert[i] == upstream[i]: + match += 1 + match = float(match)/len(insert) + + insert_type = "Insertion" + if pos-len(insert) >= 0 and len(insert) >= 3 and insert == upstream: + insert_type = "ITD" + added += " | " + str(end_pos - start_pos + 1) + elif pos-len(insert) >= 0 and len(insert) >= 3 and match > 0.5: + insert_type = "I&I" + added += " | " + str(end_pos - start_pos + 1) + + location = chro + ":" + str(end_pos) + + elif variant[0] == 'Deletion': + region = "{}:{}-{}".format(chro, start_pos, end_pos + 1) + location = "" + insert_type = variant[0] + + var = delet.upper() + ibef = get_extremities(var, pos, ref_seq) + before = ref_seq[ibef:pos] + iaft = get_extremities(var[::-1], len(ref_seq)-pos-1-len(var)+1, ref_seq[::-1]) + after = ref_seq[::-1][iaft:len(ref_seq)-pos-1-len(var)+1][::-1] + iaft = len(ref_seq) - iaft - 1 + ref_var = before + var + after + alt_var = before + after + loc_var = nts[iaft] if strand == "-" else nts[ibef] + end_var = nts[iaft-len(ref_var)+1] if strand == "-" else nts[ibef+len(ref_var)-1] + + if loc_var + len(ref_var) - 1 != end_var and vcf: + continue + + elif variant[0] == 'Substitution': + location = chro + ":" + str(start_pos) + insert_type = variant[0] + + if loc_var + len(ref_var) - 1 != end_var and vcf: + sys.stderr.write("NOTE: Mutation overlaps 2 exons or more, VCF output is disabled \n") + continue + + elif variant[0] == 'Indel': + location = chro + ":" + str(end_pos) + insert_type = variant[0] + + ref_var = ref_seq[pos-1] + delet.upper() + ref_seq[end + 1] + alt_var = ref_seq[pos-1] + insert.upper() + ref_seq[end + 1] + loc_var = start_pos - 1 + end_var = end_pos + 1 + + if loc_var + len(ref_var) - 1 != end_var and vcf: + sys.stderr.write("NOTE: Mutation overlaps 2 exons or more, VCF output is disabled \n") + continue + + else: + sys.stderr.write("WARNING: This variant isn't taken account\n") + 
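+            # dump the offending variant type and the raw input line
+            # before aborting, to ease debugging of unhandled cases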
sys.stderr.write(" - variant: " + str(variant[0]) + "\n") + sys.stderr.write(" - line: " + line) + sys.exit() + + if not vcf and not table: + print_line(samp, region, location, insert_type, + removed, added, alt_exp, ref_exp, ratio, + min_cov, min_exclu, mod, query, info, + alt_seq, refSeq) + + elif vcf: + complement = str.maketrans('ATGCU', 'TACGA') + ref_var = ref_var.translate(complement)[::-1] if strand == '-' else ref_var + alt_var = alt_var.translate(complement)[::-1] if strand == '-' else alt_var + print_vcf_line(chro, loc_var, ref_var, alt_var, insert_type, + query, ratio, min_cov, removed, added.replace(" ", "")) + + elif table: + var_name = variant[0] + "/" + query if "/" not in variant[0] else variant[0] + region_mod = region + ":" + mod if mod else region + var = (var_name, region_mod) + if var not in variants: + variants[var] = 0 + variants[var] += 1 + + if samp not in samples: + samples[samp] = set() + samples[samp].add(var) + + if samp not in data: + data[samp] = {} + data[samp][var] = float(ratio) + + if table: + sorted_variants = sorted(variants, key=variants.get, reverse=True) + + sys.stdout.write("Sample") + for v in sorted_variants: + if v[0].split("/")[0] == "Reference": + sys.stdout.write("\t" + v[0]) + else: + sys.stdout.write("\t" + v[1]) + sys.stdout.write("\n") + + for s, sv in samples.items(): + sys.stdout.write(s) + for v in sorted_variants: + if v in sv: + if 'Reference' not in v[0] and (not data[s][v]): + sys.stdout.write("\t" + ".") + else: + sys.stdout.write("\t" + str(data[s][v])) + else: + sys.stdout.write("\t" + ".") + sys.stdout.write("\n") + + +def main_find_report(args, argparser): + + if args.infile.isatty() or args.target is None: + argparser.print_help() + sys.exit() + + create_report(args) + +import bitstring +import collections +import heapq + + +class Node(object): + def __init__(self, left, right): + if left.weight is None: + self.weight = None + else: + self.weight = left.weight + right.weight + self.left = left + self.right = right + self.symbol = left.symbol + + def __lt__(self, other): + # If weights are equal, sort based on symbol. We do this so that the + # huffman tree will be deterministic, which makes it easier to test. 
+ if self.weight == other.weight: + return self.symbol < other.symbol + return self.weight < other.weight + + def add_to_code(self, bit): + for child in self.left, self.right: + child.add_to_code(bit) + + def codes(self): + out = self.left.codes() + out.update(self.right.codes()) + return out + + def read(self, stream): + if stream.read("bool"): + return self.left.read(stream) + else: + return self.right.read(stream) + + def binary(self, out=None): + out = bitstring.BitArray("0b0") + out.append(self.left.binary()) + out.append(self.right.binary()) + return out + + @staticmethod + def from_binary(stream): + try: + stream.pos + except AttributeError: + stream = bitstring.BitStream(stream) + code = bitstring.BitArray() + out = Node._from_binary(stream, code) + return out + + @staticmethod + def _from_binary(stream, code): + if stream.read("bool"): + symbol = stream.read("uint:8") + return LeafNode(symbol, code=code) + else: + return Node( + Node._from_binary(stream, code + bitstring.Bits("0b1")), + Node._from_binary(stream, code + bitstring.Bits("0b0"))) + + @staticmethod + def from_data(data, weights=None): + if weights is None: + weights = collections.Counter(data) + + heap = [] + for symbol, weight in weights.items(): + heapq.heappush(heap, LeafNode(symbol, weight)) + + while len(heap) > 1: + first = heapq.heappop(heap) + second = heapq.heappop(heap) + first.add_to_code(1) + second.add_to_code(0) + heapq.heappush(heap, Node(first, second)) + + return heap[0] + + +class LeafNode(Node): + def __init__(self, symbol, weight=None, code=None): + self.symbol = symbol + self.weight = weight + if code is not None: + self.code = code + else: + self.code = bitstring.BitArray() + + def add_to_code(self, bit): + self.code.prepend("0b%s" % bit) + + def codes(self): + return {self.symbol: self.code} + + def binary(self): + out = bitstring.BitArray("0b1") + out.append(bitstring.Bits(uint=self.symbol, length=8)) + return out + + def read(self, stream): + return self.symbol + + +def compress(data, weights=None): + """Performs huffman compression on data. + data - The data to compress (bytes). + weights - The weights for each code point. If None, we will use the + number of occurances. Should be a dict of {symbol: weight}. + + return - The compressed data, with the huffman tree prepended (bytes). + """ + tree = Node.from_data(data, weights) + codes = tree.codes() + + output = tree.binary() + for byte in data: + output.append(codes[byte]) + + # Pad the front with 0's followed by 1 so we know where the real data + # starts + pad_bits = 8 - (len(output) % 8) + if pad_bits == 0: + pad_bits = 8 + + padding = bitstring.BitArray() + for i in range(pad_bits - 1): + padding.append("0b0") + padding.append("0b1") + output.prepend(padding) + + return output.tobytes() + + +def decompress(data): + """Decompresses huffman compressed data. + data - The compressed data, with the huffman tree prepended (bytes). + + return - The decompressed data (bytes) + """ + stream = bitstring.BitStream(data) + + # Read padding + while not stream.read("bool"): + pass + + tree = Node.from_binary(stream) + out = [] + try: + while 1: + out.append(tree.read(stream)) + except bitstring.ReadError: + pass + + return bytes(out) + +# Copyright 2011 Justin Santa Barbara +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""The volumes extension.""" + +import webob +from webob import exc + +from nova.api.openstack import common +from nova.api.openstack import extensions +from nova.api.openstack import wsgi +from nova.api.openstack import xmlutil +from nova import compute +from nova import exception +from nova.objects import block_device as block_device_obj +from nova.openstack.common.gettextutils import _ +from nova.openstack.common import log as logging +from nova.openstack.common import strutils +from nova.openstack.common import uuidutils +from nova import volume + +LOG = logging.getLogger(__name__) +authorize = extensions.extension_authorizer('compute', 'volumes') + +authorize_attach = extensions.extension_authorizer('compute', + 'volume_attachments') + + +def _translate_volume_detail_view(context, vol): + """Maps keys for volumes details view.""" + + d = _translate_volume_summary_view(context, vol) + + # No additional data / lookups at the moment + + return d + + +def _translate_volume_summary_view(context, vol): + """Maps keys for volumes summary view.""" + d = {} + + d['id'] = vol['id'] + d['status'] = vol['status'] + d['size'] = vol['size'] + d['availabilityZone'] = vol['availability_zone'] + d['createdAt'] = vol['created_at'] + + if vol['attach_status'] == 'attached': + d['attachments'] = [_translate_attachment_detail_view(vol['id'], + vol['instance_uuid'], + vol['mountpoint'])] + else: + d['attachments'] = [{}] + + d['displayName'] = vol['display_name'] + d['displayDescription'] = vol['display_description'] + + if vol['volume_type_id'] and vol.get('volume_type'): + d['volumeType'] = vol['volume_type']['name'] + else: + d['volumeType'] = vol['volume_type_id'] + + d['snapshotId'] = vol['snapshot_id'] + LOG.audit(_("vol=%s"), vol, context=context) + + if vol.get('volume_metadata'): + d['metadata'] = vol.get('volume_metadata') + else: + d['metadata'] = {} + + return d + + +def make_volume(elem): + elem.set('id') + elem.set('status') + elem.set('size') + elem.set('availabilityZone') + elem.set('createdAt') + elem.set('displayName') + elem.set('displayDescription') + elem.set('volumeType') + elem.set('snapshotId') + + attachments = xmlutil.SubTemplateElement(elem, 'attachments') + attachment = xmlutil.SubTemplateElement(attachments, 'attachment', + selector='attachments') + make_attachment(attachment) + + # Attach metadata node + elem.append(common.MetadataTemplate()) + + +class VolumeTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = xmlutil.TemplateElement('volume', selector='volume') + make_volume(root) + return xmlutil.MasterTemplate(root, 1) + + +class VolumesTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = xmlutil.TemplateElement('volumes') + elem = xmlutil.SubTemplateElement(root, 'volume', selector='volumes') + make_volume(elem) + return xmlutil.MasterTemplate(root, 1) + + +class CommonDeserializer(wsgi.MetadataXMLDeserializer): + """Common deserializer to handle xml-formatted volume requests. 
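+
+    For example, a body like ``<volume display_name="vol-1" size="1"/>``
+    would be marshalled by _extract_volume below to
+    ``{'display_name': 'vol-1', 'size': '1'}`` (a sketch of the expected
+    input, not an exhaustive schema).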
+ + Handles standard volume attributes as well as the optional metadata + attribute + """ + + metadata_deserializer = common.MetadataXMLDeserializer() + + def _extract_volume(self, node): + """Marshal the volume attribute of a parsed request.""" + vol = {} + volume_node = self.find_first_child_named(node, 'volume') + + attributes = ['display_name', 'display_description', 'size', + 'volume_type', 'availability_zone'] + for attr in attributes: + if volume_node.getAttribute(attr): + vol[attr] = volume_node.getAttribute(attr) + + metadata_node = self.find_first_child_named(volume_node, 'metadata') + if metadata_node is not None: + vol['metadata'] = self.extract_metadata(metadata_node) + + return vol + + +class CreateDeserializer(CommonDeserializer): + """Deserializer to handle xml-formatted create volume requests. + + Handles standard volume attributes as well as the optional metadata + attribute + """ + + def default(self, string): + """Deserialize an xml-formatted volume create request.""" + dom = xmlutil.safe_minidom_parse_string(string) + vol = self._extract_volume(dom) + return {'body': {'volume': vol}} + + +class VolumeController(wsgi.Controller): + """The Volumes API controller for the OpenStack API.""" + + def __init__(self): + self.volume_api = volume.API() + super(VolumeController, self).__init__() + + @wsgi.serializers(xml=VolumeTemplate) + def show(self, req, id): + """Return data about the given volume.""" + context = req.environ['nova.context'] + authorize(context) + + try: + vol = self.volume_api.get(context, id) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + + return {'volume': _translate_volume_detail_view(context, vol)} + + def delete(self, req, id): + """Delete a volume.""" + context = req.environ['nova.context'] + authorize(context) + + LOG.audit(_("Delete volume with id: %s"), id, context=context) + + try: + self.volume_api.delete(context, id) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + return webob.Response(status_int=202) + + @wsgi.serializers(xml=VolumesTemplate) + def index(self, req): + """Returns a summary list of volumes.""" + return self._items(req, entity_maker=_translate_volume_summary_view) + + @wsgi.serializers(xml=VolumesTemplate) + def detail(self, req): + """Returns a detailed list of volumes.""" + return self._items(req, entity_maker=_translate_volume_detail_view) + + def _items(self, req, entity_maker): + """Returns a list of volumes, transformed through entity_maker.""" + context = req.environ['nova.context'] + authorize(context) + + volumes = self.volume_api.get_all(context) + limited_list = common.limited(volumes, req) + res = [entity_maker(context, vol) for vol in limited_list] + return {'volumes': res} + + @wsgi.serializers(xml=VolumeTemplate) + @wsgi.deserializers(xml=CreateDeserializer) + def create(self, req, body): + """Creates a new volume.""" + context = req.environ['nova.context'] + authorize(context) + + if not self.is_valid_body(body, 'volume'): + raise exc.HTTPUnprocessableEntity() + + vol = body['volume'] + + vol_type = vol.get('volume_type', None) + + metadata = vol.get('metadata', None) + + snapshot_id = vol.get('snapshot_id') + + if snapshot_id is not None: + snapshot = self.volume_api.get_snapshot(context, snapshot_id) + else: + snapshot = None + + size = vol.get('size', None) + if size is None and snapshot is not None: + size = snapshot['volume_size'] + + LOG.audit(_("Create volume of %s GB"), size, context=context) + + availability_zone = 
vol.get('availability_zone', None) + + try: + new_volume = self.volume_api.create( + context, + size, + vol.get('display_name'), + vol.get('display_description'), + snapshot=snapshot, + volume_type=vol_type, + metadata=metadata, + availability_zone=availability_zone + ) + except exception.InvalidInput as err: + raise exc.HTTPBadRequest(explanation=err.format_message()) + + # TODO(vish): Instance should be None at db layer instead of + # trying to lazy load, but for now we turn it into + # a dict to avoid an error. + retval = _translate_volume_detail_view(context, dict(new_volume)) + result = {'volume': retval} + + location = '%s/%s' % (req.url, new_volume['id']) + + return wsgi.ResponseObject(result, headers=dict(location=location)) + + +def _translate_attachment_detail_view(volume_id, instance_uuid, mountpoint): + """Maps keys for attachment details view.""" + + d = _translate_attachment_summary_view(volume_id, + instance_uuid, + mountpoint) + + # No additional data / lookups at the moment + return d + + +def _translate_attachment_summary_view(volume_id, instance_uuid, mountpoint): + """Maps keys for attachment summary view.""" + d = {} + + # NOTE(justinsb): We use the volume id as the id of the attachment object + d['id'] = volume_id + + d['volumeId'] = volume_id + + d['serverId'] = instance_uuid + if mountpoint: + d['device'] = mountpoint + + return d + + +def make_attachment(elem): + elem.set('id') + elem.set('serverId') + elem.set('volumeId') + elem.set('device') + + +class VolumeAttachmentTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = xmlutil.TemplateElement('volumeAttachment', + selector='volumeAttachment') + make_attachment(root) + return xmlutil.MasterTemplate(root, 1) + + +class VolumeAttachmentsTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = xmlutil.TemplateElement('volumeAttachments') + elem = xmlutil.SubTemplateElement(root, 'volumeAttachment', + selector='volumeAttachments') + make_attachment(elem) + return xmlutil.MasterTemplate(root, 1) + + +class VolumeAttachmentController(wsgi.Controller): + """The volume attachment API controller for the OpenStack API. + + A child resource of the server. 
Note that we use the volume id + as the ID of the attachment (though this is not guaranteed externally) + + """ + + def __init__(self, ext_mgr=None): + self.compute_api = compute.API() + self.volume_api = volume.API() + self.ext_mgr = ext_mgr + super(VolumeAttachmentController, self).__init__() + + @wsgi.serializers(xml=VolumeAttachmentsTemplate) + def index(self, req, server_id): + """Returns the list of volume attachments for a given instance.""" + context = req.environ['nova.context'] + authorize_attach(context, action='index') + return self._items(req, server_id, + entity_maker=_translate_attachment_summary_view) + + @wsgi.serializers(xml=VolumeAttachmentTemplate) + def show(self, req, server_id, id): + """Return data about the given volume attachment.""" + context = req.environ['nova.context'] + authorize(context) + authorize_attach(context, action='show') + + volume_id = id + try: + instance = self.compute_api.get(context, server_id) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + + bdms = block_device_obj.BlockDeviceMappingList.get_by_instance_uuid( + context, instance['uuid']) + + if not bdms: + msg = _("Instance %s is not attached.") % server_id + raise exc.HTTPNotFound(explanation=msg) + + assigned_mountpoint = None + + for bdm in bdms: + if bdm.volume_id == volume_id: + assigned_mountpoint = bdm.device_name + break + + if assigned_mountpoint is None: + msg = _("volume_id not found: %s") % volume_id + raise exc.HTTPNotFound(explanation=msg) + + return {'volumeAttachment': _translate_attachment_detail_view( + volume_id, + instance['uuid'], + assigned_mountpoint)} + + def _validate_volume_id(self, volume_id): + if not uuidutils.is_uuid_like(volume_id): + msg = _("Bad volumeId format: volumeId is " + "not in proper format (%s)") % volume_id + raise exc.HTTPBadRequest(explanation=msg) + + @wsgi.serializers(xml=VolumeAttachmentTemplate) + def create(self, req, server_id, body): + """Attach a volume to an instance.""" + context = req.environ['nova.context'] + authorize(context) + authorize_attach(context, action='create') + + if not self.is_valid_body(body, 'volumeAttachment'): + raise exc.HTTPUnprocessableEntity() + try: + volume_id = body['volumeAttachment']['volumeId'] + except KeyError: + msg = _("volumeId must be specified.") + raise exc.HTTPBadRequest(explanation=msg) + device = body['volumeAttachment'].get('device') + + self._validate_volume_id(volume_id) + + LOG.audit(_("Attach volume %(volume_id)s to instance %(server_id)s " + "at %(device)s"), + {'volume_id': volume_id, + 'device': device, + 'server_id': server_id}, + context=context) + + try: + instance = self.compute_api.get(context, server_id, + want_objects=True) + device = self.compute_api.attach_volume(context, instance, + volume_id, device) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + except exception.InstanceIsLocked as e: + raise exc.HTTPConflict(explanation=e.format_message()) + except exception.InstanceInvalidState as state_error: + common.raise_http_conflict_for_instance_invalid_state(state_error, + 'attach_volume') + + # The attach is async + attachment = {} + attachment['id'] = volume_id + attachment['serverId'] = server_id + attachment['volumeId'] = volume_id + attachment['device'] = device + + # NOTE(justinsb): And now, we have a problem... + # The attach is async, so there's a window in which we don't see + # the attachment (until the attachment completes). We could also + # get problems with concurrent requests. 
I think we need an + # attachment state, and to write to the DB here, but that's a bigger + # change. + # For now, we'll probably have to rely on libraries being smart + + # TODO(justinsb): How do I return "accepted" here? + return {'volumeAttachment': attachment} + + def update(self, req, server_id, id, body): + if (not self.ext_mgr or + not self.ext_mgr.is_loaded('os-volume-attachment-update')): + raise exc.HTTPBadRequest() + context = req.environ['nova.context'] + authorize(context) + authorize_attach(context, action='update') + + if not self.is_valid_body(body, 'volumeAttachment'): + raise exc.HTTPUnprocessableEntity() + + old_volume_id = id + old_volume = self.volume_api.get(context, old_volume_id) + + try: + new_volume_id = body['volumeAttachment']['volumeId'] + except KeyError: + msg = _("volumeId must be specified.") + raise exc.HTTPBadRequest(explanation=msg) + self._validate_volume_id(new_volume_id) + new_volume = self.volume_api.get(context, new_volume_id) + + try: + instance = self.compute_api.get(context, server_id, + want_objects=True) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + + bdms = block_device_obj.BlockDeviceMappingList.get_by_instance_uuid( + context, instance.uuid) + found = False + try: + for bdm in bdms: + if bdm.volume_id != old_volume_id: + continue + try: + self.compute_api.swap_volume(context, instance, old_volume, + new_volume) + found = True + break + except exception.VolumeUnattached: + # The volume is not attached. Treat it as NotFound + # by falling through. + pass + except exception.InstanceIsLocked as e: + raise exc.HTTPConflict(explanation=e.format_message()) + except exception.InstanceInvalidState as state_error: + common.raise_http_conflict_for_instance_invalid_state(state_error, + 'swap_volume') + + if not found: + msg = _("volume_id not found: %s") % old_volume_id + raise exc.HTTPNotFound(explanation=msg) + else: + return webob.Response(status_int=202) + + def delete(self, req, server_id, id): + """Detach a volume from an instance.""" + context = req.environ['nova.context'] + authorize(context) + authorize_attach(context, action='delete') + + volume_id = id + LOG.audit(_("Detach volume %s"), volume_id, context=context) + + try: + instance = self.compute_api.get(context, server_id, + want_objects=True) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + + volume = self.volume_api.get(context, volume_id) + + bdms = block_device_obj.BlockDeviceMappingList.get_by_instance_uuid( + context, instance['uuid']) + if not bdms: + msg = _("Instance %s is not attached.") % server_id + raise exc.HTTPNotFound(explanation=msg) + + found = False + try: + for bdm in bdms: + if bdm.volume_id != volume_id: + continue + if bdm.is_root: + msg = _("Can't detach root device volume") + raise exc.HTTPForbidden(explanation=msg) + try: + self.compute_api.detach_volume(context, instance, volume) + found = True + break + except exception.VolumeUnattached: + # The volume is not attached. Treat it as NotFound + # by falling through. 
+ pass + except exception.InstanceIsLocked as e: + raise exc.HTTPConflict(explanation=e.format_message()) + except exception.InstanceInvalidState as state_error: + common.raise_http_conflict_for_instance_invalid_state(state_error, + 'detach_volume') + + if not found: + msg = _("volume_id not found: %s") % volume_id + raise exc.HTTPNotFound(explanation=msg) + else: + return webob.Response(status_int=202) + + def _items(self, req, server_id, entity_maker): + """Returns a list of attachments, transformed through entity_maker.""" + context = req.environ['nova.context'] + authorize(context) + + try: + instance = self.compute_api.get(context, server_id) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + + bdms = block_device_obj.BlockDeviceMappingList.get_by_instance_uuid( + context, instance['uuid']) + limited_list = common.limited(bdms, req) + results = [] + + for bdm in limited_list: + if bdm.volume_id: + results.append(entity_maker(bdm.volume_id, + bdm.instance_uuid, + bdm.device_name)) + + return {'volumeAttachments': results} + + +def _translate_snapshot_detail_view(context, vol): + """Maps keys for snapshots details view.""" + + d = _translate_snapshot_summary_view(context, vol) + + # NOTE(gagupta): No additional data / lookups at the moment + return d + + +def _translate_snapshot_summary_view(context, vol): + """Maps keys for snapshots summary view.""" + d = {} + + d['id'] = vol['id'] + d['volumeId'] = vol['volume_id'] + d['status'] = vol['status'] + # NOTE(gagupta): We map volume_size as the snapshot size + d['size'] = vol['volume_size'] + d['createdAt'] = vol['created_at'] + d['displayName'] = vol['display_name'] + d['displayDescription'] = vol['display_description'] + return d + + +def make_snapshot(elem): + elem.set('id') + elem.set('status') + elem.set('size') + elem.set('createdAt') + elem.set('displayName') + elem.set('displayDescription') + elem.set('volumeId') + + +class SnapshotTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = xmlutil.TemplateElement('snapshot', selector='snapshot') + make_snapshot(root) + return xmlutil.MasterTemplate(root, 1) + + +class SnapshotsTemplate(xmlutil.TemplateBuilder): + def construct(self): + root = xmlutil.TemplateElement('snapshots') + elem = xmlutil.SubTemplateElement(root, 'snapshot', + selector='snapshots') + make_snapshot(elem) + return xmlutil.MasterTemplate(root, 1) + + +class SnapshotController(wsgi.Controller): + """The Snapshots API controller for the OpenStack API.""" + + def __init__(self): + self.volume_api = volume.API() + super(SnapshotController, self).__init__() + + @wsgi.serializers(xml=SnapshotTemplate) + def show(self, req, id): + """Return data about the given snapshot.""" + context = req.environ['nova.context'] + authorize(context) + + try: + vol = self.volume_api.get_snapshot(context, id) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + + return {'snapshot': _translate_snapshot_detail_view(context, vol)} + + def delete(self, req, id): + """Delete a snapshot.""" + context = req.environ['nova.context'] + authorize(context) + + LOG.audit(_("Delete snapshot with id: %s"), id, context=context) + + try: + self.volume_api.delete_snapshot(context, id) + except exception.NotFound as e: + raise exc.HTTPNotFound(explanation=e.format_message()) + return webob.Response(status_int=202) + + @wsgi.serializers(xml=SnapshotsTemplate) + def index(self, req): + """Returns a summary list of snapshots.""" + return self._items(req, 
entity_maker=_translate_snapshot_summary_view) + + @wsgi.serializers(xml=SnapshotsTemplate) + def detail(self, req): + """Returns a detailed list of snapshots.""" + return self._items(req, entity_maker=_translate_snapshot_detail_view) + + def _items(self, req, entity_maker): + """Returns a list of snapshots, transformed through entity_maker.""" + context = req.environ['nova.context'] + authorize(context) + + snapshots = self.volume_api.get_all_snapshots(context) + limited_list = common.limited(snapshots, req) + res = [entity_maker(context, snapshot) for snapshot in limited_list] + return {'snapshots': res} + + @wsgi.serializers(xml=SnapshotTemplate) + def create(self, req, body): + """Creates a new snapshot.""" + context = req.environ['nova.context'] + authorize(context) + + if not self.is_valid_body(body, 'snapshot'): + raise exc.HTTPUnprocessableEntity() + + snapshot = body['snapshot'] + volume_id = snapshot['volume_id'] + + LOG.audit(_("Create snapshot from volume %s"), volume_id, + context=context) + + force = snapshot.get('force', False) + try: + force = strutils.bool_from_string(force, strict=True) + except ValueError: + msg = _("Invalid value '%s' for force.") % force + raise exception.InvalidParameterValue(err=msg) + + if force: + create_func = self.volume_api.create_snapshot_force + else: + create_func = self.volume_api.create_snapshot + + new_snapshot = create_func(context, volume_id, + snapshot.get('display_name'), + snapshot.get('display_description')) + + retval = _translate_snapshot_detail_view(context, new_snapshot) + return {'snapshot': retval} + + +class Volumes(extensions.ExtensionDescriptor): + """Volumes support.""" + + name = "Volumes" + alias = "os-volumes" + namespace = "http://docs.openstack.org/compute/ext/volumes/api/v1.1" + updated = "2011-03-25T00:00:00Z" + + def get_resources(self): + resources = [] + + # NOTE(justinsb): No way to provide singular name ('volume') + # Does this matter? + res = extensions.ResourceExtension('os-volumes', + VolumeController(), + collection_actions={'detail': 'GET'}) + resources.append(res) + + attachment_controller = VolumeAttachmentController(self.ext_mgr) + res = extensions.ResourceExtension('os-volume_attachments', + attachment_controller, + parent=dict( + member_name='server', + collection_name='servers')) + resources.append(res) + + res = extensions.ResourceExtension('os-volumes_boot', + inherits='servers') + resources.append(res) + + res = extensions.ResourceExtension('os-snapshots', + SnapshotController(), + collection_actions={'detail': 'GET'}) + resources.append(res) + + return resources + +# Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" + + Survo puzzle Google CP Solver. + + http://en.wikipedia.org/wiki/Survo_Puzzle + ''' + Survo puzzle is a kind of logic puzzle presented (in April 2006) and studied + by Seppo Mustonen. The name of the puzzle is associated to Mustonen's + Survo system which is a general environment for statistical computing and + related areas. 
+ + In a Survo puzzle the task is to fill an m * n table by integers 1,2,...,m*n + so + that each of these numbers appears only once and their row and column sums are + equal to integers given on the bottom and the right side of the table. + Often some of the integers are given readily in the table in order to + guarantee uniqueness of the solution and/or for making the task easier. + ''' + + See also + http://www.survo.fi/english/index.html + http://www.survo.fi/puzzles/index.html + + References: + Mustonen, S. (2006b). "On certain cross sum puzzles" + http://www.survo.fi/papers/puzzles.pdf + Mustonen, S. (2007b). "Enumeration of uniquely solvable open Survo puzzles." + http://www.survo.fi/papers/enum_survo_puzzles.pdf + Kimmo Vehkalahti: "Some comments on magic squares and Survo puzzles" + http://www.helsinki.fi/~kvehkala/Kimmo_Vehkalahti_Windsor.pdf + R code: http://koti.mbnet.fi/tuimala/tiedostot/survo.R + + Compare with the following models: + * Choco : http://www.hakank.org/choco/SurvoPuzzle.java + * Comet : http://www.hakank.org/comet/survo_puzzle.co + * ECLiPSE : http://www.hakank.org/eclipse/survo_puzzle.ecl + * Gecode : http://www.hakank.org/gecode/survo_puzzle.cpp + * Gecode/R: http://www.hakank.org/gecode_r/survo_puzzle.rb + * JaCoP : http://www.hakank.org/JaCoP/SurvoPuzzle.java + * MiniZinc: http://www.hakank.org/minizinc/survo_puzzle.mzn + * Tailor/Essence': http://www.hakank.org/tailor/survo_puzzle.eprime + * Zinc: http://www.hakank.org/minizinc/survo_puzzle.zinc + + + This model was created by Hakan Kjellerstrand (hakank@bonetmail.com) + Also see my other Google CP Solver models: + http://www.hakank.org/google_or_tools/ +""" + +import sys +from ortools.constraint_solver import pywrapcp + + +def main(r=0, c=0, rowsums=[], colsums=[], game=[]): + + # Create the solver. 
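+  # Note: instances can also be read from a plain-text file via
+  # read_problem() at the bottom of this script: r on the first line, c on
+  # the second, then comma-separated row sums, column sums, and r
+  # comma-separated rows of clues (0 = empty cell). The default 3 x 4
+  # instance below would be written as:
+  #   3
+  #   4
+  #   30,18,30
+  #   27,16,10,25
+  #   0,6,0,0
+  #   8,0,0,0
+  #   0,0,3,0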
+ solver = pywrapcp.Solver("Survo puzzle") + + # + # data + # + if r == 0: + r = 3 + c = 4 + rowsums = [30, 18, 30] + colsums = [27, 16, 10, 25] + game = [[0, 6, 0, 0], + [8, 0, 0, 0], + [0, 0, 3, 0]] + + print "r:", r, "c:", c + + # declare variables + x = {} + for i in range(r): + for j in range(c): + x[(i, j)] = solver.IntVar(1, r * c, "x %i %i" % (i, j)) + + # + # constraints + # + + # + # set the clues + # + for i in range(r): + for j in range(c): + if game[i][j] > 0: + solver.Add(x[i, j] == game[i][j]) + + xflat = [x[(i, j)] for i in range(r) for j in range(c)] + solver.Add(solver.AllDifferent(xflat)) + # + # calculate rowsums and colsums + # + for i in range(r): + solver.Add(rowsums[i] == solver.Sum([x[i, j] for j in range(c)])) + + for j in range(c): + solver.Add(colsums[j] == solver.Sum([x[i, j] for i in range(r)])) + + # + # solution and search + # + solution = solver.Assignment() + solution.Add([x[(i, j)] for i in range(r) for j in range(c)]) + + collector = solver.AllSolutionCollector(solution) + solver.Solve(solver.Phase(xflat, + solver.CHOOSE_FIRST_UNBOUND, + solver.ASSIGN_MIN_VALUE), + [collector]) + + num_solutions = collector.SolutionCount() + print "\nnum_solutions: ", num_solutions + if num_solutions > 0: + for s in range(num_solutions): + xval = [collector.Value(s, x[(i, j)]) + for i in range(r) for j in range(c)] + + for i in range(r): + for j in range(c): + print "%2i" % (xval[i * c + j]), + print + print + + print + print "num_solutions:", num_solutions + print "failures:", solver.Failures() + print "branches:", solver.Branches() + print "WallTime:", solver.WallTime() + + else: + print "No solutions found" + + +# +# Read a problem instance from a file +# +def read_problem(file): + f = open(file, "r") + r = int(f.readline()) + c = int(f.readline()) + rowsums = f.readline() + colsums = f.readline() + rowsums = [int(t) for t in (rowsums.rstrip()).split(",")] + colsums = [int(t) for t in (colsums.rstrip()).split(",")] + game = [] + for i in range(r): + x = f.readline() + x = [int(t) for t in (x.rstrip()).split(",")] + row = [0] * c + for j in range(c): + row[j] = int(x[j]) + game.append(row) + return [r, c, rowsums, colsums, game] + + +if __name__ == "__main__": + if len(sys.argv) > 1: + file = sys.argv[1] + [r, c, rowsums, colsums, game] = read_problem(file) + main(r, c, rowsums, colsums, game) + else: + main() + +# Copyright 2011 OpenStack Foundation +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Keypair management extension.""" + +import webob +import webob.exc + +from nova.api.openstack.compute.legacy_v2 import servers +from nova.api.openstack import extensions +from nova.api.openstack import wsgi +from nova.compute import api as compute_api +from nova import exception +from nova.i18n import _ + + +authorize = extensions.extension_authorizer('compute', 'keypairs') +soft_authorize = extensions.soft_extension_authorizer('compute', 'keypairs') + + +class KeypairController(object): + + """Keypair API controller for the OpenStack API.""" + def __init__(self): + self.api = compute_api.KeypairAPI() + + def _filter_keypair(self, keypair, **attrs): + clean = { + 'name': keypair.name, + 'public_key': keypair.public_key, + 'fingerprint': keypair.fingerprint, + } + for attr in attrs: + clean[attr] = keypair[attr] + return clean + + def create(self, req, body): + """Create or import keypair. + + Sending name will generate a key and return private_key + and fingerprint. + + You can send a public_key to add an existing ssh key + + params: keypair object with: + name (required) - string + public_key (optional) - string + """ + + context = req.environ['nova.context'] + authorize(context, action='create') + + try: + params = body['keypair'] + name = params['name'] + except KeyError: + msg = _("Invalid request body") + raise webob.exc.HTTPBadRequest(explanation=msg) + + try: + if 'public_key' in params: + keypair = self.api.import_key_pair(context, + context.user_id, name, + params['public_key']) + keypair = self._filter_keypair(keypair, user_id=True) + else: + keypair, private_key = self.api.create_key_pair( + context, context.user_id, name) + keypair = self._filter_keypair(keypair, user_id=True) + keypair['private_key'] = private_key + + return {'keypair': keypair} + + except exception.KeypairLimitExceeded: + msg = _("Quota exceeded, too many key pairs.") + raise webob.exc.HTTPForbidden(explanation=msg) + except exception.InvalidKeypair as exc: + raise webob.exc.HTTPBadRequest(explanation=exc.format_message()) + except exception.KeyPairExists as exc: + raise webob.exc.HTTPConflict(explanation=exc.format_message()) + + def delete(self, req, id): + """Delete a keypair with a given name.""" + context = req.environ['nova.context'] + authorize(context, action='delete') + try: + self.api.delete_key_pair(context, context.user_id, id) + except exception.KeypairNotFound as exc: + raise webob.exc.HTTPNotFound(explanation=exc.format_message()) + return webob.Response(status_int=202) + + def show(self, req, id): + """Return data for the given key name.""" + context = req.environ['nova.context'] + authorize(context, action='show') + + try: + # The return object needs to be a dict in order to pop the 'type' + # field, since it is incompatible with API version <= 2.1. 
+ keypair = self.api.get_key_pair(context, context.user_id, id) + keypair = self._filter_keypair(keypair, created_at=True, + deleted=True, deleted_at=True, + id=True, user_id=True, + updated_at=True) + except exception.KeypairNotFound as exc: + raise webob.exc.HTTPNotFound(explanation=exc.format_message()) + return {'keypair': keypair} + + def index(self, req): + """List of keypairs for a user.""" + context = req.environ['nova.context'] + authorize(context, action='index') + key_pairs = self.api.get_key_pairs(context, context.user_id) + rval = [] + for key_pair in key_pairs: + rval.append({'keypair': self._filter_keypair(key_pair)}) + + return {'keypairs': rval} + + +class Controller(servers.Controller): + + def _add_key_name(self, req, servers): + for server in servers: + db_server = req.get_db_instance(server['id']) + # server['id'] is guaranteed to be in the cache due to + # the core API adding it in its 'show'/'detail' methods. + server['key_name'] = db_server['key_name'] + + def _show(self, req, resp_obj): + if 'server' in resp_obj.obj: + server = resp_obj.obj['server'] + self._add_key_name(req, [server]) + + @wsgi.extends + def show(self, req, resp_obj, id): + context = req.environ['nova.context'] + if soft_authorize(context): + self._show(req, resp_obj) + + @wsgi.extends + def detail(self, req, resp_obj): + context = req.environ['nova.context'] + if 'servers' in resp_obj.obj and soft_authorize(context): + servers = resp_obj.obj['servers'] + self._add_key_name(req, servers) + + +class Keypairs(extensions.ExtensionDescriptor): + """Keypair Support.""" + + name = "Keypairs" + alias = "os-keypairs" + namespace = "http://docs.openstack.org/compute/ext/keypairs/api/v1.1" + updated = "2011-08-08T00:00:00Z" + + def get_resources(self): + resources = [] + + res = extensions.ResourceExtension( + 'os-keypairs', + KeypairController()) + resources.append(res) + return resources + + def get_controller_extensions(self): + controller = Controller(self.ext_mgr) + extension = extensions.ControllerExtension(self, 'servers', controller) + return [extension] + +# -*- coding: utf-8 -*- +from __future__ import with_statement +import os + +from fabric import api as fab +from fabric.contrib.files import uncomment, comment + +from ..base import _ +from ..utils import render_template, upload_template +from ..deployment import command + + +class DjangoProject(object): + + namespace = 'django' + + python_path = '../env/bin/python' + media_path = '../media' + settings_local = './settings_local.py' + settings_local_filename = 'settings_local.py' + version = (1, 3) + + HAS_WSGI = property(lambda self: self.version >= (1, 4)) + HAS_REQUIREDEBUGFALSE = property(lambda self: self.version >= (1, 4)) + + USE_LOGGING = True + USE_SENTRY = True + USE_CELERY = property(lambda self: hasattr(fab.env, 'celery')) + USE_SPHINXSEARCH = property(lambda self: hasattr(fab.env, 'sphinxsearch')) + + # app depends + USE_SOUTH = True + # TODO get info from settings.py + USE_STATICFILES = False + + def __init__(self, project_path, + settings_local=None, + python_path=None): + self.project_path = project_path + fab.env['django_project_path'] = project_path + + if settings_local is not None: + self.settings_local = settings_local + if python_path is not None: + self.python_path = python_path + + self.settings_local_path = self.project_path + self.settings_local + path = fab.env['os'].path + fab.env['django_project_name'] = path.basename(self.project_path.rstrip('/')) + fab.env['django_python_path'] = project_path + 
fab.env['django_settings'] = 'settings' + fab.env['reqs_file'] = 'requirements.txt' + + def get_version(self): + return '.'.join(str(part) for part in self.version) + + def install_requirements(self, update=True): + opts = '-r' + if update: + opts = '-U %s' % opts + + with fab.cd(_('%(django_python_path)s')): + fab.run(_("../%%(virtualenv)s/bin/pip install %s" + " %%(reqs_file)s" % opts)) + + def manage(self, command): + with fab.cd(_('%(django_python_path)s')): + fab.run('%s manage.py %s' % (self.python_path, + command) + ) + # legacy + run = manage + + def syncdb(self, app=''): + self.manage('syncdb --noinput %s' % app) + + def migrate(self, app=''): + self.manage('migrate %s' % app) + + def init_debug_server(self): + self.manage('init_debug_server') + + def runserver(self, host="0.0.0.0", port=8080): + self.manage('runserver %s:%s' % (host, port)) + + def createsuperuser(self): + self.manage('createsuperuser') + + @command(same_name=True) + def update_settings_local(self): + settings_local_path = fab.env.os.path.join(self.project_path, + self.settings_local_filename) + + context = fab.env + + names = ['logging'] + if self.USE_SENTRY: + names.append('sentry') + if self.USE_SPHINXSEARCH: + names.append('sphinxsearch') + if self.USE_CELERY: + names.append('celery') + + for name in names: + if getattr(self, 'USE_' + name.upper(), False): + text = render_template('django/settings_%s.py' % name, + context=context, + use_jinja=True) + context['settings_%s' % name] = text + else: + context['settings_%s' % name] = '' + + upload_template(self.settings_local, + settings_local_path, + context, use_jinja=True) + + def update_code(self): + with fab.cd(self.project_path): + fab.run('svn up') + + def reload(self): + self.update_settings_local() + + if self.version >= (1, 7) or self.USE_SOUTH: + self.migrate() + else: + self.syncdb() + + if self.USE_STATICFILES: + self.deploy_static() + + def set_maintanance_mode(self, on=True): + settings_local_path = self.project_path + 'settings_local.py' + + if on: + uncomment(settings_local_path, r'MAINTENANCE_MODE.*') + else: + comment(settings_local_path, r'MAINTENANCE_MODE.*') + + @command + def deploy_static(self): + self.manage('collectstatic -v0 --noinput') + + +class Django13(DjangoProject): + pass + + +class Django13ChangeProjectDir(Django13): + + def __init__(self, *args, **kwargs): + super(Django13ChangeProjectDir, self).__init__(*args, **kwargs) + + path = fab.env['os'].path + python_path = path.split(self.project_path.rstrip('/'))[0] + fab.env['django_python_path'] = python_path + fab.env['django_settings'] = '%s.settings' % fab.env['django_project_name'] + + +class Django14(DjangoProject): + version = (1, 4) + + def __init__(self, *args, **kwargs): + super(Django14, self).__init__(*args, **kwargs) + + path = fab.env['os'].path + python_path = path.split(self.project_path.rstrip('/'))[0] + fab.env['django_python_path'] = python_path + fab.env['django_settings'] = '%s.settings' % fab.env['django_project_name'] + + +class Django15(Django14): + version = (1, 5) + + +class Django16(Django15): + version = (1, 6) + + +class Django17(Django16): + version = (1, 7) + + USE_SOUTH = False + + def migrate(self, app=''): + self.manage('migrate --no-color %s' % app) + + +class Django18(Django17): + version = (1, 8) + + +from django.contrib.admin.views.main import SEARCH_VAR + +try: + from threading import local +except ImportError: + from django.utils._threading_local import local + +_thread_locals = local() + +def set_current_request(request): + 
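    """ stores the request object for this thread """
+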
    _thread_locals.request = request
+
+def get_current_request():
+    """ returns the request object for this thread """
+    return getattr(_thread_locals, "request", None)
+
+# Sometimes we need to pass parameters around between standard ModelAdmin methods,
+# and since the methods don't have these parameters, we are passing them through a
+# dictionary in the request object. This is hackish, but there currently is no
+# better solution.
+def set_request_attr(request, attr, value):
+    if not hasattr(request, 'media_tree'):
+        request.media_tree = {}
+    request.media_tree[attr] = value
+
+def get_request_attr(request, attr, default=None):
+    if not hasattr(request, 'media_tree'):
+        return default
+    return request.media_tree.get(attr, default)
+
+def is_search_request(request):
+    return request.GET.get(SEARCH_VAR) is not None
+"""
+Set operations for 1D numeric arrays based on sorting.
+
+Contains:
+  ediff1d,
+  unique1d,
+  intersect1d,
+  intersect1d_nu,
+  setxor1d,
+  setmember1d,
+  union1d,
+  setdiff1d
+
+All functions work best with integer numerical arrays on input (e.g. indices).
+For floating point arrays, inaccurate results may appear due to the usual
+round-off and floating point comparison issues.
+
+Except for unique1d, union1d and intersect1d_nu, all functions expect inputs
+with unique elements. Speed could be gained in some operations by an
+implementation of sort() that can provide the permutation vectors directly,
+thus avoiding calls to argsort().
+
+Run _test_unique1d_speed() to compare the performance of numpy.unique1d() and
+numpy.unique() - it should be the same.
+
+To do: Optionally return indices analogously to unique1d for all functions.
+
+Author: Robert Cimrman
+
+created: 01.11.2005
+last revision: 07.01.2007
+"""
+__all__ = ['ediff1d', 'unique1d', 'intersect1d', 'intersect1d_nu', 'setxor1d',
+           'setmember1d', 'union1d', 'setdiff1d']
+
+import time
+import numpy as nm
+
+def ediff1d(ary, to_end = None, to_begin = None):
+    """The differences between consecutive elements of an array, possibly with
+    prefixed and/or appended values.
+
+    :Parameters:
+      - `ary` : array
+          This array will be flattened before the difference is taken.
+      - `to_end` : number, optional
+          If provided, this number will be tacked onto the end of the returned
+          differences.
+      - `to_begin` : number, optional
+          If provided, this number will be tacked onto the beginning of the
+          returned differences.
+
+    :Returns:
+      - `ed` : array
+          The differences. Loosely, this will be (ary[1:] - ary[:-1]).
+    """
+    ary = nm.asarray(ary).flat
+    ed = ary[1:] - ary[:-1]
+    arrays = [ed]
+    if to_begin is not None:
+        arrays.insert(0, to_begin)
+    if to_end is not None:
+        arrays.append(to_end)
+
+    if len(arrays) != 1:
+        # We'll save ourselves a copy of a potentially large array in the common
+        # case where neither to_begin nor to_end was given.
+        ed = nm.hstack(arrays)
+
+    return ed
+
+def unique1d(ar1, return_index=False):
+    """Find the unique elements of a 1D array.
+
+    Most of the other array set operations operate on the unique arrays
+    generated by this function.
+
+    :Parameters:
+      - `ar1` : array
+          This array will be flattened if it is not already 1D.
+      - `return_index` : bool, optional
+          If True, also return the indices of ar1 that result in the unique
+          array.
+
+    :Returns:
+      - `unique` : array
+          The unique values.
+      - `unique_indices` : int array, optional
+          The indices of the unique values. Only provided if return_index is
+          True.
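+
+      (Note: as implemented below, when return_index is True the indices come
+      first: e.g. unique1d([3, 1, 3, 2], return_index=True) returns
+      (array([1, 3, 0]), array([1, 2, 3])).)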
+ + :See also: + numpy.lib.arraysetops has a number of other functions for performing set + operations on arrays. + """ + ar = nm.asarray(ar1).flatten() + if ar.size == 0: + if return_index: return nm.empty(0, nm.bool), ar + else: return ar + + if return_index: + perm = ar.argsort() + aux = ar[perm] + flag = nm.concatenate( ([True], aux[1:] != aux[:-1]) ) + return perm[flag], aux[flag] + + else: + ar.sort() + flag = nm.concatenate( ([True], ar[1:] != ar[:-1]) ) + return ar[flag] + +def intersect1d( ar1, ar2 ): + """Intersection of 1D arrays with unique elements. + + Use unique1d() to generate arrays with only unique elements to use as inputs + to this function. Alternatively, use intersect1d_nu() which will find the + unique values for you. + + :Parameters: + - `ar1` : array + - `ar2` : array + + :Returns: + - `intersection` : array + + :See also: + numpy.lib.arraysetops has a number of other functions for performing set + operations on arrays. + """ + aux = nm.concatenate((ar1,ar2)) + aux.sort() + return aux[aux[1:] == aux[:-1]] + +def intersect1d_nu( ar1, ar2 ): + """Intersection of 1D arrays with any elements. + + The input arrays do not have unique elements like intersect1d() requires. + + :Parameters: + - `ar1` : array + - `ar2` : array + + :Returns: + - `intersection` : array + + :See also: + numpy.lib.arraysetops has a number of other functions for performing set + operations on arrays. + """ + # Might be faster than unique1d( intersect1d( ar1, ar2 ) )? + aux = nm.concatenate((unique1d(ar1), unique1d(ar2))) + aux.sort() + return aux[aux[1:] == aux[:-1]] + +def setxor1d( ar1, ar2 ): + """Set exclusive-or of 1D arrays with unique elements. + + Use unique1d() to generate arrays with only unique elements to use as inputs + to this function. + + :Parameters: + - `ar1` : array + - `ar2` : array + + :Returns: + - `xor` : array + The values that are only in one, but not both, of the input arrays. + + :See also: + numpy.lib.arraysetops has a number of other functions for performing set + operations on arrays. + """ + aux = nm.concatenate((ar1, ar2)) + if aux.size == 0: + return aux + + aux.sort() +# flag = ediff1d( aux, to_end = 1, to_begin = 1 ) == 0 + flag = nm.concatenate( ([True], aux[1:] != aux[:-1], [True] ) ) +# flag2 = ediff1d( flag ) == 0 + flag2 = flag[1:] == flag[:-1] + return aux[flag2] + +def setmember1d( ar1, ar2 ): + """Return a boolean array of shape of ar1 containing True where the elements + of ar1 are in ar2 and False otherwise. + + Use unique1d() to generate arrays with only unique elements to use as inputs + to this function. + + :Parameters: + - `ar1` : array + - `ar2` : array + + :Returns: + - `mask` : bool array + The values ar1[mask] are in ar2. + + :See also: + numpy.lib.arraysetops has a number of other functions for performing set + operations on arrays. + """ + zlike = nm.zeros_like + ar = nm.concatenate( (ar1, ar2 ) ) + tt = nm.concatenate( (zlike( ar1 ), zlike( ar2 ) + 1) ) + # We need this to be a stable sort, so always use 'mergesort' here. The + # values from the first array should always come before the values from the + # second array. + perm = ar.argsort(kind='mergesort') + aux = ar[perm] + aux2 = tt[perm] +# flag = ediff1d( aux, 1 ) == 0 + flag = nm.concatenate( (aux[1:] == aux[:-1], [False] ) ) + + ii = nm.where( flag * aux2 )[0] + aux = perm[ii+1] + perm[ii+1] = perm[ii] + perm[ii] = aux + + indx = perm.argsort(kind='mergesort')[:len( ar1 )] + + return flag[indx] + +def union1d( ar1, ar2 ): + """Union of 1D arrays with unique elements. 
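+
+    For example, union1d( [1, 3], [2, 3] ) returns array([1, 2, 3]), per the
+    implementation below.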
+ + Use unique1d() to generate arrays with only unique elements to use as inputs + to this function. + + :Parameters: + - `ar1` : array + - `ar2` : array + + :Returns: + - `union` : array + + :See also: + numpy.lib.arraysetops has a number of other functions for performing set + operations on arrays. + """ + return unique1d( nm.concatenate( (ar1, ar2) ) ) + +def setdiff1d( ar1, ar2 ): + """Set difference of 1D arrays with unique elements. + + Use unique1d() to generate arrays with only unique elements to use as inputs + to this function. + + :Parameters: + - `ar1` : array + - `ar2` : array + + :Returns: + - `difference` : array + The values in ar1 that are not in ar2. + + :See also: + numpy.lib.arraysetops has a number of other functions for performing set + operations on arrays. + """ + aux = setmember1d(ar1,ar2) + if aux.size == 0: + return aux + else: + return nm.asarray(ar1)[aux == 0] + +def _test_unique1d_speed( plot_results = False ): +# exponents = nm.linspace( 2, 7, 9 ) + exponents = nm.linspace( 2, 7, 9 ) + ratios = [] + nItems = [] + dt1s = [] + dt2s = [] + for ii in exponents: + + nItem = 10 ** ii + print 'using %d items:' % nItem + a = nm.fix( nItem / 10 * nm.random.random( nItem ) ) + + print 'unique:' + tt = time.clock() + b = nm.unique( a ) + dt1 = time.clock() - tt + print dt1 + + print 'unique1d:' + tt = time.clock() + c = unique1d( a ) + dt2 = time.clock() - tt + print dt2 + + + if dt1 < 1e-8: + ratio = 'ND' + else: + ratio = dt2 / dt1 + print 'ratio:', ratio + print 'nUnique: %d == %d\n' % (len( b ), len( c )) + + nItems.append( nItem ) + ratios.append( ratio ) + dt1s.append( dt1 ) + dt2s.append( dt2 ) + + assert nm.alltrue( b == c ) + + print nItems + print dt1s + print dt2s + print ratios + + if plot_results: + import pylab + + def plotMe( fig, fun, nItems, dt1s, dt2s ): + pylab.figure( fig ) + fun( nItems, dt1s, 'g-o', linewidth = 2, markersize = 8 ) + fun( nItems, dt2s, 'b-x', linewidth = 2, markersize = 8 ) + pylab.legend( ('unique', 'unique1d' ) ) + pylab.xlabel( 'nItem' ) + pylab.ylabel( 'time [s]' ) + + plotMe( 1, pylab.loglog, nItems, dt1s, dt2s ) + plotMe( 2, pylab.plot, nItems, dt1s, dt2s ) + pylab.show() + +if (__name__ == '__main__'): + _test_unique1d_speed( plot_results = True ) + +# -*- coding: utf-8 -*- +""" + flask.blueprints + ~~~~~~~~~~~~~~~~ + + Blueprints are the recommended way to implement larger or more + pluggable applications in Flask 0.7 and later. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" +from functools import update_wrapper + +from .helpers import _PackageBoundObject, _endpoint_from_view_func + + +class BlueprintSetupState(object): + """Temporary holder object for registering a blueprint with the + application. An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. + """ + + def __init__(self, blueprint, app, options, first_registration): + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. 
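+        #: (record_once() below, for example, checks this flag so that its
+        #: callback only runs on the first registration.)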
+ self.first_registration = first_registration + + subdomain = self.options.get('subdomain') + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, `None` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get('url_prefix') + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get('url_defaults', ())) + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. + """ + if self.url_prefix: + rule = self.url_prefix + rule + options.setdefault('subdomain', self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) + defaults = self.url_defaults + if 'defaults' in options: + defaults = dict(defaults, **options.pop('defaults')) + self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint), + view_func, defaults=defaults, **options) + + +class Blueprint(_PackageBoundObject): + """Represents a blueprint. A blueprint is an object that records + functions that will be called with the + :class:`~flask.blueprint.BlueprintSetupState` later to register functions + or other things on the main application. See :ref:`blueprints` for more + information. + + .. versionadded:: 0.7 + """ + + warn_on_modifications = False + _got_registered_once = False + + def __init__(self, name, import_name, static_folder=None, + static_url_path=None, template_folder=None, + url_prefix=None, subdomain=None, url_defaults=None): + _PackageBoundObject.__init__(self, import_name, template_folder) + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.static_folder = static_folder + self.static_url_path = static_url_path + self.deferred_functions = [] + self.view_functions = {} + if url_defaults is None: + url_defaults = {} + self.url_values_defaults = url_defaults + + def record(self, func): + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + if self._got_registered_once and self.warn_on_modifications: + from warnings import warn + warn(Warning('The blueprint was already registered once ' + 'but is getting modified now. These changes ' + 'will not show up.')) + self.deferred_functions.append(func) + + def record_once(self, func): + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + def wrapper(state): + if state.first_registration: + func(state) + return self.record(update_wrapper(wrapper, func)) + + def make_setup_state(self, app, options, first_registration=False): + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. 
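+
+        A minimal sketch of such a subclass (names hypothetical)::
+
+            class MyBlueprint(Blueprint):
+                def make_setup_state(self, app, options,
+                                     first_registration=False):
+                    return MySetupState(self, app, options,
+                                        first_registration)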
+ """ + return BlueprintSetupState(self, app, options, first_registration) + + def register(self, app, options, first_registration=False): + """Called by :meth:`Flask.register_blueprint` to register a blueprint + on the application. This can be overridden to customize the register + behavior. Keyword arguments from + :func:`~flask.Flask.register_blueprint` are directly forwarded to this + method in the `options` dictionary. + """ + self._got_registered_once = True + state = self.make_setup_state(app, options, first_registration) + if self.has_static_folder: + state.add_url_rule(self.static_url_path + '/', + view_func=self.send_static_file, + endpoint='static') + + for deferred in self.deferred_functions: + deferred(state) + + def route(self, rule, **options): + """Like :meth:`Flask.route` but for a blueprint. The endpoint for the + :func:`url_for` function is prefixed with the name of the blueprint. + """ + def decorator(f): + endpoint = options.pop("endpoint", f.__name__) + self.add_url_rule(rule, endpoint, f, **options) + return f + return decorator + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for + the :func:`url_for` function is prefixed with the name of the blueprint. + """ + if endpoint: + assert '.' not in endpoint, "Blueprint endpoint's should not contain dot's" + self.record(lambda s: + s.add_url_rule(rule, endpoint, view_func, **options)) + + def endpoint(self, endpoint): + """Like :meth:`Flask.endpoint` but for a blueprint. This does not + prefix the endpoint with the blueprint name, this has to be done + explicitly by the user of this method. If the endpoint is prefixed + with a `.` it will be registered to the current blueprint, otherwise + it's an application independent endpoint. + """ + def decorator(f): + def register_endpoint(state): + state.app.view_functions[endpoint] = f + self.record_once(register_endpoint) + return f + return decorator + + def app_template_filter(self, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.template_filter` but for a blueprint. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_filter(f, name=name) + return f + return decorator + + def add_app_template_filter(self, f, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.add_template_filter` but for a blueprint. Works exactly + like the :meth:`app_template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.filters[name or f.__name__] = f + self.record_once(register_template) + + def app_template_test(self, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.template_test` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_test(f, name=name) + return f + return decorator + + def add_app_template_test(self, f, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.add_template_test` but for a blueprint. Works exactly + like the :meth:`app_template_test` decorator. + + .. 
versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.tests[name or f.__name__] = f + self.record_once(register_template) + + def app_template_global(self, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.template_global` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_global(f, name=name) + return f + return decorator + + def add_app_template_global(self, f, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.add_template_global` but for a blueprint. Works exactly + like the :meth:`app_template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.globals[name or f.__name__] = f + self.record_once(register_template) + + def before_request(self, f): + """Like :meth:`Flask.before_request` but for a blueprint. This function + is only executed before each request that is handled by a function of + that blueprint. + """ + self.record_once(lambda s: s.app.before_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def before_app_request(self, f): + """Like :meth:`Flask.before_request`. Such a function is executed + before each request, even if outside of a blueprint. + """ + self.record_once(lambda s: s.app.before_request_funcs + .setdefault(None, []).append(f)) + return f + + def before_app_first_request(self, f): + """Like :meth:`Flask.before_first_request`. Such a function is + executed before the first request to the application. + """ + self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) + return f + + def after_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. This function + is only executed after each request that is handled by a function of + that blueprint. + """ + self.record_once(lambda s: s.app.after_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def after_app_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. Such a function + is executed after each request, even if outside of the blueprint. + """ + self.record_once(lambda s: s.app.after_request_funcs + .setdefault(None, []).append(f)) + return f + + def teardown_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. This + function is only executed when tearing down requests handled by a + function of that blueprint. Teardown request functions are executed + when the request context is popped, even when no actual request was + performed. + """ + self.record_once(lambda s: s.app.teardown_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def teardown_app_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. Such a + function is executed when tearing down each request, even if outside of + the blueprint. + """ + self.record_once(lambda s: s.app.teardown_request_funcs + .setdefault(None, []).append(f)) + return f + + def context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. This + function is only executed for requests handled by a blueprint. 
+ """ + self.record_once(lambda s: s.app.template_context_processors + .setdefault(self.name, []).append(f)) + return f + + def app_context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. Such a + function is executed each request, even if outside of the blueprint. + """ + self.record_once(lambda s: s.app.template_context_processors + .setdefault(None, []).append(f)) + return f + + def app_errorhandler(self, code): + """Like :meth:`Flask.errorhandler` but for a blueprint. This + handler is used for all requests, even if outside of the blueprint. + """ + def decorator(f): + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + return decorator + + def url_value_preprocessor(self, f): + """Registers a function as URL value preprocessor for this + blueprint. It's called before the view functions are called and + can modify the url values provided. + """ + self.record_once(lambda s: s.app.url_value_preprocessors + .setdefault(self.name, []).append(f)) + return f + + def url_defaults(self, f): + """Callback function for URL defaults for this blueprint. It's called + with the endpoint and values and should update the values passed + in place. + """ + self.record_once(lambda s: s.app.url_default_functions + .setdefault(self.name, []).append(f)) + return f + + def app_url_value_preprocessor(self, f): + """Same as :meth:`url_value_preprocessor` but application wide. + """ + self.record_once(lambda s: s.app.url_value_preprocessors + .setdefault(None, []).append(f)) + return f + + def app_url_defaults(self, f): + """Same as :meth:`url_defaults` but application wide. + """ + self.record_once(lambda s: s.app.url_default_functions + .setdefault(None, []).append(f)) + return f + + def errorhandler(self, code_or_exception): + """Registers an error handler that becomes active for this blueprint + only. Please be aware that routing does not happen local to a + blueprint so an error handler for 404 usually is not handled by + a blueprint unless it is caused inside a view function. Another + special case is the 500 internal server error which is always looked + up from the application. + + Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator + of the :class:`~flask.Flask` object. + """ + def decorator(f): + self.record_once(lambda s: s.app._register_error_handler( + self.name, code_or_exception, f)) + return f + return decorator + +# encoding: utf-8 +"""Tests for IPython.core.ultratb +""" +import io +import sys +import os.path +from textwrap import dedent +import traceback +import unittest + +try: + from unittest import mock +except ImportError: + import mock # Python 2 + +from ..ultratb import ColorTB, VerboseTB, find_recursion + + +from IPython.testing import tools as tt +from IPython.testing.decorators import onlyif_unicode_paths +from IPython.utils.syspathcontext import prepended_to_syspath +from IPython.utils.tempdir import TemporaryDirectory +from IPython.utils.py3compat import PY3 + +ip = get_ipython() + +file_1 = """1 +2 +3 +def f(): + 1/0 +""" + +file_2 = """def f(): + 1/0 +""" + +class ChangedPyFileTest(unittest.TestCase): + def test_changing_py_file(self): + """Traceback produced if the line where the error occurred is missing? 
+ + https://github.com/ipython/ipython/issues/1456 + """ + with TemporaryDirectory() as td: + fname = os.path.join(td, "foo.py") + with open(fname, "w") as f: + f.write(file_1) + + with prepended_to_syspath(td): + ip.run_cell("import foo") + + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + + # Make the file shorter, so the line of the error is missing. + with open(fname, "w") as f: + f.write(file_2) + + # For some reason, this was failing on the *second* call after + # changing the file, so we call f() twice. + with tt.AssertNotPrints("Internal Python error", channel='stderr'): + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + +iso_8859_5_file = u'''# coding: iso-8859-5 + +def fail(): + """дбИЖ""" + 1/0 # дбИЖ +''' + +class NonAsciiTest(unittest.TestCase): + @onlyif_unicode_paths + def test_nonascii_path(self): + # Non-ascii directory name as well. + with TemporaryDirectory(suffix=u'é') as td: + fname = os.path.join(td, u"fooé.py") + with open(fname, "w") as f: + f.write(file_1) + + with prepended_to_syspath(td): + ip.run_cell("import foo") + + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + + def test_iso8859_5(self): + with TemporaryDirectory() as td: + fname = os.path.join(td, 'dfghjkl.py') + + with io.open(fname, 'w', encoding='iso-8859-5') as f: + f.write(iso_8859_5_file) + + with prepended_to_syspath(td): + ip.run_cell("from dfghjkl import fail") + + with tt.AssertPrints("ZeroDivisionError"): + with tt.AssertPrints(u'дбИЖ', suppress=False): + ip.run_cell('fail()') + + def test_nonascii_msg(self): + cell = u"raise Exception('é')" + expected = u"Exception('é')" + ip.run_cell("%xmode plain") + with tt.AssertPrints(expected): + ip.run_cell(cell) + + ip.run_cell("%xmode verbose") + with tt.AssertPrints(expected): + ip.run_cell(cell) + + ip.run_cell("%xmode context") + with tt.AssertPrints(expected): + ip.run_cell(cell) + + +class NestedGenExprTestCase(unittest.TestCase): + """ + Regression test for the following issues: + https://github.com/ipython/ipython/issues/8293 + https://github.com/ipython/ipython/issues/8205 + """ + def test_nested_genexpr(self): + code = dedent( + """\ + class SpecificException(Exception): + pass + + def foo(x): + raise SpecificException("Success!") + + sum(sum(foo(x) for _ in [0]) for x in [0]) + """ + ) + with tt.AssertPrints('SpecificException: Success!', suppress=False): + ip.run_cell(code) + + +indentationerror_file = """if True: +zoon() +""" + +class IndentationErrorTest(unittest.TestCase): + def test_indentationerror_shows_line(self): + # See issue gh-2398 + with tt.AssertPrints("IndentationError"): + with tt.AssertPrints("zoon()", suppress=False): + ip.run_cell(indentationerror_file) + + with TemporaryDirectory() as td: + fname = os.path.join(td, "foo.py") + with open(fname, "w") as f: + f.write(indentationerror_file) + + with tt.AssertPrints("IndentationError"): + with tt.AssertPrints("zoon()", suppress=False): + ip.magic('run %s' % fname) + +se_file_1 = """1 +2 +7/ +""" + +se_file_2 = """7/ +""" + +class SyntaxErrorTest(unittest.TestCase): + def test_syntaxerror_without_lineno(self): + with tt.AssertNotPrints("TypeError"): + with tt.AssertPrints("line unknown"): + ip.run_cell("raise SyntaxError()") + + def test_changing_py_file(self): + with TemporaryDirectory() as td: + fname = os.path.join(td, "foo.py") + with open(fname, 'w') as f: + f.write(se_file_1) + + with tt.AssertPrints(["7/", "SyntaxError"]): + ip.magic("run 
" + fname) + + # Modify the file + with open(fname, 'w') as f: + f.write(se_file_2) + + # The SyntaxError should point to the correct line + with tt.AssertPrints(["7/", "SyntaxError"]): + ip.magic("run " + fname) + + def test_non_syntaxerror(self): + # SyntaxTB may be called with an error other than a SyntaxError + # See e.g. gh-4361 + try: + raise ValueError('QWERTY') + except ValueError: + with tt.AssertPrints('QWERTY'): + ip.showsyntaxerror() + + +class Python3ChainedExceptionsTest(unittest.TestCase): + DIRECT_CAUSE_ERROR_CODE = """ +try: + x = 1 + 2 + print(not_defined_here) +except Exception as e: + x += 55 + x - 1 + y = {} + raise KeyError('uh') from e + """ + + EXCEPTION_DURING_HANDLING_CODE = """ +try: + x = 1 + 2 + print(not_defined_here) +except Exception as e: + x += 55 + x - 1 + y = {} + raise KeyError('uh') + """ + + SUPPRESS_CHAINING_CODE = """ +try: + 1/0 +except Exception: + raise ValueError("Yikes") from None + """ + + def test_direct_cause_error(self): + if PY3: + with tt.AssertPrints(["KeyError", "NameError", "direct cause"]): + ip.run_cell(self.DIRECT_CAUSE_ERROR_CODE) + + def test_exception_during_handling_error(self): + if PY3: + with tt.AssertPrints(["KeyError", "NameError", "During handling"]): + ip.run_cell(self.EXCEPTION_DURING_HANDLING_CODE) + + def test_suppress_exception_chaining(self): + if PY3: + with tt.AssertNotPrints("ZeroDivisionError"), \ + tt.AssertPrints("ValueError", suppress=False): + ip.run_cell(self.SUPPRESS_CHAINING_CODE) + + +class RecursionTest(unittest.TestCase): + DEFINITIONS = """ +def non_recurs(): + 1/0 + +def r1(): + r1() + +def r3a(): + r3b() + +def r3b(): + r3c() + +def r3c(): + r3a() + +def r3o1(): + r3a() + +def r3o2(): + r3o1() +""" + def setUp(self): + ip.run_cell(self.DEFINITIONS) + + def test_no_recursion(self): + with tt.AssertNotPrints("frames repeated"): + ip.run_cell("non_recurs()") + + def test_recursion_one_frame(self): + with tt.AssertPrints("1 frames repeated"): + ip.run_cell("r1()") + + def test_recursion_three_frames(self): + with tt.AssertPrints("3 frames repeated"): + ip.run_cell("r3o2()") + + def test_find_recursion(self): + captured = [] + def capture_exc(*args, **kwargs): + captured.append(sys.exc_info()) + with mock.patch.object(ip, 'showtraceback', capture_exc): + ip.run_cell("r3o2()") + + self.assertEqual(len(captured), 1) + etype, evalue, tb = captured[0] + self.assertIn("recursion", str(evalue)) + + records = ip.InteractiveTB.get_records(tb, 3, ip.InteractiveTB.tb_offset) + for r in records[:10]: + print(r[1:4]) + + # The outermost frames should be: + # 0: the 'cell' that was running when the exception came up + # 1: r3o2() + # 2: r3o1() + # 3: r3a() + # Then repeating r3b, r3c, r3a + last_unique, repeat_length = find_recursion(etype, evalue, records) + self.assertEqual(last_unique, 2) + self.assertEqual(repeat_length, 3) + + +#---------------------------------------------------------------------------- + +# module testing (minimal) +if sys.version_info > (3,): + def test_handlers(): + def spam(c, d_e): + (d, e) = d_e + x = c + d + y = c * d + foo(x, y) + + def foo(a, b, bar=1): + eggs(a, b + bar) + + def eggs(f, g, z=globals()): + h = f + g + i = f - g + return h / i + + buff = io.StringIO() + + buff.write('') + buff.write('*** Before ***') + try: + buff.write(spam(1, (2, 3))) + except: + traceback.print_exc(file=buff) + + handler = ColorTB(ostream=buff) + buff.write('*** ColorTB ***') + try: + buff.write(spam(1, (2, 3))) + except: + handler(*sys.exc_info()) + buff.write('') + + handler = 
VerboseTB(ostream=buff)
+        buff.write('*** VerboseTB ***')
+        try:
+            buff.write(spam(1, (2, 3)))
+        except:
+            handler(*sys.exc_info())
+        buff.write('')
+
+
+# Mantid Repository : https://github.com/mantidproject/mantid
+#
+# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
+# NScD Oak Ridge National Laboratory, European Spallation Source
+# & Institut Laue - Langevin
+# SPDX - License - Identifier: GPL - 3.0 +
+
+from __future__ import (absolute_import, division, print_function)
+#pylint: disable=invalid-name,R0912
+"""
+    Classes for each reduction step. Those are kept separately
+    from the interface class so that the DgsReduction class could
+    be used independently of the interface implementation
+"""
+import os
+from mantid.kernel import Logger
+from mantid.api import FileFinder
+from reduction_gui.reduction.scripter import BaseReductionScripter
+
+
+class DiffractionReductionScripter(BaseReductionScripter):
+    """ Organizes the set of reduction parameters that will be used to
+    create a reduction script. Parameters are organized by groups that
+    will each have their own UI representation.
+
+    Items in dictionary:
+    1. facility_name
+    2. instrument_name
+    3. _output_directory
+    4. _observers
+    """
+    TOPLEVEL_WORKFLOWALG = "SNSPowderReductionPlus"
+    WIDTH_END = "".join([" " for i in range(len(TOPLEVEL_WORKFLOWALG))])
+    WIDTH = WIDTH_END + " "
+    AUTOSCRIPTNAME = 'SNSPowderReductionScript_AutoSave.py'
+
+    def __init__(self, name, facility='SNS'):
+        """ Initialization
+        """
+        # Call base class
+        super(DiffractionReductionScripter, self).__init__(name=name, facility=facility)
+
+        # Find whether there are stored setup XMLs
+        homedir = os.path.expanduser("~")
+        mantidconfigdir = os.path.join(homedir, ".mantid")
+        self.configDir = mantidconfigdir
+
+        # Create the configuration dir if it has not been created yet
+        if not os.path.exists(self.configDir):
+            os.makedirs(self.configDir)
+
+        # Information output
+        if self.facility_name is False:
+            self.facility_name = 'SNS'
+        dbmsg = '[SNS Powder Reduction] Facility = %s, Instrument = %s\n' \
+                'Auto-save Directory %s' % (self.facility_name, self.instrument_name,
+                                            mantidconfigdir)
+        Logger("DiffractionReductionScripter").debug(str(dbmsg))
+
+        return
+
+    def to_script(self, file_name=None):
+        """ Generate reduction script via observers and
+        (1) save the script to disk and (2) save the reduction setup to disk.
+
+        Arguments:
+        - file_name: name of the file to write the script to
+        """
+        # Collect partial scripters from observers
+        paramdict = {}
+        for observer in self._observers:
+            obstate = observer.state()
+            self.parseTabSetupScript(observer._subject.__class__.__name__, obstate, paramdict)
+        # ENDFOR
+
+        # Construct python commands
+        script = self.constructPythonScript(paramdict)
+
+        # Save script to disk
+        if file_name is None:
+            file_name = os.path.join(self.configDir, DiffractionReductionScripter.AUTOSCRIPTNAME)
+
+        try:
+            f = open(file_name, 'w')
+            f.write(script)
+            f.close()
+        except IOError as e:
+            print ("Unable to save script to file. Reason: %s." % (str(e)))
+
+        # Export XML file
+        autosavexmlfname = os.path.join(self.configDir, "snspowderreduction.xml")
+        self.to_xml(autosavexmlfname)
+
+        # Information output
+        wbuf = "Reduction script: (script is saved to %s; setup is saved to %s.
\n" % ( + file_name, autosavexmlfname) + wbuf += script + wbuf += "\n========== End of Script ===========" + print (wbuf) + + return script + + def to_xml(self, file_name=None): + """ Extending base class to_xml + """ + BaseReductionScripter.to_xml(self, file_name) + + return + + def parseTabSetupScript(self, tabsetuptype, setupscript, paramdict): + """ Parse script returned from tab setup + + @param setupscript : object of SetupScript for this tab/observer + """ + # print "ClassName: %s. Type %s" % (tabsetuptype, type(setupscript)) + + if setupscript is None: + return + + else: + paramdict[tabsetuptype] = {} + terms = str(setupscript).split("\n") + for item in terms: + item = item.strip() + if item == "": + continue + + item = item.rstrip(",") + subterms = item.split("=", 1) + key = subterms[0].strip() + value = subterms[1].strip().strip("\"").strip("'") + paramdict[tabsetuptype][key] = value + # ENDFOR + # ENDIF + + return + + def constructPythonScript(self, paramdict): + """ Construct python script + """ + # 1. Obtain all information + runsetupdict = paramdict["RunSetupWidget"] + advsetupdict = paramdict["AdvancedSetupWidget"] + filterdict = paramdict["FilterSetupWidget"] + + # 2. Obtain some information + datafilenames = self.getDataFileNames(runsetupdict, advsetupdict) + if len(datafilenames) == 0: + raise NotImplementedError("RunNumber cannot be neglected. ") + + dofilter = self.doFiltering(filterdict) + + # 3. Header + script = "from mantid.simpleapi import *\n" + script += "config['default.facility']=\"%s\"\n" % self.facility_name + script += "\n" + + if dofilter: + # a) Construct python script with generating filters + for runtuple in datafilenames: + + runnumber = runtuple[0] + datafilename = runtuple[1] + + # print "Working on run ", str(runnumber), " in file ", datafilename + + # i. Load meta data only + metadatawsname = str(datafilename.split(".")[0]+"_meta") + splitwsname = str(datafilename.split(".")[0] + "_splitters") + splitinfowsname = str(datafilename.split(".")[0] + "_splitinfo") + + script += "# Load data's log only\n" + script += "Load(\n" + script += "{}Filename = '{}',\n".format(DiffractionReductionScripter.WIDTH, datafilename) + script += "{}OutputWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, metadatawsname) + script += "{}MetaDataOnly = True)\n".format(DiffractionReductionScripter.WIDTH) + + script += "\n" + + # ii. 
Generate event filters
+                script += "# Construct the event filters\n"
+                script += "GenerateEventsFilter(\n"
+                script += "{}InputWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, metadatawsname)
+                script += "{}OutputWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, splitwsname)
+                script += "{}InformationWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, splitinfowsname)
+                if filterdict["FilterByTimeMin"] != "":
+                    script += "{}StartTime = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["FilterByTimeMin"])
+                if filterdict["FilterByTimeMax"] != "":
+                    script += "{}StopTime = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["FilterByTimeMax"])
+
+                if filterdict["FilterType"] == "ByTime":
+                    # Filter by time
+                    script += "{}TimeInterval = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["LengthOfTimeInterval"])
+                    script += "{}UnitOfTime = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["UnitOfTime"])
+                    script += "{}LogName = '',\n".format(DiffractionReductionScripter.WIDTH)  # intentionally empty
+
+                elif filterdict["FilterType"] == "ByLogValue":
+                    # Filter by log value
+                    script += "{}LogName = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["LogName"])
+                    if filterdict["MinimumLogValue"] != "":
+                        script += "{}MinimumLogValue = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["MinimumLogValue"])
+                    if filterdict["MaximumLogValue"] != "":
+                        script += "{}MaximumLogValue = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["MaximumLogValue"])
+                    script += "{}FilterLogValueByChangingDirection = '{}',\n".format(DiffractionReductionScripter.WIDTH,
+                                                                                     filterdict["FilterLogValueByChangingDirection"])
+                    if filterdict["LogValueInterval"] != "":
+                        # Filter by log value interval
+                        script += "{}LogValueInterval = '{}',\n".format(DiffractionReductionScripter.WIDTH,
+                                                                        filterdict["LogValueInterval"])
+                    script += "{}LogBoundary = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["LogBoundary"])
+                    if filterdict["TimeTolerance"] != "":
+                        script += "{}TimeTolerance = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["TimeTolerance"])
+                    if filterdict["LogValueTolerance"] != "":
+                        script += "{}LogValueTolerance = '{}',\n".format(DiffractionReductionScripter.WIDTH,
+                                                                         filterdict["LogValueTolerance"])
+                # ENDIF
+                script += ")\n"
+
+                # iii. Data reduction
+                script += self.buildPowderDataReductionScript(runsetupdict, advsetupdict, runnumber, splitwsname, splitinfowsname)
+
+            # ENDFOR data file names
+
+        else:
+            # b) Construct python script without generating filters
+            script += self.buildPowderDataReductionScript(runsetupdict, advsetupdict)
+
+        # ENDIF : do filter
+
+        print ("Script and Save XML to default.")
+
+        return script
+
+    def doFiltering(self, filterdict):
+        """ Check filter dictionary to determine whether filtering is required.
+        """
+        dofilter = False
+        if filterdict["FilterByTimeMin"] != "":
+            dofilter = True
+            # print "Yes! Min Generate Filter will be called!"
+
+        if filterdict["FilterByTimeMax"] != "":
+            dofilter = True
+            # print "Yes! Max Generate Filter will be called!"
+
+        if filterdict["FilterType"] != "NoFilter":
+            dofilter = True
+            # print "Yes! FilterType Generate Filter will be called!"
+
+        return dofilter
+
+    def getDataFileNames(self, runsetupdict, advsetupdict):
+        """ Obtain the data file names (run names + SUFFIX)
+
+        Return: list of files
+        """
+
+        runnumbers_str = str(runsetupdict["RunNumber"])
+        if runnumbers_str.count(':') > 0:
+            runnumbers_str = runnumbers_str.replace(':', '-')
+        runnumbers_str = FileFinder.findRuns('{}{}'.format(self.instrument_name, runnumbers_str))
+        runnumbers_str = [os.path.split(filename)[-1] for filename in runnumbers_str]
+
+        # create an integer version
+        runnumbers = []
+        for filename in runnumbers_str:
+            for extension in ['_event.nxs', '.nxs.h5']:
+                filename = filename.replace(extension, '')
+            runnumber = filename.split('_')[-1]
+            runnumbers.append(int(runnumber))
+
+        # put together the output
+        datafilenames = []
+        for (filename, runnumber) in zip(runnumbers_str, runnumbers):
+            datafilenames.append((runnumber, filename))
+
+        return datafilenames
+
+    def buildPowderDataReductionScript(self, runsetupdict, advsetupdict,
+                                       runnumber=None, splitwsname=None,
+                                       splitinfowsname=None):
+        """ Build the script to call SNSPowderReduction()
+        """
+        script = 'SNSPowderReduction(\n'
+
+        # 1. Run setup
+        # a) determine whether to turn on/off corrections
+        if int(runsetupdict["DisableBackgroundCorrection"]) == 1:
+            runsetupdict["BackgroundNumber"] = -1
+        if int(runsetupdict["DisableVanadiumCorrection"]) == 1:
+            runsetupdict["VanadiumNumber"] = -1
+        if int(runsetupdict["DisableVanadiumBackgroundCorrection"]) == 1:
+            runsetupdict["VanadiumBackgroundNumber"] = -1
+
+        # b) do resample X or binning
+        if int(runsetupdict["DoReSampleX"]) == 0:
+            # turn off the option of SampleX
+            runsetupdict["ResampleX"] = ''
+        else:
+            # turn off the binning
+            runsetupdict["Binning"] = ''
+
+        # only NOMAD uses 'ExpIniFile'
+        if not self.instrument_name.lower().startswith('nom'):
+            runsetupdict.pop('ExpIniFile', None)
+
+        # c) all properties
+        # items() (rather than Python 2's iteritems()) keeps this loop working
+        # under both Python 2 and 3
+        for propname, propvalue in runsetupdict.items():
+            # skip these pseudo-properties
+            if propname in ['DisableBackgroundCorrection', 'DisableVanadiumCorrection',
+                            'DisableVanadiumBackgroundCorrection', 'DoReSampleX']:
+                continue
+
+            if propvalue == '' or propvalue is None:
+                # Skip not-defined value
+                continue
+
+            if propname == "RunNumber":
+                propname = 'Filename'  # change to what SNSPowderReduction uses
+
+                # option to take user input run number
+                if runnumber is not None:
+                    propvalue = runnumber
+
+                # add the instrument name to the file hint
+                propvalue = '{}_{}'.format(self.instrument_name, str(propvalue))
+
+            # Add value
+            script += "{}{} = '{}',\n".format(DiffractionReductionScripter.WIDTH, propname, propvalue)
+        # ENDFOR
+
+        # 2. Advanced setup
+        for propname, propvalue in advsetupdict.items():
+            if propvalue == '' or propvalue is None:
+                # Skip not-defined value
+                continue
+
+            # Add to script
+            script += "{}{} = '{}',\n".format(DiffractionReductionScripter.WIDTH, propname, propvalue)
+        # ENDFOR
+
+        # 3. Optional splitter workspace
+        if splitwsname is not None and splitwsname != "":
+            script += "{}SplittersWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, str(splitwsname))
+        if splitinfowsname is not None and splitinfowsname != "":
+            script += "{}SplitInformationWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH,
+                                                                     str(splitinfowsname))
+        script += "{})\n".format(DiffractionReductionScripter.WIDTH)
+
+        return script
+
+    def _synInstrument(self):
+        """ Sync the instrument name from the observer widget.
+        """
+        # Facility instrument
+        for observer in self._observers:
+            observertype = observer._subject.__class__.__name__
+            print ("[ToScript] Observer Type = ", observertype)
+            if observertype.count("AdvancedWidget") == 1:
+                self.instrument_name = observer._subject._instrument_name
+
+        return
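+
+# ---------------------------------------------------------------------------
+# Editor's note: the snippet below is an illustrative sketch added for
+# documentation purposes; it is not part of the Mantid source above. The
+# function name and sample text are hypothetical. It mirrors the
+# "key = value," line parsing that parseTabSetupScript() above performs on
+# each tab's setup script, so the expected input format is easy to see
+# (a guard for lines without '=' is added here).
+
+def _parse_setup_script_sketch(setupscript):
+    """Parse lines of the form "key = value," into a plain dict."""
+    params = {}
+    for item in str(setupscript).split("\n"):
+        item = item.strip().rstrip(",")
+        if item == "" or "=" not in item:
+            continue
+        key, value = item.split("=", 1)
+        # strip surrounding quotes the same way parseTabSetupScript does
+        params[key.strip()] = value.strip().strip('"').strip("'")
+    return params
+
+# Example:
+#   _parse_setup_script_sketch("RunNumber = '1234',\nBinning = -0.001,")
+#   -> {'RunNumber': '1234', 'Binning': '-0.001'}
+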
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class ToughFiltersCasesPage(page_module.Page):
+
+  def RunPageInteractions(self, action_runner):
+    with action_runner.CreateInteraction('Filter'):
+      action_runner.Wait(10)
+
+
+class PirateMarkPage(page_module.Page):
+
+  def RunPageInteractions(self, action_runner):
+    with action_runner.CreateInteraction('Filter'):
+      action_runner.EvaluateJavaScript(
+          'document.getElementById("benchmarkButtonText").click()')
+      action_runner.Wait(10)
+
+class ToughFiltersCasesPageSet(story.StorySet):
+
+  """
+  Description: Self-driven filters animation examples
+  """
+
+  def __init__(self):
+    super(ToughFiltersCasesPageSet, self).__init__(
+        archive_data_file='data/tough_filters_cases.json',
+        cloud_storage_bucket=story.PARTNER_BUCKET)
+
+    urls_list = [
+      'http://letmespellitoutforyou.com/samples/svg/filter_terrain.svg',
+      'http://static.bobdo.net/Analog_Clock.svg',
+    ]
+
+    for url in urls_list:
+      self.AddStory(ToughFiltersCasesPage(url, self))
+
+    self.AddStory(PirateMarkPage(
+        'http://ie.microsoft.com/testdrive/Performance/Pirates/', self))
+
+"""A parser for SGML, using the derived class as a static DTD."""
+
+# XXX This only supports those SGML features used by HTML.
+
+# XXX There should be a way to distinguish between PCDATA (parsed
+# character data -- the normal case), RCDATA (replaceable character
+# data -- only char and entity references and end tags are special)
+# and CDATA (character data -- only end tags are special). RCDATA is
+# not supported at all.
+
+import _markupbase
+import re
+
+__all__ = ["SGMLParser", "SGMLParseError"]
+
+# Regular expressions used for parsing
+
+interesting = re.compile('[&<]')
+incomplete = re.compile('&([a-zA-Z][a-zA-Z0-9]*|#[0-9]*)?|'
+                        '<([a-zA-Z][^<>]*|'
+                        '/([a-zA-Z][^<>]*)?|'
+                        '![^<>]*)?')
+
+entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]')
+charref = re.compile('&#([0-9]+)[^0-9]')
+
+starttagopen = re.compile('<[>a-zA-Z]')
+shorttagopen = re.compile('<[a-zA-Z][-.a-zA-Z0-9]*/')
+shorttag = re.compile('<([a-zA-Z][-.a-zA-Z0-9]*)/([^/]*)/')
+piclose = re.compile('>')
+endbracket = re.compile('[<>]')
+tagfind = re.compile('[a-zA-Z][-_.a-zA-Z0-9]*')
+attrfind = re.compile(
+    r'\s*([a-zA-Z_][-:.a-zA-Z_0-9]*)(\s*=\s*'
+    r'(\'[^\']*\'|"[^"]*"|[][\-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~\'"@]*))?')
+
+
+class SGMLParseError(RuntimeError):
+    """Exception raised for all parse errors."""
+    pass
+
+
+# SGML parser base class -- find tags and call handler functions.
+# Usage: p = SGMLParser(); p.feed(data); ...; p.close().
+# The dtd is defined by deriving a class which defines methods
+# with special names to handle tags: start_foo and end_foo to handle
+# <foo> and </foo>, respectively, or do_foo to handle <foo> by itself.
+# (Tags are converted to lower case for this purpose.) The data
+# between tags is passed to the parser by calling self.handle_data()
+# with some data as argument (the data may be split up in arbitrary
+# chunks). Entity references are passed by calling
+# self.handle_entityref() with the entity reference as argument.
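+
+# ---------------------------------------------------------------------------
+# Editor's note: the demo below is an illustrative sketch added for
+# documentation purposes; it is not part of the original module. The
+# TitleParser subclass is hypothetical and simply exercises the
+# start_foo/end_foo handler convention described above. It is wrapped in a
+# function so the reference to SGMLParser (defined just below) is resolved
+# only when the demo is actually called.
+
+def _demo_sgml_handlers_sketch():
+    """Return the text of the <title> element from a small document."""
+    class TitleParser(SGMLParser):
+        def reset(self):
+            SGMLParser.reset(self)
+            self.title = ''
+            self.in_title = False
+
+        def start_title(self, attrs):
+            # invoked for <title ...>
+            self.in_title = True
+
+        def end_title(self):
+            # invoked for </title>
+            self.in_title = False
+
+        def handle_data(self, data):
+            if self.in_title:
+                self.title = self.title + data
+
+    p = TitleParser()
+    p.feed('<html><title>An example page</title></html>')
+    p.close()
+    return p.title  # -> 'An example page'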
+
+class SGMLParser(_markupbase.ParserBase):
+    # Definition of entities -- derived classes may override
+    entity_or_charref = re.compile('&(?:'
+                                   '([a-zA-Z][-.a-zA-Z0-9]*)|#([0-9]+)'
+                                   ')(;?)')
+
+    def __init__(self, verbose=0):
+        """Initialize and reset this instance."""
+        self.verbose = verbose
+        self.reset()
+
+    def reset(self):
+        """Reset this instance. Loses all unprocessed data."""
+        self.__starttag_text = None
+        self.rawdata = ''
+        self.stack = []
+        self.lasttag = '???'
+        self.nomoretags = 0
+        self.literal = 0
+        _markupbase.ParserBase.reset(self)
+
+    def setnomoretags(self):
+        """Enter literal mode (CDATA) till EOF.
+
+        Intended for derived classes only.
+        """
+        self.nomoretags = self.literal = 1
+
+    def setliteral(self, *args):
+        """Enter literal mode (CDATA).
+
+        Intended for derived classes only.
+        """
+        self.literal = 1
+
+    def feed(self, data):
+        """Feed some data to the parser.
+
+        Call this as often as you want, with as little or as much text
+        as you want (may include '\n'). (This just saves the text,
+        all the processing is done by goahead().)
+        """
+
+        self.rawdata = self.rawdata + data
+        self.goahead(0)
+
+    def close(self):
+        """Handle the remaining data."""
+        self.goahead(1)
+
+    def error(self, message):
+        raise SGMLParseError(message)
+
+    # Internal -- handle data as far as reasonable. May leave state
+    # and data to be processed by a subsequent call. If 'end' is
+    # true, force handling all data as if followed by EOF marker.
+    def goahead(self, end):
+        rawdata = self.rawdata
+        i = 0
+        n = len(rawdata)
+        while i < n:
+            if self.nomoretags:
+                self.handle_data(rawdata[i:n])
+                i = n
+                break
+            match = interesting.search(rawdata, i)
+            if match: j = match.start()
+            else: j = n
+            if i < j:
+                self.handle_data(rawdata[i:j])
+            i = j
+            if i == n: break
+            if rawdata[i] == '<':
+                if starttagopen.match(rawdata, i):
+                    if self.literal:
+                        self.handle_data(rawdata[i])
+                        i = i+1
+                        continue
+                    k = self.parse_starttag(i)
+                    if k < 0: break
+                    i = k
+                    continue
+                if rawdata.startswith("</", i):
+                    k = self.parse_endtag(i)
+                    if k < 0: break
+                    i = k
+                    self.literal = 0
+                    continue
+                if self.literal:
+                    if n > (i + 1):
+                        self.handle_data("<")
+                        i = i+1
+                    else:
+                        # incomplete
+                        break
+                    continue
+                if rawdata.startswith("