repo_name
stringlengths
6
61
path
stringlengths
4
230
copies
stringlengths
1
3
size
stringlengths
4
6
text
stringlengths
1.01k
850k
license
stringclasses
15 values
hash
int64
-9,220,477,234,079,998,000
9,219,060,020B
line_mean
float64
11.6
96.6
line_max
int64
32
939
alpha_frac
float64
0.26
0.9
autogenerated
bool
1 class
ratio
float64
1.62
6.1
config_test
bool
2 classes
has_no_keywords
bool
2 classes
few_assignments
bool
1 class
ralphm/udplog
udplog/scribe.py
2
4622
# Copyright (c) Mochi Media, Inc. # Copyright (c) Ralph Meijer. # See LICENSE for details. """ Asynchronous Scribe client support. This provides a Twisted based Scribe client with an asynchronous interface for sending logs and a consumer for L{udplog.twisted.DispatcherFromUDPLogProtocol}. """ from __future__ import division, absolute_import import copy import logging import simplejson from twisted.internet import defer from twisted.internet import protocol from twisted.python import log from scribe import scribe from thrift.Thrift import TApplicationException, TMessageType from thrift.protocol import TBinaryProtocol from thrift.transport import TTwisted class AsyncScribeClient(scribe.Client): """ Asynchronous Scribe client. This derives from L{scribe.Client} to work with the Twisted Thrift transport and provide an asynchronous interface for L{Log}. @ivar _reqs: List of pending requests. When a result comes in, the associated deferred will be fired. If the connection is closed, the deferreds of the pending requests will be fired with an exception. """ def __init__(self, transport, factory): """ Set up a scribe client. @param transport: The transport of the connection to the Scribe server. @param factory: The protocol factory of the Thrift transport protocol. """ scribe.Client.__init__(self, factory.getProtocol(transport)) self._reqs = {} def Log(self, messages): """ Log messages. @param messages: The messages to be sent. @type messages: C{list} of L{scribe.LogEntry}. @return: L{Deferred<twisted.internet.defer.Deferred>}. """ d = defer.Deferred() self._reqs[self._seqid] = d self.send_Log(messages) return d def send_Log(self, messages): """ Called to send log messages. """ scribe.Client.send_Log(self, messages) self._seqid += 1 def recv_Log(self, iprot, mtype, rseqid): """ Called when the result of the log request was received. 
""" if mtype == TMessageType.EXCEPTION: result = TApplicationException() else: result = scribe.Log_result() result.read(iprot) iprot.readMessageEnd() try: d = self._reqs.pop(rseqid) except KeyError: log.err(result, "Unexpected log result") if isinstance(result, Exception): d.errback(result) elif result.success is not None: d.callback(result.success) else: d.errback(TApplicationException( TApplicationException.MISSING_RESULT, 'Log failed: unknown result')) class ScribeProtocol(TTwisted.ThriftClientProtocol): """ Scribe protocol. This connects an asynchronous Scribe client to a server and sends out log events from C{dispatcher}. """ def __init__(self, dispatcher, minLogLevel=logging.INFO): self.dispatcher = dispatcher self.minLogLevel = minLogLevel factory = TBinaryProtocol.TBinaryProtocolFactory(strictRead=False, strictWrite=False) TTwisted.ThriftClientProtocol.__init__(self, AsyncScribeClient, factory) def connectionMade(self): """ Add this protocol as a consumer of log events. """ TTwisted.ThriftClientProtocol.connectionMade(self) self.dispatcher.register(self.sendEvent) def connectionLost(self, reason=protocol.connectionDone): """ Remove this protocol as a consumer of log events. """ self.dispatcher.unregister(self.sendEvent) TTwisted.ThriftClientProtocol.connectionLost(self, reason) def sendEvent(self, event): """ Write an event to Scribe. """ event = copy.copy(event) # Drop events with a log level lower than the configured minimum. logLevel = logging.getLevelName(event.get('logLevel', 'INFO')) if logLevel < self.minLogLevel: return category = event['category'] del event['category'] try: message = simplejson.dumps(event) except ValueError, e: log.err(e, "Could not encode event to JSON") return entry = scribe.LogEntry(category=category, message=message) d = self.client.Log(messages=[entry]) d.addErrback(log.err)
mit
-4,296,543,714,192,888,300
27.530864
79
0.625054
false
4.46139
false
false
false
chteuchteu/Simple-Backup-Script
backup.py
1
10886
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Easy & simple yet flexible backup script """ import sys import os import argparse import time import socket import traceback import json import plugins import targets from utils.stdio import CRESET, CBOLD, LGREEN, CDIM, LWARN __author__ = 'Quentin Stoeckel' __copyright__ = 'Copyright 2016, Quentin Stoeckel and contributors' __credits__ = ['Contributors at https://github.com/chteuchteu/Simple-Backup-Script/graphs/contributors'] __license__ = 'gpl-v2' __version__ = '1.0.0' __maintainer__ = "qstoeckel" __email__ = 'stoeckel.quentin@gmail.com' __status__ = 'Production' config = { 'days_to_keep': 15, 'backups': [], 'targets': [] } config_filename = 'config.json' config_filename_old = 'config.py' config_filepath = os.path.join(os.path.dirname(__file__), config_filename) config_filepath_old = os.path.join(os.path.dirname(__file__), config_filename_old) # Functions def load_config(): # Load config if not os.path.isfile(config_filepath): if os.path.isfile(config_filepath_old): print(CBOLD + LWARN, '\n{} is deprecated. 
Please use --migrate to generate {}'.format( config_filename_old, config_filename), CRESET) else: print(CBOLD + LWARN, '\nCould not find configuration file {}'.format(config_filename), CRESET) sys.exit(1) with open(config_filepath, 'r') as config_file: json_config = json.load(config_file) config['days_to_keep'] = json_config.get('days_to_keep', config['days_to_keep']) config['alert_emails'] = json_config.get('alert_emails') config['sentry_dsn'] = json_config.get('sentry_dsn') config['backups'] = json_config.get('backups', []) config['targets'] = json_config.get('targets', []) # Now that we know if Sentry should be enabled, load its sdk: init_sentry() def send_file(backup, backup_filepath, target_profile): # Build destination filename filename, file_extension = os.path.splitext(backup_filepath) # ...explicitly handle ".tar.gz" extensions if backup_filepath.endswith('.tar.gz'): file_extension = '.tar.gz' dest_file_name = 'backup-{hostname}-{timestamp}-{backup_name}({backup_profile}){file_extension}'.format( hostname=socket.gethostname(), timestamp=time.strftime("%Y%m%d-%H%M"), backup_profile=backup.get('profile'), backup_name=backup.get('name'), file_extension=file_extension ) _targets = targets.get_supported_targets() type = target_profile.get('type', 'remote') if type not in _targets: print("Unknown target type \"{}\".".format(type)) sys.exit(1) target = _targets[type]() error = target.copy_to_target(config, target_profile, backup_filepath, dest_file_name) if error is not None: e, traceback = error handle_error(backup, e, traceback) print('') def get_backup(backup_name): candidates = [b for b in config['backups'] if b.get('name') == backup_name] return candidates[0] if len(candidates) == 1 else None def do_backup(backup): backup_profile = backup.get('profile') # Check backup profile profiles = plugins.get_supported_backup_profiles() if backup_profile not in profiles: print("Unknown project type \"{}\".".format(backup_profile)) sys.exit(1) # JUST DO IT 
print(CBOLD+LGREEN, "Creating backup file", CRESET) plugin = profiles[backup_profile]() backup_filepath = plugin.create_backup_file(backup) if backup_filepath is None: print("Could not create backup file for \"{}\".".format(backup_profile)) return # Send it to the moon (to each target) backup_targets = config['targets'] if args.target == 'all' else [config['targets'][int(args.target)]] for target_profile in backup_targets: try: send_file(backup, backup_filepath, target_profile) except Exception as e: # Print exception (for output in logs) print(traceback.format_exc()) handle_error(backup, e, traceback) # Delete the file if plugin.remove_artifact: print(CDIM, "Deleting {}".format(backup_filepath), CRESET) os.remove(backup_filepath) plugin.clean() return def init_sentry(): sentry_dsn = config.get('sentry_dsn', None) if sentry_dsn is not None: try: import sentry_sdk sentry_sdk.init(sentry_dsn) return sentry_sdk except Exception as error: print(error) return None def handle_error(backup, exception, traceback): # sentry sentry = init_sentry() if sentry is not None: sentry.set_level('error') sentry.set_tag('backup', backup.get('name')) sentry.capture_exception(exception) # mails email_addresses = config.get('alert_emails', None) if email_addresses is not None: message = 'Simple-Backup-Script: backup "{}" failed'.format(backup.get('name')) formatted_traceback = traceback.format_exc() for address in [a for a in email_addresses if a]: if address: send_mail(address, message, formatted_traceback) # Inspired by http://stackoverflow.com/a/27874213/1474079 def send_mail(recipient, subject, body): import subprocess try: process = subprocess.Popen(['mail', '-s', subject, recipient], stdin=subprocess.PIPE) process.communicate(input=bytes(body, 'UTF-8')) return True except Exception as error: print(error) return False try: # Check python version if sys.version_info.major < 3: print('Warning: Python 2.x isn\'t officially supported. 
Use at your own risk.') # Check command line arguments parser = argparse.ArgumentParser(description='Easily backup projects') parser.add_argument('--self-update', action='store_true', dest='self_update') parser.add_argument('--backup', default='ask_for_it') parser.add_argument('--target', default='all') parser.add_argument('-a', '--all', action='store_true') parser.add_argument('--migrate', action='store_true') parser.add_argument('--test-mails', action='store_true', dest='test_mails') parser.add_argument('--test-config', action='store_true', dest='test_config') args = parser.parse_args() if args.migrate: from utils.migrator import migrate migrate() elif args.self_update: # cd to own directory self_dir = os.path.dirname(os.path.realpath(__file__)) if not os.path.isdir(os.path.join(self_dir, '.git')): print(CDIM+LWARN, "Cannot self-update: missing .git directory", CRESET) sys.exit(1) os.chdir(self_dir) os.system("git pull") print() print(LGREEN, "Updated to the latest version", CRESET) elif args.test_mails: load_config() email_addresses = config['alert_emails'] mail_sent = False if email_addresses: for address in [a for a in email_addresses if a]: if address: if send_mail(address, 'Simple-Backup-Script: test e-mail', ''): mail_sent = True print('Test mail sent to {}'.format(address)) else: print('Could not send mail to {}'.format(address)) if not mail_sent: print('No mail could be sent.') else: print('"alert_emails" is null or empty.') sys.exit(1) elif args.test_config: print('Opening {}'.format(config_filename)) try: load_config() if len(config['backups']) == 0: print(LWARN, 'Error: there a no configured backup profile', CRESET) sys.exit(1) if len(config['targets']) == 0: print(LWARN, 'Error: there are no configured targets', CRESET) sys.exit(1) print(CBOLD + LGREEN, '{} successfully parsed:'.format(config_filename), CRESET) print(' - Default days_to_keep: {}'.format(config['days_to_keep'])) print(' - Alert emails: {}'.format(config['alert_emails'])) print(' - {} 
backup profile(s)'.format(len(config['backups']))) print(' - {} backup target(s)'.format(len(config['targets']))) for i, target in enumerate(config['targets']): if target.get('host') is None: print(CBOLD + LWARN, 'Warning: Missing "host" attribute in target {}'.format(i), CRESET) host = target.get('host', '#{}'.format(i+1)) if target.get('user') is None: print(CBOLD + LWARN, 'Warning: Missing "user" attribute in target {}'.format(host), CRESET) if target.get('dir') is None: print(CBOLD + LWARN, 'Warning: Missing "dir" attribute in target {}'.format(host), CRESET) except Exception: print('Could not parse configuration:') print(traceback.format_exc()) else: load_config() # Ask for backup to run if len(config['backups']) == 0: print(CBOLD + LGREEN, "Please configure backup projects in backup.py", CRESET) sys.exit(1) if args.all: # Backup all profiles for i, project in enumerate(config['backups']): print(CBOLD+LGREEN, "\n{} - Backing up {} ({})".format(i, project.get('name'), project.get('profile')), CRESET) backup = config['backups'][i] do_backup(backup) elif args.backup == 'ask_for_it': print("Please select a backup profile to execute") for i, project in enumerate(config['backups']): print("\t[{}] {} ({})".format(str(i), project.get('name'), project.get('profile'))) backup_index = -1 is_valid = 0 while not is_valid: try: backup_index = int(input("? ")) is_valid = 1 except ValueError: print("Not a valid integer.") if 0 <= backup_index < len(config['backups']): # Here goes the thing backup = config['backups'][backup_index] do_backup(backup) else: print("I won't take that as an answer") else: # Backup project passed as argument backup = get_backup(args.backup) if backup is None: print("This backup does not exist, or there may be several backups with this name") sys.exit(1) else: do_backup(backup) except KeyboardInterrupt: print('\n^C signal caught, exiting') sys.exit(1)
gpl-2.0
-7,297,012,608,493,930,000
33.55873
127
0.591861
false
3.93992
true
false
false
fishilico/shared
java/keystore/parse_pkcs12.py
1
24506
#!/usr/bin/env python # -*- coding:UTF-8 -*- # Copyright (c) 2019 Nicolas Iooss # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. """Parse a KeyStore in PKCS#12 format Using openssl, it is possible to dump the certificates and private keys from a PKCS#12 keystore: openssl pkcs12 -info -passin pass:changeit -nodes -in store.p12 Nevertheless this command does not show the bags with type "secretBag", that contain secret keys for symmetric encryption algorithms. Documentation: * https://tools.ietf.org/html/rfc7292 RFC 7292, PKCS #12: Personal Information Exchange Syntax v1.1 * https://tools.ietf.org/html/rfc2315 RFC 2315, PKCS #7: Cryptographic Message Syntax Version 1.5 * https://tools.ietf.org/html/rfc5208 RFC 5208, Public-Key Cryptography Standards (PKCS) #8: Private-Key Information Syntax Specification Version 1.2 * https://www.openssl.org/docs/man1.0.2/man1/pkcs12.html openssl-pkcs12 man page NB. 
PKCS#12 pbeWithSHA1And40BitRC2-CBC key-derivation and encryption algorithm is used to encrypt WebLogic passwords. The code uses JSAFE with algorithm "PBE/SHA1/RC2/CBC/PKCS12PBE-5-128", which is pbeWithSHA1And40BitRC2-CBC with five rounds. More information is available on: * https://bitbucket.org/vladimir_dyuzhev/recover-weblogic-password/src/b48ef4a82db57f12e52788fe08b80e54e847d42c/src/weblogic/security/internal/encryption/JSafeSecretKeyEncryptor.java * https://www.cryptsoft.com/pkcs11doc/v220/group__SEC__12__27__PKCS____12__PASSWORD__BASED__ENCRYPTION__AUTHENTICATION__MECHANISMS.html * https://github.com/maaaaz/weblogicpassworddecryptor * https://blog.netspi.com/decrypting-weblogic-passwords/ * https://github.com/NetSPI/WebLogicPasswordDecryptor/blob/master/Invoke-WebLogicPasswordDecryptor.psm1 """ import argparse import binascii import datetime import hashlib import hmac import logging import os.path import re import struct import sys import tempfile import Cryptodome.Cipher.ARC2 import Cryptodome.Cipher.DES3 import rc2 import util_asn1 from util_bin import run_openssl_show_cert, run_process_with_input, xx from util_crypto import report_if_missing_cryptography, describe_der_certificate logger = logging.getLogger(__name__) def generate_p12_keystore(password): """Generate a PKCS#12 keystore with some content""" temporary_dir = tempfile.mkdtemp(suffix='_java_keystore-test') ks_path = os.path.join(temporary_dir, 'store.jks') try: # By default it generates a DSA keypair run_process_with_input( [ 'keytool', '-genkeypair', '-noprompt', '-keyalg', 'dsa', '-storetype', 'pkcs12', '-keystore', ks_path, '-storepass', password, '-alias', 'mykeypair', '-dname', 'CN=example', ], None, fatal=True) run_process_with_input( [ 'keytool', '-genkeypair', '-noprompt', '-keyalg', 'rsa', '-sigalg', 'SHA256withRSA', '-storetype', 'pkcs12', '-keystore', ks_path, '-storepass', password, '-alias', 'mykeypair_rsa_sha256sig', '-dname', 'CN=example', ], None, fatal=True) # Add a secret key 
run_process_with_input( [ 'keytool', '-genseckey', '-keyalg', 'aes', '-keysize', '192', '-storetype', 'pkcs12', '-keystore', ks_path, '-storepass', password, '-alias', 'mysecret_aes192key', ], None, fatal=True) with open(ks_path, 'rb') as fks: ks_content = fks.read() if not ks_content: raise ValueError("keytool did not produce any output") return ks_content finally: try: os.remove(ks_path) except OSError as exc: # If removing the files failed, the error will appear in rmdir logger.debug("Error while removing files: %r", exc) os.rmdir(temporary_dir) def pkcs12_derivation(alg, id_byte, password, salt, iterations, result_size=None): """Compute a key and iv from a password and salt according to PKCS#12 id_byte is, according to https://tools.ietf.org/html/rfc7292#appendix-B.3 : * 1 to generate a key * 2 to generate an initial value (IV) * 3 to generate an integrity key OpenSSL implementation: https://github.com/openssl/openssl/blob/OpenSSL_1_1_1/crypto/pkcs12/p12_key.c """ if alg == 'SHA1': hash_func = hashlib.sha1 u = 160 # SHA1 digest size, in bits v = 512 # SHA1 block size, in bits else: raise NotImplementedError("Unimplemented algorithm {} for PKCS#12 key derivation".format(alg)) assert (u % 8) == (v % 8) == 0 u_bytes = u // 8 v_bytes = v // 8 if result_size is None: result_size = u_bytes diversifier = struct.pack('B', id_byte) * v_bytes expanded_salt_size = v_bytes * ((len(salt) + v_bytes - 1) // v_bytes) expanded_salt = (salt * ((expanded_salt_size // len(salt)) + 1))[:expanded_salt_size] assert len(expanded_salt) == expanded_salt_size pass_bytes = password.encode('utf-16be') + b'\0\0' expanded_pass_size = v_bytes * ((len(pass_bytes) + v_bytes - 1) // v_bytes) expanded_pass = (pass_bytes * ((expanded_pass_size // len(pass_bytes)) + 1))[:expanded_pass_size] assert len(expanded_pass) == expanded_pass_size i_size = expanded_salt_size + expanded_pass_size i_value = expanded_salt + expanded_pass result = b'' while len(result) < result_size: ctx = hash_func(diversifier) 
ctx.update(i_value) a_value = ctx.digest() for _ in range(1, iterations): a_value = hash_func(a_value).digest() assert len(a_value) == u_bytes result += a_value b_value = struct.unpack(v_bytes * 'B', (a_value * ((v_bytes + u_bytes - 1) // u_bytes))[:v_bytes]) new_i_value = [] for j in range(0, i_size, v_bytes): # Ij = Ij + B + 1 ij = list(struct.unpack(v_bytes * 'B', i_value[j:j + v_bytes])) c = 1 for k in range(v_bytes - 1, -1, -1): c += ij[k] + b_value[k] ij[k] = c & 0xff c = c >> 8 new_i_value.append(struct.pack(v_bytes * 'B', *ij)) i_value = b''.join(new_i_value) return result[:result_size] # Check the implementation with values from "openssl pkcs12" with OPENSSL_DEBUG_KEYGEN assert pkcs12_derivation( 'SHA1', 3, 'changeit', binascii.unhexlify('c6b068958d7d6085ba52c9cc3212a8fc2e50b3da'), 100000 ) == binascii.unhexlify('ef3c7f41e19e7bc7bf06650164aff556d15206d7') assert pkcs12_derivation( 'SHA1', 1, 'changeit', binascii.unhexlify('a9fb3e857865d5e2aeff3983389c980d5de4bf39'), 50000, 24 ) == binascii.unhexlify('12fe77bc0be3ae0d063c4858e948ff4e85c39daa08b833c9') assert pkcs12_derivation( 'SHA1', 2, 'changeit', binascii.unhexlify('a9fb3e857865d5e2aeff3983389c980d5de4bf39'), 50000, 8 ) == binascii.unhexlify('13515c2efce50ef9') def try_pkcs12_decrypt(encrypted, enc_alg, password, indent=''): """Try to decrypt some data with the given password and PKCS#12 password-based encryption algorithms""" if not isinstance(enc_alg, util_asn1.PKCS12PbeAlg): raise NotImplementedError("Unimplemented encryption algorithm {}".format(enc_alg)) if enc_alg.oid_name == 'pbeWithSHA1And3-KeyTripleDES-CBC': # 192-bits 3DES key and 64-bit IV from SHA1 key = pkcs12_derivation(alg='SHA1', id_byte=1, password=password, salt=enc_alg.salt, iterations=enc_alg.iterations, result_size=24) iv = pkcs12_derivation(alg='SHA1', id_byte=2, password=password, salt=enc_alg.salt, iterations=enc_alg.iterations, result_size=8) crypto_3des = Cryptodome.Cipher.DES3.new(key, Cryptodome.Cipher.DES3.MODE_CBC, iv) 
decrypted = crypto_3des.decrypt(encrypted) elif enc_alg.oid_name == 'pbeWithSHA1And40BitRC2-CBC': # 40-bits RC2 key and 64-bit IV from SHA1 key = pkcs12_derivation(alg='SHA1', id_byte=1, password=password, salt=enc_alg.salt, iterations=enc_alg.iterations, result_size=5) iv = pkcs12_derivation(alg='SHA1', id_byte=2, password=password, salt=enc_alg.salt, iterations=enc_alg.iterations, result_size=8) try: crypto_rc2 = Cryptodome.Cipher.ARC2.new(key, Cryptodome.Cipher.ARC2.MODE_CBC, iv, effective_keylen=40) decrypted = crypto_rc2.decrypt(encrypted) except ValueError: # Use custom RC2 implementation because "effective_keylen=40" is not always supported # https://github.com/Legrandin/pycryptodome/issues/267 crypto_rc2 = rc2.RC2(key) decrypted = crypto_rc2.decrypt(encrypted, rc2.MODE_CBC, iv) else: raise NotImplementedError("Unimplemented encryption algorithm {}".format(enc_alg)) # Check PKCS#5 padding padlen, = struct.unpack('B', decrypted[-1:]) if not (1 <= padlen <= 0x10) or any(x != decrypted[-1] for x in decrypted[-padlen:]): print("{}* wrong password (bad PKCS#5 padding)".format(indent)) return None print("{}(password: {})".format(indent, repr(password))) return decrypted[:-padlen] def print_p12_keybag(keybag_der, password, show_pem=False, list_only=False, indent=''): """Parse PKCS#12 keyBag ASN.1 data""" # KeyBag ::= PrivateKeyInfo -- from PKCS #8 # EncryptedPrivateKeyInfo ::= SEQUENCE { # encryptionAlgorithm EncryptionAlgorithmIdentifier, # encryptedData EncryptedData # } # EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier # EncryptedData ::= OCTET STRING enc_alg_der, enc_data_der = util_asn1.decode_sequence(keybag_der, 2) enc_alg = util_asn1.decode_x509_algid(enc_alg_der) enc_data = util_asn1.decode_octet_string(enc_data_der) print("{}* encryption algorithm: {}".format(indent, enc_alg)) if not isinstance(enc_alg, util_asn1.PKCS12PbeAlg): raise NotImplementedError("Unimplemented encryption algorithm {}".format(enc_alg)) decrypted = try_pkcs12_decrypt(enc_data, 
enc_alg, password, indent=indent) if decrypted is not None: # Show the private key util_asn1.show_pkcs8_private_key_info(decrypted, list_only=list_only, show_pem=show_pem, indent=indent) def print_p12_certBag(certbag_der, show_pem=False, list_only=False, indent=''): """Parse PKCS#12 certBag ASN.1 data""" # CertBag ::= SEQUENCE { # certId BAG-TYPE.&id ({CertTypes}), # certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId}) # } cert_id_der, cert_value_der = util_asn1.decode_sequence(certbag_der, 2) cert_id = util_asn1.decode_oid(cert_id_der) cert_value_der = util_asn1.decode_object(cert_value_der) if cert_id != 'x509Certificate': raise NotImplementedError("Unknown certificate format {}".format(repr(cert_id))) cert = util_asn1.decode_octet_string(cert_value_der) description = describe_der_certificate(cert) if description: print("{}* Certificate: {}".format(indent, description)) else: print("{}* Certificate: (no description available)".format(indent)) run_openssl_show_cert(cert, list_only=list_only, show_pem=show_pem, indent=indent) def print_p12_secretBag(secretbag_der, password, show_pem=False, list_only=False, indent=''): """Parse PKCS#12 secretBag ASN.1 data""" # SecretBag ::= SEQUENCE { # secretTypeId BAG-TYPE.&id ({SecretTypes}), # secretValue [0] EXPLICIT BAG-TYPE.&Type ({SecretTypes} {@secretTypeId}) # } secret_type_id_der, secret_value_der = util_asn1.decode_sequence(secretbag_der, 2) secret_type_id = util_asn1.decode_oid(secret_type_id_der) secret_value_der = util_asn1.decode_object(secret_value_der) print("{}* secret type: {}".format(indent, secret_type_id)) secret_value = util_asn1.decode_octet_string(secret_value_der) if secret_type_id == 'keyBag': print_p12_keybag(secret_value, password, show_pem=show_pem, list_only=list_only, indent=indent) else: raise NotImplementedError("Unimplemented secretBag type {}".format(secret_type_id)) def print_p12_safe_contents(safe_contents_der, password, show_pem=False, list_only=False, indent=''): """Parse PKCS#12 
SafeContents ASN.1 data https://tools.ietf.org/html/rfc7292#section-4.2 The SafeContents type is made up of SafeBags. Each SafeBag holds one piece of information -- a key, a certificate, etc. -- which is identified by an object identifier. """ # SafeContents ::= SEQUENCE OF SafeBag # SafeBag ::= SEQUENCE { # bagId BAG-TYPE.&id ({PKCS12BagSet}) # bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}), # bagAttributes SET OF PKCS12Attribute OPTIONAL # } # PKCS12Attribute ::= SEQUENCE { # attrId ATTRIBUTE.&id ({PKCS12AttrSet}), # attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId}) # } -- This type is compatible with the X.500 type 'Attribute' # PKCS12AttrSet ATTRIBUTE ::= { # friendlyName | -- from PKCS #9 # localKeyId, -- from PKCS #9 # ... -- Other attributes are allowed # } safe_bags = util_asn1.decode_sequence(safe_contents_der) print("{}* {} {}:".format(indent, len(safe_bags), "safe bags" if len(safe_bags) >= 2 else "safe bag")) for idx_safe_bag, safe_bag_der in enumerate(safe_bags): safe_bag = util_asn1.decode_sequence(safe_bag_der, counts=(2, 3)) bag_id = util_asn1.decode_oid(safe_bag[0]) bag_value = util_asn1.decode_object(safe_bag[1]) try: bag_attributes = util_asn1.decode_set(safe_bag[2]) if len(safe_bag) >= 3 else [] except NotImplementedError as exc: # Recover from error caused by old PyCrypto logger.warning("Unable to decode bag attributes: %s", exc) attr_descs = ['?'] else: attr_descs = [] for bag_attribute_der in bag_attributes: attr_id_der, attr_values_der = util_asn1.decode_sequence(bag_attribute_der, 2) attr_id = util_asn1.decode_oid(attr_id_der) attr_values_der = util_asn1.decode_set(attr_values_der) attr_values = [util_asn1.decode_any_string(v) for v in attr_values_der] attr_descs.append("{}={}".format(attr_id, ','.join(repr(v) for v in attr_values))) if attr_id == 'localKeyID' and len(attr_values) == 1: m = re.match(r'^Time ([0-9]+)$', attr_values[0]) if m: # Parse the timestamp from the local key ID timestamp = int(m.group(1)) 
attr_descs.append("date='{}'".format(datetime.datetime.fromtimestamp(timestamp / 1000.))) print("{} [{}] {} ({})".format(indent, idx_safe_bag + 1, bag_id, ', '.join(attr_descs))) if bag_id == 'keyBag': print_p12_keybag(bag_value, password, show_pem=show_pem, list_only=list_only, indent=indent + " ") elif bag_id == 'certBag': print_p12_certBag(bag_value, show_pem=show_pem, list_only=list_only, indent=indent + " ") elif bag_id == 'secretBag': print_p12_secretBag(bag_value, password, show_pem=show_pem, list_only=list_only, indent=indent + " ") else: print("{} * bag value: {}".format(indent, repr(bag_value))) raise NotImplementedError("Unimplemented bag id {}".format(bag_id)) def print_p12_keystore(ks_content, password, show_pem=False, list_only=False): """Parse a PKCS#12 KeyStore file and print it""" # run_process_with_input(['openssl', 'asn1parse', '-i', '-inform', 'DER'], ks_content, fatal=True) # PFX (Personal Information Exchange) is defined as: # PFX ::= SEQUENCE { # version INTEGER {v3(3)}(v3,...), # authSafe ContentInfo, # macData MacData OPTIONAL # } version, authsafe_der, macdata_der = util_asn1.decode_sequence(ks_content, 3) if version != 3: raise NotImplementedError("Unimplemented PFX version {}".format(version)) # ContentInfo ::= SEQUENCE { # contentType ContentType, # content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL # } # ContentType ::= OBJECT IDENTIFIER authsafe_content_type_der, authsafe_content_der = util_asn1.decode_sequence(authsafe_der, 2) authsafe_content_type = util_asn1.decode_oid(authsafe_content_type_der) if authsafe_content_type != 'pkcs7-data': raise NotImplementedError("Unimplemented PFX content type {}".format(authsafe_content_type)) authsafe_content_der = util_asn1.decode_object(authsafe_content_der) authsafe_content = util_asn1.decode_octet_string(authsafe_content_der) # MacData ::= SEQUENCE { # mac DigestInfo, # macSalt OCTET STRING, # iterations INTEGER DEFAULT 1 # } macdata_asn1 = util_asn1.decode_sequence(macdata_der) if 
len(macdata_asn1) == 2: mac_der, mac_salt_der = macdata_asn1 mac_iterations = 1 elif len(macdata_asn1) == 3: mac_der, mac_salt_der, mac_iterations = macdata_asn1 else: raise ValueError("Unexpected number of items in ASN.1 MacData sequence") mac_salt = util_asn1.decode_octet_string(mac_salt_der) # DigestInfo ::= SEQUENCE { # digestAlgorithm DigestAlgorithmIdentifier, # digest Digest # } # DigestAlgorithmIdentifier ::= AlgorithmIdentifier # Digest ::= OCTET STRING mac_digest_algorithm_der, mac_digest_der = util_asn1.decode_sequence(mac_der, 2) mac_digest_algorithm = util_asn1.decode_x509_algid(mac_digest_algorithm_der) mac_digest = util_asn1.decode_octet_string(mac_digest_der) print("* PKCS#12 Keystore MAC:") print(" * algorithm: {}".format(mac_digest_algorithm)) print(" * salt: {}".format(xx(mac_salt))) print(" * iterations: {}".format(mac_iterations)) print(" * HMAC digest: {}".format(xx(mac_digest))) mac_key = pkcs12_derivation( alg=mac_digest_algorithm, id_byte=3, password=password, salt=mac_salt, iterations=mac_iterations) mac_hmac = hmac.new(key=mac_key, msg=authsafe_content, digestmod=hashlib.sha1).digest() if mac_hmac == mac_digest: print(" (password: {})".format(repr(password))) print(" (HMAC key: {})".format(xx(mac_key))) else: print(" (computed HMAC: {})".format(xx(mac_hmac))) print(" * wrong password (pad HMAC digest)") # AuthenticatedSafe ::= SEQUENCE OF ContentInfo # -- Data if unencrypted # -- EncryptedData if password-encrypted # -- EnvelopedData if public key-encrypted authsafe_seq = util_asn1.decode_sequence(authsafe_content) print("* {} data blocks:".format(len(authsafe_seq))) for blk_index, blk_der in enumerate(authsafe_seq): blk_content_type_der, blk_content_der = util_asn1.decode_sequence(blk_der, 2) blk_content_type = util_asn1.decode_oid(blk_content_type_der) blk_content_der = util_asn1.decode_object(blk_content_der) # tag "cont[0]" if blk_content_type == 'pkcs7-data': safe_contents = util_asn1.decode_octet_string(blk_content_der) print(" [{}] 
unencrypted safe contents:".format(blk_index + 1)) print_p12_safe_contents(safe_contents, password, show_pem=show_pem, list_only=list_only, indent=" ") elif blk_content_type == 'pkcs7-encryptedData': print(" [{}] encrypted safe contents:".format(blk_index + 1)) # EncryptedData ::= SEQUENCE { # version Version, # encryptedContentInfo EncryptedContentInfo # } encblk_version, encrypted_ci_der = util_asn1.decode_sequence(blk_content_der, 2) if encblk_version != 0: raise NotImplementedError("Unimplemented PKCS#7 EncryptedData version {}".format(encblk_version)) # EncryptedContentInfo ::= SEQUENCE { # contentType ContentType, # contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier, # encryptedContent [0] IMPLICIT EncryptedContent OPTIONAL # } # ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier # EncryptedContent ::= OCTET STRING enc_ctype_der, enc_alg_der, enc_content_der = util_asn1.decode_sequence(encrypted_ci_der, 3) enc_ctype = util_asn1.decode_oid(enc_ctype_der) enc_alg = util_asn1.decode_x509_algid(enc_alg_der) enc_content = util_asn1.decode_object(enc_content_der) # tag "cont[0]" if enc_ctype != 'pkcs7-data': raise NotImplementedError("Unimplemented PKCS#7 EncryptedData content type {}".format(enc_ctype)) print(" * encryption algorithm: {}".format(enc_alg)) safe_contents = try_pkcs12_decrypt(enc_content, enc_alg, password, indent=" ") if safe_contents is not None: print_p12_safe_contents(safe_contents, password, show_pem=show_pem, list_only=list_only, indent=" ") else: raise NotImplementedError("Unimplemented bag content type {}".format(blk_content_type)) def main(argv=None): """Program entry point""" parser = argparse.ArgumentParser( description="Parse a PKCS#12 keystore file", formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('input', metavar='KEYSTORE', nargs='?', type=str, help="load a keystore instead of generating one") parser.add_argument('-d', '--debug', action='store_true', help="show debug messages") 
parser.add_argument('-p', '--password', type=str, default='changeit', help="keystore password") parser.add_argument('-l', '--list', action='store_true', help="list only, without printing the data") parser.add_argument('-P', '--pem', action='store_true', help="show certificates and private keys in PEM format") args = parser.parse_args(argv) logging.basicConfig(format='[%(levelname)-5s] %(message)s', level=logging.DEBUG if args.debug else logging.INFO) report_if_missing_cryptography() if args.input: with open(args.input, 'rb') as fin: ks_content = fin.read() logger.debug("Parsing file %r (%d bytes)", args.input, len(ks_content)) else: try: ks_content = generate_p12_keystore(args.password) except ValueError as exc: logger.fatal("Generating a keystore failed: %s", exc) return 1 logger.debug("Parsing keystore (%d bytes)", len(ks_content)) try: print_p12_keystore(ks_content, args.password, show_pem=args.pem, list_only=args.list) except ValueError as exc: logger.fatal("Parsing the keystore failed: %s", exc) raise # Show the stack trace return 0 if __name__ == '__main__': sys.exit(main())
mit
-6,064,908,502,802,874,000
44.465677
182
0.634171
false
3.506367
false
false
false
CSCSI/Lost-Visions
lost_visions/ajax.py
1
3140
from lost_visions import forms from lost_visions.utils.flickr import getImageTags from lost_visions.views import get_random_image_data from dajaxice.utils import deserialize_form __author__ = 'ubuntu' from dajax.core import Dajax from dajaxice.decorators import dajaxice_register def get_an_image(): # assume a problem !!! problem = True # loop to prefer failing than looping forever loop = 0 while problem and loop < 20: loop+=1 image_data = get_random_image_data() if image_data is not None: if 'flickr_id' in image_data: # this is all we care about for now problem = False # return flickr_tags['image_location'] return image_data['flickr_large_source'] def read_category_form(form): CategoryForm = forms.category_form_factory() form = CategoryForm(deserialize_form(form)) if form.is_valid(): print 'valid form' else: print 'invalid form' categories = ["cycling", "cover", "castle", "decorative papers", "ship", "technology", "sciencefiction", "children's book illustration", "letter", "decoration", "map", "fashion", "portrait", "christmas"] for cat in categories: if cat in form.cleaned_data: print cat def read_tags_form(form): TagsForm = forms.tag_form_factory({}) tags_form = TagsForm(form) @dajaxice_register def load_image(request): dajax = Dajax() img_url = get_an_image() # print img_url dajax.script('change_image("' + img_url + '");') # dajax.assign('#result', 'value', 'hi' + image_id) # dajax.alert('You sent "%s"' % name) return dajax.json() @dajaxice_register def submit_tags(request, form, image_id): read_tags_form(form) print request.POST dajax = Dajax() # img_url = get_an_image() # print img_url # dajax.script('change_image("' + img_url + '");') dajax.script('add_new_tags();') # dajax.assign('#result', 'value', 'hi' + image_id) # dajax.alert('You sent "%s"' % name) return dajax.json() @dajaxice_register def submit_creation_techniques(request, form, image_id): read_category_form(form) print request.POST dajax = Dajax() dajax.script('add_description();') return 
dajax.json() @dajaxice_register def submit_free_text(request, form, image_id): read_category_form(form) print request.POST dajax = Dajax() # TODO start here dajax.script('add_thank_you();') return dajax.json() @dajaxice_register def submit_new_tags(request, form, image_id): print request.POST dajax = Dajax() # TODO start here dajax.script('add_categories();') return dajax.json() @dajaxice_register def submit_categories(request, form, image_id): read_category_form(form) print request.POST dajax = Dajax() dajax.script('add_creation_techniques();') return dajax.json()
apache-2.0
-2,239,828,213,102,041,900
23.539063
56
0.595223
false
3.540023
false
false
false
Mikkyo/heuristic_PRD
models/TaskDAG.py
1
3716
#!/usr/bin/python # -*- coding: utf-8 -*- # --- Import Area from enums.TaskStatus import TaskStatus class TaskDAG: """Class to represent a DAG [Direct Acyclic Graph] for a Task""" # --- Attributes # Private _tasks = None # All the tasks # Constants # --- Constructor def __init__(self, tasks): """" TaskDAG Constructor :param: tasks: Task Array """ self._tasks = tasks # --- Methods def get_root_tasks(self): """ Method to get the root tasks :return: Task Array """ root_task = [] for i in range(0, len(self._tasks)): if self._tasks[i].is_root(): root_task.append(self._tasks[i]) return root_task def get_leaf_tasks(self): """ Method to get the leaf tasks :return Task Array """ leaf_tasks = [] for i in range(0, len(self._tasks)): if self._tasks[i].is_ready(): leaf_tasks.append(self._tasks[i]) return leaf_tasks def get_ready_tasks(self): """ Method to get all the ready tasks :return Task Array """ ready_tasks = [] for i in range(0, len(self._tasks)): if self._tasks[i].is_ready(): ready_tasks.append(self._tasks[i]) return ready_tasks def get_current_tasks(self): """ Method to get the current tasks :return Task Array """ current_tasks = [] for i in range(0, len(self._tasks)): if self._tasks[i].is_current_root(): current_tasks.append(self._tasks[i]) return current_tasks def are_all_tasks_finished(self): """ Method to check if all tasks are finished :return Bool """ for i in range(0, len(self._tasks)): if self._tasks[i].is_finished() is False: return False return True def init(self): """ Method to init all tasks """ for i in range(0, len(self._tasks)): self._tasks[i].status = TaskStatus.PENDING root_tasks = self.get_root_tasks() for i in range(0, len(root_tasks)): self._tasks[i].status = TaskStatus.READY def update_tasks(self, simulation_date): """ Method to update tasks :param simulation_date: Integer """ for i in range(0, len(self._tasks)): self._tasks[i].update(simulation_date) def update(self, simulation_date): """ Method to update all tasks :param simulation_date: Integer 
""" #Reset all start date for i in range(0, len(self._tasks)): self._tasks[i].reset_start_dates() # Top-Down Update Min Start Dates curr_tasks = self.get_current_tasks() for i in range(0, len(curr_tasks)): if curr_tasks[i].is_running() is False: curr_tasks[i].set_min_start_date(simulation_date) # Compute overall max end Date max_end_date = 0 leaf_tasks = self.get_leaf_tasks() for i in range(0, len(leaf_tasks)): leaf_task = leaf_tasks[i] max_end_date = max(leaf_task.min_start_date + leaf_task.duration, max_end_date) # Bottom-Up Update max start date for i in range(0, len(leaf_tasks)): leaf_task = leaf_tasks[i] leaf_task.set_max_start_date(max_end_date - leaf_task.duration) # Update the criticality of tasks for i in range(0, len(self._tasks)): self._tasks[i].computer_criticality() # --- Getters/Setters
gpl-3.0
-1,933,901,351,726,349,300
26.525926
91
0.532293
false
3.944798
false
false
false
torbjoernk/pySDC
examples/heat1d/TransferClass.py
1
4670
from __future__ import division import numpy as np from pySDC.Transfer import transfer from pySDC.datatype_classes.mesh import mesh, rhs_imex_mesh # FIXME: extend this to ndarrays class mesh_to_mesh_1d(transfer): """ Custon transfer class, implements Transfer.py This implementation can restrict and prolong between 1d meshes, using weigthed restriction and 7th-order prologation via matrix-vector multiplication. Attributes: fine: reference to the fine level coarse: reference to the coarse level init_f: number of variables on the fine level (whatever init represents there) init_c: number of variables on the coarse level (whatever init represents there) Rspace: spatial restriction matrix, dim. Nf x Nc Pspace: spatial prolongation matrix, dim. Nc x Nf """ def __init__(self,fine_level,coarse_level): """ Initialization routine Args: fine_level: fine level connected with the transfer operations (passed to parent) coarse_level: coarse level connected with the transfer operations (passed to parent) """ # invoke super initialization super(mesh_to_mesh_1d,self).__init__(fine_level,coarse_level) # if number of variables is the same on both levels, Rspace and Pspace are identity if self.init_c == self.init_f: self.Rspace = np.eye(self.init_c) # assemble weighted restriction by hand else: self.Rspace = np.zeros((self.init_f,self.init_c)) np.fill_diagonal(self.Rspace[1::2,:],1) np.fill_diagonal(self.Rspace[0::2,:],1/2) np.fill_diagonal(self.Rspace[2::2,:],1/2) self.Rspace = 1/2*self.Rspace.T # if number of variables is the same on both levels, Rspace and Pspace are identity if self.init_f == self.init_c: self.Pspace = np.eye(self.init_f) # assemble 7th-order prolongation by hand else: self.Pspace = np.zeros((self.init_f,self.init_c)) np.fill_diagonal(self.Pspace[1::2,:],1) # np.fill_diagonal(self.Pspace[0::2,:],1/2) # np.fill_diagonal(self.Pspace[2::2,:],1/2) # this would be 3rd-order accurate # c1 = -0.0625 # c2 = 0.5625 # c3 = c2 # c4 = c1 # 
np.fill_diagonal(self.Pspace[0::2,:],c3) # np.fill_diagonal(self.Pspace[2::2,:],c2) # np.fill_diagonal(self.Pspace[0::2,1:],c4) # np.fill_diagonal(self.Pspace[4::2,:],c1) # self.Pspace[0,0:3] = [0.9375, -0.3125, 0.0625] # self.Pspace[-1,-3:self.init_c] = [0.0625, -0.3125, 0.9375] np.fill_diagonal(self.Pspace[0::2,:],0.5859375) np.fill_diagonal(self.Pspace[2::2,:],0.5859375) np.fill_diagonal(self.Pspace[0::2,1:],-0.09765625) np.fill_diagonal(self.Pspace[4::2,:],-0.09765625) np.fill_diagonal(self.Pspace[0::2,2:],0.01171875) np.fill_diagonal(self.Pspace[6::2,:],0.01171875) self.Pspace[0,0:5] = [1.23046875, -0.8203125, 0.4921875, -0.17578125, 0.02734375] self.Pspace[2,0:5] = [0.41015625, 0.8203125, -0.2734375, 0.08203125, -0.01171875] self.Pspace[-1,-5:self.init_c] = [0.02734375, -0.17578125, 0.4921875, -0.8203125, 1.23046875] self.Pspace[-3,-5:self.init_c] = [-0.01171875, 0.08203125, -0.2734375, 0.8203125, 0.41015625] pass def restrict_space(self,F): """ Restriction implementation Args: F: the fine level data (easier to access than via the fine attribute) """ if isinstance(F,mesh): u_coarse = mesh(self.init_c,val=0) u_coarse.values = np.dot(self.Rspace,F.values) elif isinstance(F,rhs_imex_mesh): u_coarse = rhs_imex_mesh(self.init_c) u_coarse.impl.values = np.dot(self.Rspace,F.impl.values) u_coarse.expl.values = np.dot(self.Rspace,F.expl.values) return u_coarse def prolong_space(self,G): """ Prolongation implementation Args: G: the coarse level data (easier to access than via the coarse attribute) """ if isinstance(G,mesh): u_fine = mesh(self.init_f,val=0) u_fine.values = np.dot(self.Pspace,G.values) elif isinstance(G,rhs_imex_mesh): u_fine = rhs_imex_mesh(self.init_f) u_fine.impl.values = np.dot(self.Pspace,G.impl.values) u_fine.expl.values = np.dot(self.Pspace,G.expl.values) return u_fine
bsd-2-clause
-4,636,805,818,447,047,000
38.584746
120
0.591435
false
3.105053
false
false
false
RecipeML/Recipe
recipe/classifiers/bernoulliNB.py
1
1530
# -*- coding: utf-8 -*- """ Copyright 2016 Walter José and Alex de Sá This file is part of the RECIPE Algorithm. The RECIPE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. RECIPE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. See http://www.gnu.org/licenses/. """ from sklearn.naive_bayes import BernoulliNB def bernoulliNB(args): """Uses scikit-learn's BernoulliNB, a naive bayes classifier for multinomial models Parameters ---------- alpha : float Additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing). binarize : float or None Threshold for binarizing (mapping to booleans) of sample features. If None, input is presumed to already consist of binary vectors. fit_prior : boolean Whether to learn class prior probabilities or not. If false, a uniform prior will be used. """ alp = 1.0 if(args[2].find("None")==-1): alp = float(args[2]) fit = False if(args[3].find("True")!=-1): fit = True bina = 0.0 if(args[1].find("None")==-1): bina=float(args[1]) return BernoulliNB(alpha=alp, binarize=bina, fit_prior=fit, class_prior=None)
gpl-3.0
-347,470,698,270,359,900
29.58
139
0.689791
false
3.681928
false
false
false
mefly2012/platform
src/parse_validate/qyxg_wscpws.py
1
1948
# -*- coding: utf-8 -*- import sys reload(sys) sys.setdefaultencoding('utf-8') import re from common import public class qyxg_wscpws(): """开庭公告""" need_check_ziduan = ['caseout_come', 'court_litigant', # 'court_acceptance_fee', # 'historycase' ] def check_caseout_come(self, indexstr, ustr): """案件结果""" """可为空,若非空至少包含两个中文汉字或两个英文字母""" ret = None if ustr and len(ustr): if not public.has_count_hz(ustr, 2) \ and not public.has_count_en(ustr, 2): ret = u'没有2个以上汉字页没有2个英文字母' return ret def check_court_litigant(self, indexstr, ustr): """法院当事人""" """可为空,若非空至少包含两个中文汉字或两个英文字母""" ret = None if ustr and len(ustr): if not public.has_count_hz(ustr, 2) \ and not public.has_count_en(ustr, 2): ret = u'没有2个以上汉字页没有2个英文字母' return ret def check_court_acceptance_fee(self, indexstr, ustr): """受理费""" """可为空,若非空为数字+单位的格式""" ret = None if ustr and len(ustr): if not re.compile(u'^\d{1,}(元|\$)$').match(ustr): ret = u'不符合格式数字+单位' return ret def check_historycase(self, indexstr, ustr): """历审案例""" """可为空,必须全为汉字,且包含“法院”两字""" ret = None if ustr and len(ustr): if public.is_allchinese(ustr): if u'法院' not in ustr: ret = u'不包含法院二字' else: ret = u'不全为汉字' return ret
apache-2.0
7,655,155,474,370,721,000
27.172414
61
0.482864
false
2.533333
false
false
false
paksas/anim_tools
anim_tools/transform_utils.py
1
2040
import mathutils import math # # General remarks regarding all functions presented here: # # Each motion is an array of transforms presented in form of tuples # (loc:Vector, rot:Quaternion) # # ============================================================================= # # Calculates a relative movement of 'childMotion' with respect to 'rootMotion'. # # The method works only if both motions have the exact same number of keyframes. # # # @return new motion if the operation was successful, or an empty motion otherwise # def calcRelativeMotion( rootMotion, childMotion ): resultingMotion = [] framesCount = len( rootMotion ) if framesCount != len( childMotion ): op.report( {'ERROR'}, "transform_utils.calcRelativeMotion: The method works only with motions with the same number of keyframes" ) return resultingMotion for frameIdx in range( framesCount ): rootLoc, rootRot = rootMotion[frameIdx] childLoc, childRot = childMotion[frameIdx] invRootRot = rootRot.conjugated().normalized() # translation translation = childLoc - rootLoc translation.rotate( invRootRot ) # rotation rotation = invRootRot * childRot resultingMotion.append( ( translation, rotation ) ) return resultingMotion # # Calculates the rotation around the Z axis ( the yaw ) of the specified transform # def calcYaw( transform ): worldFwdDir = mathutils.Vector( ( 1.0, 0.0, 0.0 ) ) rotatedVec = worldFwdDir.copy() rotatedVec.rotate( transform[1] ) rotatedVec.z = 0.0 rotatedVec.normalize() worldFwdDir2D = worldFwdDir.to_2d() rotatedVec2D = rotatedVec.to_2d() yawAngle = -worldFwdDir2D.angle_signed( rotatedVec2D, 0.0 ) return yawAngle # # Prints the motion definition # def printMotion( motion, header ): print( header ) frameIdx = 1 for keyframe in motion: loc, rot = keyframe[0:2] print( "Frame ", frameIdx, ". loc", loc, "; rot", rot ) frameIdx += 1
gpl-2.0
-1,699,437,732,264,732,400
25.467532
138
0.648675
false
3.816479
false
false
false
aliceinwire/virt-manager
virtinst/Storage.py
2
51198
# # Copyright 2008, 2013 Red Hat, Inc. # Cole Robinson <crobinso@redhat.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301 USA. """ Classes for building and installing libvirt storage xml General workflow for the different storage objects: 1. Storage Pool: Pool type options can be exposed to a user via the static function L{StoragePool.get_pool_types}. Any selection can be fed back into L{StoragePool.get_pool_class} to get the particular volume class to instantiate. From here, values can be set at init time or via properties post init. Different pool types have different options and requirements, so using getattr() is probably the best way to check for parameter availability. 2) Storage Volume: There are a few options for determining what pool volume class to use: - Pass the pools type for L{StoragePool.get_volume_for_pool} - Pass the pool object or name to L{StorageVolume.get_volume_for_pool} These will give back the appropriate class to instantiate. For most cases, all that's needed is a name and capacity, the rest will be filled in. 
@see: U{http://libvirt.org/storage.html} """ import os import threading import time import logging import libvirt import urlgrabber from virtinst.util import xml_escape as escape from virtinst import util from virtinst import support DEFAULT_DEV_TARGET = "/dev" DEFAULT_LVM_TARGET_BASE = "/dev/" DEFAULT_DIR_TARGET_BASE = "/var/lib/libvirt/images/" DEFAULT_SCSI_TARGET = "/dev/disk/by-path" DEFAULT_MPATH_TARGET = "/dev/mapper" # Pulled from libvirt, used for building on older versions VIR_STORAGE_VOL_FILE = 0 VIR_STORAGE_VOL_BLOCK = 1 def is_create_vol_from_supported(conn): return support.check_pool_support(conn, support.SUPPORT_STORAGE_CREATEVOLFROM) def _parse_pool_source_list(source_xml): def source_parser(node): ret_list = [] child = node.children while child: if child.name == "source": val_dict = {} source = child.children while source: if source.name == "name": val_dict["source_name"] = source.content elif source.name == "host": val_dict["host"] = source.prop("name") elif source.name == "format": val_dict["format"] = source.prop("type") elif source.name in ["device", "dir"]: val_dict["source_path"] = source.prop("path") source = source.next ret_list.append(val_dict) child = child.next for val_dict in ret_list: if (val_dict.get("format") == "lvm2" and val_dict.get("source_name") and not val_dict.get("target_path")): val_dict["target_path"] = (DEFAULT_LVM_TARGET_BASE + val_dict["source_name"]) return ret_list return util.parse_node_helper(source_xml, "sources", source_parser) class StorageObject(object): """ Base class for building any libvirt storage object. Mostly meaningless to directly instantiate. 
""" TYPE_POOL = "pool" TYPE_VOLUME = "volume" def __init__(self, object_type, name, conn=None): """ Initialize storage object parameters """ if object_type not in [self.TYPE_POOL, self.TYPE_VOLUME]: raise ValueError(_("Unknown storage object type: %s") % type) self._object_type = object_type self._conn = None self._name = None if conn is not None: self.conn = conn self.name = name # Initialize all optional properties self._perms = None ## Properties def get_object_type(self): # 'pool' or 'volume' return self._object_type object_type = property(get_object_type) def get_conn(self): return self._conn def set_conn(self, val): if not isinstance(val, libvirt.virConnect): raise ValueError(_("'conn' must be a libvirt connection object.")) if not util.is_storage_capable(val): raise ValueError(_("Passed connection is not libvirt storage " "capable")) self._conn = val conn = property(get_conn, set_conn, doc=""" Libvirt connection to check object against/install on """) def get_name(self): return self._name def set_name(self, val): util.validate_name(_("Storage object"), val) # Check that name doesn't collide with other storage objects self._check_name_collision(val) self._name = val name = property(get_name, set_name, doc=_("Name for the storage object.")) # Get/Set methods for use by some objects. Will register where applicable def get_perms(self): return self._perms def set_perms(self, val): if type(val) is not dict: raise ValueError(_("Permissions must be passed as a dict object")) for key in ["mode", "owner", "group"]: if not key in val: raise ValueError(_("Permissions must contain 'mode', 'owner' and 'group' keys.")) self._perms = val # Validation helper functions def _validate_path(self, path): if not isinstance(path, str) or not path.startswith("/"): raise ValueError(_("'%s' is not an absolute path." 
% path)) def _check_name_collision(self, name): ignore = name raise NotImplementedError() # XML Building def _get_storage_xml(self): """ Returns the pool/volume specific xml blob """ raise NotImplementedError() def _get_perms_xml(self): perms = self.get_perms() if not perms: return "" xml = " <permissions>\n" + \ " <mode>0%o</mode>\n" % perms["mode"] + \ " <owner>%d</owner>\n" % perms["owner"] + \ " <group>%d</group>\n" % perms["group"] if "label" in perms: xml += " <label>%s</label>\n" % perms["label"] xml += " </permissions>\n" return xml def get_xml_config(self): """ Construct the xml description of the storage object @returns: xml description @rtype: C{str} """ if not hasattr(self, "type"): root_xml = "<%s>\n" % self.object_type else: _type = getattr(self, "type") root_xml = "<%s type='%s'>\n" % (self.object_type, _type) xml = "%s" % (root_xml) + \ """ <name>%s</name>\n""" % (self.name) + \ """%(stor_xml)s""" % {"stor_xml" : self._get_storage_xml()} + \ """</%s>\n""" % (self.object_type) return xml class StoragePool(StorageObject): """ Base class for building and installing libvirt storage pool xml """ # @group Types: TYPE_* TYPE_DIR = "dir" TYPE_FS = "fs" TYPE_NETFS = "netfs" TYPE_LOGICAL = "logical" TYPE_DISK = "disk" TYPE_ISCSI = "iscsi" TYPE_SCSI = "scsi" TYPE_MPATH = "mpath" # Pool type descriptions for use in higher level programs _types = {} _types[TYPE_DIR] = _("Filesystem Directory") _types[TYPE_FS] = _("Pre-Formatted Block Device") _types[TYPE_NETFS] = _("Network Exported Directory") _types[TYPE_LOGICAL] = _("LVM Volume Group") _types[TYPE_DISK] = _("Physical Disk Device") _types[TYPE_ISCSI] = _("iSCSI Target") _types[TYPE_SCSI] = _("SCSI Host Adapter") _types[TYPE_MPATH] = _("Multipath Device Enumerator") def get_pool_class(ptype): """ Return class associated with passed pool type. 
@param ptype: Pool type @type ptype: member of I{Types} """ if ptype not in StoragePool._types: raise ValueError(_("Unknown storage pool type: %s" % ptype)) if ptype == StoragePool.TYPE_DIR: return DirectoryPool if ptype == StoragePool.TYPE_FS: return FilesystemPool if ptype == StoragePool.TYPE_NETFS: return NetworkFilesystemPool if ptype == StoragePool.TYPE_LOGICAL: return LogicalPool if ptype == StoragePool.TYPE_DISK: return DiskPool if ptype == StoragePool.TYPE_ISCSI: return iSCSIPool if ptype == StoragePool.TYPE_SCSI: return SCSIPool if ptype == StoragePool.TYPE_MPATH: return MultipathPool get_pool_class = staticmethod(get_pool_class) def get_volume_for_pool(pool_type): """Convenience method, returns volume class associated with pool_type""" pool_class = StoragePool.get_pool_class(pool_type) return pool_class.get_volume_class() get_volume_for_pool = staticmethod(get_volume_for_pool) def get_pool_types(): """Return list of appropriate pool types""" return StoragePool._types.keys() get_pool_types = staticmethod(get_pool_types) def get_pool_type_desc(pool_type): """Return human readable description for passed pool type""" if pool_type in StoragePool._types: return StoragePool._types[pool_type] else: return "%s pool" % pool_type get_pool_type_desc = staticmethod(get_pool_type_desc) def pool_list_from_sources(conn, name, pool_type, host=None): """ Return a list of StoragePool instances built from libvirt's pool source enumeration (if supported). 
@param conn: Libvirt connection @param name: Name for the new pool @param pool_type: Pool type string from I{Types} @param host: Option host string to poll for sources """ if not support.check_conn_support(conn, support.SUPPORT_CONN_FINDPOOLSOURCES): return [] pool_class = StoragePool.get_pool_class(pool_type) pool_inst = pool_class(conn=conn, name=name) if host: source_xml = "<source><host name='%s'/></source>" % host else: source_xml = "<source/>" try: xml = conn.findStoragePoolSources(pool_type, source_xml, 0) except libvirt.libvirtError, e: if support.is_error_nosupport(e): return [] raise retlist = [] source_list = _parse_pool_source_list(xml) for source in source_list: pool_inst = pool_class(conn=conn, name=name) for key, val in source.items(): if not hasattr(pool_inst, key): continue setattr(pool_inst, key, val) retlist.append(pool_inst) return retlist pool_list_from_sources = staticmethod(pool_list_from_sources) def __init__(self, conn, name, type, target_path=None, uuid=None): # pylint: disable=W0622 # Redefining built-in 'type', but it matches the XML so keep it StorageObject.__init__(self, object_type=StorageObject.TYPE_POOL, name=name, conn=conn) if type not in self.get_pool_types(): raise ValueError(_("Unknown storage pool type: %s" % type)) self._type = type self._target_path = None self._host = None self._format = None self._source_path = None self._uuid = None if target_path is None: target_path = self._get_default_target_path() self.target_path = target_path if uuid: self.uuid = uuid # Initialize all optional properties self._host = None self._source_path = None self._random_uuid = util.generate_uuid(self.conn) # Properties used by all pools def get_type(self): return self._type type = property(get_type, doc=_("Storage device type the pool will represent.")) def get_target_path(self): return self._target_path def set_target_path(self, val): self._validate_path(val) self._target_path = os.path.abspath(val) # Get/Set methods for use by some pools. 
Will be registered when applicable def get_source_path(self): return self._source_path def set_source_path(self, val): self._validate_path(val) self._source_path = os.path.abspath(val) def get_host(self): return self._host def set_host(self, val): if not isinstance(val, str): raise ValueError(_("Host name must be a string")) self._host = val # uuid: uuid of the storage object. optional: generated if not set def get_uuid(self): return self._uuid def set_uuid(self, val): val = util.validate_uuid(val) self._uuid = val uuid = property(get_uuid, set_uuid) # Validation functions def _check_name_collision(self, name): pool = None try: pool = self.conn.storagePoolLookupByName(name) except libvirt.libvirtError: pass if pool: raise ValueError(_("Name '%s' already in use by another pool." % name)) def _get_default_target_path(self): raise NotImplementedError() # XML Building def _get_target_xml(self): raise NotImplementedError() def _get_source_xml(self): raise NotImplementedError() def _get_storage_xml(self): src_xml = "" if self._get_source_xml() != "": src_xml = " <source>\n" + \ "%s" % (self._get_source_xml()) + \ " </source>\n" tar_xml = " <target>\n" + \ "%s" % (self._get_target_xml()) + \ " </target>\n" return " <uuid>%s</uuid>\n" % (self.uuid or self._random_uuid) + \ "%s" % src_xml + \ "%s" % tar_xml def install(self, meter=None, create=False, build=False, autostart=False): """ Install storage pool xml. 
""" xml = self.get_xml_config() logging.debug("Creating storage pool '%s' with xml:\n%s", self.name, xml) if not meter: meter = urlgrabber.progress.BaseMeter() try: pool = self.conn.storagePoolDefineXML(xml, 0) except Exception, e: raise RuntimeError(_("Could not define storage pool: %s" % str(e))) errmsg = None if build: try: pool.build(libvirt.VIR_STORAGE_POOL_BUILD_NEW) except Exception, e: errmsg = _("Could not build storage pool: %s" % str(e)) if create and not errmsg: try: pool.create(0) except Exception, e: errmsg = _("Could not start storage pool: %s" % str(e)) if autostart and not errmsg: try: pool.setAutostart(True) except Exception, e: errmsg = _("Could not set pool autostart flag: %s" % str(e)) if errmsg: # Try and clean up the leftover pool try: pool.undefine() except Exception, e: logging.debug("Error cleaning up pool after failure: " + "%s" % str(e)) raise RuntimeError(errmsg) return pool class DirectoryPool(StoragePool): """ Create a directory based storage pool """ def get_volume_class(): return FileVolume get_volume_class = staticmethod(get_volume_class) # Register applicable property methods from parent class perms = property(StorageObject.get_perms, StorageObject.set_perms) target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Directory to use for the storage pool.")) def __init__(self, conn, name, target_path=None, uuid=None, perms=None): StoragePool.__init__(self, name=name, type=StoragePool.TYPE_DIR, target_path=target_path, uuid=uuid, conn=conn) if perms: self.perms = perms def _get_default_target_path(self): path = (DEFAULT_DIR_TARGET_BASE + self.name) return path def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) + \ "%s" % self._get_perms_xml() return xml def _get_source_xml(self): return "" class FilesystemPool(StoragePool): """ Create a formatted partition based storage pool """ def get_volume_class(): return FileVolume get_volume_class = staticmethod(get_volume_class) 
formats = ["auto", "ext2", "ext3", "ext4", "ufs", "iso9660", "udf", "gfs", "gfs2", "vfat", "hfs+", "xfs"] # Register applicable property methods from parent class perms = property(StorageObject.get_perms, StorageObject.set_perms) source_path = property(StoragePool.get_source_path, StoragePool.set_source_path, doc=_("The existing device to mount for the pool.")) target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Location to mount the source device.")) def __init__(self, conn, name, source_path=None, target_path=None, format="auto", uuid=None, perms=None): # pylint: disable=W0622 # Redefining built-in 'format', but it matches the XML so keep it StoragePool.__init__(self, name=name, type=StoragePool.TYPE_FS, target_path=target_path, uuid=uuid, conn=conn) self.format = format if source_path: self.source_path = source_path if perms: self.perms = perms def get_format(self): return self._format def set_format(self, val): if not val in self.formats: raise ValueError(_("Unknown Filesystem format: %s" % val)) self._format = val format = property(get_format, set_format, doc=_("Filesystem type of the source device.")) def _get_default_target_path(self): path = (DEFAULT_DIR_TARGET_BASE + self.name) return path def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) + \ "%s" % self._get_perms_xml() return xml def _get_source_xml(self): if not self.source_path: raise RuntimeError(_("Device path is required")) xml = " <format type='%s'/>\n" % self.format + \ " <device path='%s'/>\n" % escape(self.source_path) return xml class NetworkFilesystemPool(StoragePool): """ Create a network mounted filesystem storage pool """ def get_volume_class(): return FileVolume get_volume_class = staticmethod(get_volume_class) formats = ["auto", "nfs", "glusterfs"] # Register applicable property methods from parent class source_path = property(StoragePool.get_source_path, StoragePool.set_source_path, doc=_("Path on the host that is being 
shared.")) host = property(StoragePool.get_host, StoragePool.set_host, doc=_("Name of the host sharing the storage.")) target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Location to mount the source device.")) def __init__(self, conn, name, source_path=None, host=None, target_path=None, format="auto", uuid=None): # pylint: disable=W0622 # Redefining built-in 'format', but it matches the XML so keep it StoragePool.__init__(self, name=name, type=StoragePool.TYPE_NETFS, uuid=uuid, target_path=target_path, conn=conn) self.format = format if source_path: self.source_path = source_path if host: self.host = host def get_format(self): return self._format def set_format(self, val): if not val in self.formats: raise ValueError(_("Unknown Network Filesystem format: %s" % val)) self._format = val format = property(get_format, set_format, doc=_("Type of network filesystem.")) def _get_default_target_path(self): path = (DEFAULT_DIR_TARGET_BASE + self.name) return path def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) return xml def _get_source_xml(self): if not self.host: raise RuntimeError(_("Hostname is required")) if not self.source_path: raise RuntimeError(_("Host path is required")) xml = """ <format type="%s"/>\n""" % self.format + \ """ <host name="%s"/>\n""" % self.host + \ """ <dir path="%s"/>\n""" % escape(self.source_path) return xml class LogicalPool(StoragePool): """ Create a logical (lvm volume group) storage pool """ def get_volume_class(): return LogicalVolume get_volume_class = staticmethod(get_volume_class) # Register applicable property methods from parent class perms = property(StorageObject.get_perms, StorageObject.set_perms) target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Location of the existing LVM volume group.")) def __init__(self, conn, name, target_path=None, uuid=None, perms=None, source_path=None, source_name=None): StoragePool.__init__(self, 
name=name, type=StoragePool.TYPE_LOGICAL, target_path=target_path, uuid=uuid, conn=conn) self._source_name = None if perms: self.perms = perms if source_path: self.source_path = source_path if source_name: self.source_name = source_name # Need to overwrite storage path checks, since this optionally be a list # of devices def get_source_path(self): return self._source_path def set_source_path(self, val): if not val: self._source_path = None return if type(val) != list: StoragePool.set_source_path(self, val) else: self._source_path = val source_path = property(get_source_path, set_source_path, doc=_("Optional device(s) to build new LVM volume " "on.")) def get_source_name(self): if self._source_name: return self._source_name # If a source name isn't explictly set, try to determine it from # existing parameters srcname = self.name if (self.target_path and self.target_path.startswith(DEFAULT_LVM_TARGET_BASE)): # If there is a target path, parse it for an expected VG # location, and pull the name from there vg = self.target_path[len(DEFAULT_LVM_TARGET_BASE):] srcname = vg.split("/", 1)[0] return srcname def set_source_name(self, val): self._source_name = val source_name = property(get_source_name, set_source_name, doc=_("Name of the Volume Group")) def _make_source_name(self): srcname = self.name if self.source_path: # Building a pool, so just use pool name return srcname def _get_default_target_path(self): return DEFAULT_LVM_TARGET_BASE + self.name def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) + \ "%s" % self._get_perms_xml() return xml def _get_source_xml(self): sources = self.source_path if type(sources) != list: sources = sources and [sources] or [] xml = "" for s in sources: xml += " <device path='%s'/>\n" % s if self.source_name: xml += " <name>%s</name>\n" % self.source_name return xml def install(self, meter=None, create=False, build=False, autostart=False): if build and not self.source_path: raise ValueError(_("Must explicitly 
specify source path if " "building pool")) return StoragePool.install(self, meter=meter, create=create, build=build, autostart=autostart) class DiskPool(StoragePool): """ Create a storage pool from a physical disk """ def get_volume_class(): return DiskVolume get_volume_class = staticmethod(get_volume_class) # Register applicable property methods from parent class source_path = property(StoragePool.get_source_path, StoragePool.set_source_path, doc=_("Path to the existing disk device.")) target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Root location for identifying new storage" " volumes.")) formats = ["auto", "bsd", "dos", "dvh", "gpt", "mac", "pc98", "sun"] def __init__(self, conn, name, source_path=None, target_path=None, format="auto", uuid=None): # pylint: disable=W0622 # Redefining built-in 'format', but it matches the XML so keep it StoragePool.__init__(self, name=name, type=StoragePool.TYPE_DISK, uuid=uuid, target_path=target_path, conn=conn) self.format = format if source_path: self.source_path = source_path def get_format(self): return self._format def set_format(self, val): if not val in self.formats: raise ValueError(_("Unknown Disk format: %s" % val)) self._format = val format = property(get_format, set_format, doc=_("Format of the source device's partition table.")) def _get_default_target_path(self): return DEFAULT_DEV_TARGET def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) return xml def _get_source_xml(self): if not self.source_path: raise RuntimeError(_("Host path is required")) xml = "" # There is no explicit "auto" type for disk pools, but leaving out # the format type seems to do the job for existing formatted disks if self.format != "auto": xml = """ <format type="%s"/>\n""" % self.format xml += """ <device path="%s"/>\n""" % escape(self.source_path) return xml def install(self, meter=None, create=False, build=False, autostart=False): if self.format == "auto" and build: 
raise ValueError(_("Must explicitly specify disk format if " "formatting disk device.")) return StoragePool.install(self, meter=meter, create=create, build=build, autostart=autostart) class iSCSIPool(StoragePool): """ Create an iSCSI based storage pool """ host = property(StoragePool.get_host, StoragePool.set_host, doc=_("Name of the host sharing the storage.")) target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Root location for identifying new storage" " volumes.")) def get_volume_class(): raise NotImplementedError(_("iSCSI volume creation is not supported.")) get_volume_class = staticmethod(get_volume_class) def __init__(self, conn, name, source_path=None, host=None, target_path=None, uuid=None): StoragePool.__init__(self, name=name, type=StoragePool.TYPE_ISCSI, uuid=uuid, target_path=target_path, conn=conn) if source_path: self.source_path = source_path if host: self.host = host self._iqn = None # Need to overwrite pool *_source_path since iscsi device isn't # a fully qualified path def get_source_path(self): return self._source_path def set_source_path(self, val): self._source_path = val source_path = property(get_source_path, set_source_path, doc=_("Path on the host that is being shared.")) def _get_iqn(self): return self._iqn def _set_iqn(self, val): self._iqn = val iqn = property(_get_iqn, _set_iqn, doc=_("iSCSI initiator qualified name")) def _get_default_target_path(self): return DEFAULT_SCSI_TARGET def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) return xml def _get_source_xml(self): if not self.host: raise RuntimeError(_("Hostname is required")) if not self.source_path: raise RuntimeError(_("Host path is required")) iqn_xml = "" if self.iqn: iqn_xml += """ <initiator>\n""" iqn_xml += """ <iqn name="%s"/>\n""" % escape(self.iqn) iqn_xml += """ </initiator>\n""" xml = """ <host name="%s"/>\n""" % self.host xml += """ <device path="%s"/>\n""" % escape(self.source_path) xml += iqn_xml 
return xml class SCSIPool(StoragePool): """ Create a SCSI based storage pool """ target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Root location for identifying new storage" " volumes.")) def get_volume_class(): raise NotImplementedError(_("SCSI volume creation is not supported.")) get_volume_class = staticmethod(get_volume_class) def __init__(self, conn, name, source_path=None, target_path=None, uuid=None): StoragePool.__init__(self, name=name, type=StoragePool.TYPE_SCSI, uuid=uuid, target_path=target_path, conn=conn) if source_path: self.source_path = source_path # Need to overwrite pool *_source_path since iscsi device isn't # a fully qualified path def get_source_path(self): return self._source_path def set_source_path(self, val): self._source_path = val source_path = property(get_source_path, set_source_path, doc=_("Name of the scsi adapter (ex. host2)")) def _get_default_target_path(self): return DEFAULT_SCSI_TARGET def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) return xml def _get_source_xml(self): if not self.source_path: raise RuntimeError(_("Adapter name is required")) xml = """ <adapter name="%s"/>\n""" % escape(self.source_path) return xml class MultipathPool(StoragePool): """ Create a Multipath based storage pool """ target_path = property(StoragePool.get_target_path, StoragePool.set_target_path, doc=_("Root location for identifying new storage" " volumes.")) def get_volume_class(): raise NotImplementedError(_("Multipath volume creation is not " "supported.")) get_volume_class = staticmethod(get_volume_class) def __init__(self, conn, name, target_path=None, uuid=None): StoragePool.__init__(self, name=name, type=StoragePool.TYPE_MPATH, uuid=uuid, target_path=target_path, conn=conn) def _get_default_target_path(self): return DEFAULT_MPATH_TARGET def _get_target_xml(self): xml = " <path>%s</path>\n" % escape(self.target_path) return xml def _get_source_xml(self): return "" 
########################## # Storage Volume classes # ########################## class StorageVolume(StorageObject): """ Base class for building and installing libvirt storage volume xml """ formats = [] # File vs. Block for the Volume class _file_type = None def __init__(self, name, capacity, conn=None, pool_name=None, pool=None, allocation=0): """ @param name: Name for the new storage volume @param capacity: Total size of the new volume (in bytes) @param conn: optional virConnect instance to lookup pool_name on @param pool_name: optional pool_name to install on @param pool: virStoragePool object to install on @param allocation: amount of storage to actually allocate (default 0) """ if pool is None: if pool_name is None: raise ValueError(_("One of pool or pool_name must be " "specified.")) if conn is None: raise ValueError(_("'conn' must be specified with 'pool_name'")) pool = StorageVolume.lookup_pool_by_name(pool_name=pool_name, conn=conn) self._pool = None self.pool = pool poolconn = self.pool._conn # pylint: disable=W0212 StorageObject.__init__(self, object_type=StorageObject.TYPE_VOLUME, name=name, conn=poolconn) self._allocation = None self._capacity = None self._format = None self._input_vol = None self.allocation = allocation self.capacity = capacity # Indicate that the volume installation has finished. Used to # definitively tell the storage progress thread to stop polling. 
self._install_finished = True def get_volume_for_pool(pool_object=None, pool_name=None, conn=None): """ Returns volume class associated with passed pool_object/name """ pool_object = StorageVolume.lookup_pool_by_name(pool_object=pool_object, pool_name=pool_name, conn=conn) return StoragePool.get_volume_for_pool(util.get_xml_path(pool_object.XMLDesc(0), "/pool/@type")) get_volume_for_pool = staticmethod(get_volume_for_pool) def find_free_name(name, pool_object=None, pool_name=None, conn=None, suffix="", collidelist=None, start_num=0): """ Finds a name similar (or equal) to passed 'name' that is not in use by another pool This function scans the list of existing Volumes on the passed or looked up pool object for a collision with the passed name. If the name is in use, it append "-1" to the name and tries again, then "-2", continuing to 100000 (which will hopefully never be reached.") If suffix is specified, attach it to the (potentially incremented) name before checking for collision. Ex name="test", suffix=".img" -> name-3.img @param collidelist: An extra list of names to check for collision @type collidelist: C{list} @returns: A free name @rtype: C{str} """ collidelist = collidelist or [] pool_object = StorageVolume.lookup_pool_by_name( pool_object=pool_object, pool_name=pool_name, conn=conn) pool_object.refresh(0) return util.generate_name(name, pool_object.storageVolLookupByName, suffix, collidelist=collidelist, start_num=start_num) find_free_name = staticmethod(find_free_name) def lookup_pool_by_name(pool_object=None, pool_name=None, conn=None): """ Returns pool object determined from passed parameters. Largely a convenience function for the other static functions. 
""" if pool_object is None and pool_name is None: raise ValueError(_("Must specify pool_object or pool_name")) if pool_name is not None and pool_object is None: if conn is None: raise ValueError(_("'conn' must be specified with 'pool_name'")) if not util.is_storage_capable(conn): raise ValueError(_("Connection does not support storage " "management.")) try: pool_object = conn.storagePoolLookupByName(pool_name) except Exception, e: raise ValueError(_("Couldn't find storage pool '%s': %s" % (pool_name, str(e)))) if not isinstance(pool_object, libvirt.virStoragePool): raise ValueError(_("pool_object must be a virStoragePool")) return pool_object lookup_pool_by_name = staticmethod(lookup_pool_by_name) # Properties used by all volumes def get_file_type(self): return self._file_type file_type = property(get_file_type) def get_capacity(self): return self._capacity def set_capacity(self, val): if type(val) not in (int, float, long) or val <= 0: raise ValueError(_("Capacity must be a positive number")) newcap = int(val) origcap = self.capacity origall = self.allocation self._capacity = newcap if self.allocation is not None and (newcap < self.allocation): self._allocation = newcap ret = self.is_size_conflict() if ret[0]: self._capacity = origcap self._allocation = origall raise ValueError(ret[1]) elif ret[1]: logging.warn(ret[1]) capacity = property(get_capacity, set_capacity) def get_allocation(self): return self._allocation def set_allocation(self, val): if type(val) not in (int, float, long) or val < 0: raise ValueError(_("Allocation must be a non-negative number")) newall = int(val) if self.capacity is not None and newall > self.capacity: logging.debug("Capping allocation at capacity.") newall = self.capacity origall = self._allocation self._allocation = newall ret = self.is_size_conflict() if ret[0]: self._allocation = origall raise ValueError(ret[1]) elif ret[1]: logging.warn(ret[1]) allocation = property(get_allocation, set_allocation) def get_pool(self): return 
self._pool def set_pool(self, newpool): if not isinstance(newpool, libvirt.virStoragePool): raise ValueError(_("'pool' must be a virStoragePool instance.")) if newpool.info()[0] != libvirt.VIR_STORAGE_POOL_RUNNING: raise ValueError(_("pool '%s' must be active." % newpool.name())) self._pool = newpool pool = property(get_pool, set_pool) def get_input_vol(self): return self._input_vol def set_input_vol(self, vol): if vol is None: self._input_vol = None return if not isinstance(vol, libvirt.virStorageVol): raise ValueError(_("input_vol must be a virStorageVol")) poolconn = self.pool._conn # pylint: disable=W0212 if not is_create_vol_from_supported(poolconn): raise ValueError(_("Creating storage from an existing volume is" " not supported by this libvirt version.")) self._input_vol = vol input_vol = property(get_input_vol, set_input_vol, doc=_("virStorageVolume pointer to clone/use as " "input.")) # Property functions used by more than one child class def get_format(self): return self._format def set_format(self, val): if val not in self.formats: raise ValueError(_("'%s' is not a valid format.") % val) self._format = val def _check_name_collision(self, name): vol = None try: vol = self.pool.storageVolLookupByName(name) except libvirt.libvirtError: pass if vol: raise ValueError(_("Name '%s' already in use by another volume." 
% name)) def _check_target_collision(self, path): col = None try: col = self.conn.storageVolLookupByPath(path) except libvirt.libvirtError: pass if col: return True return False # xml building functions def _get_target_xml(self): raise NotImplementedError() def _get_source_xml(self): raise NotImplementedError() def _get_storage_xml(self): src_xml = "" if self._get_source_xml() != "": src_xml = " <source>\n" + \ "%s" % (self._get_source_xml()) + \ " </source>\n" tar_xml = " <target>\n" + \ "%s" % (self._get_target_xml()) + \ " </target>\n" return " <capacity>%d</capacity>\n" % self.capacity + \ " <allocation>%d</allocation>\n" % self.allocation + \ "%s" % src_xml + \ "%s" % tar_xml def install(self, meter=None): """ Build and install storage volume from xml """ xml = self.get_xml_config() logging.debug("Creating storage volume '%s' with xml:\n%s", self.name, xml) t = threading.Thread(target=self._progress_thread, name="Checking storage allocation", args=(meter,)) t.setDaemon(True) if not meter: meter = urlgrabber.progress.BaseMeter() try: self._install_finished = False t.start() meter.start(size=self.capacity, text=_("Allocating '%s'") % self.name) if self.input_vol: vol = self.pool.createXMLFrom(xml, self.input_vol, 0) else: vol = self.pool.createXML(xml, 0) meter.end(self.capacity) logging.debug("Storage volume '%s' install complete.", self.name) return vol except libvirt.libvirtError, e: if support.is_error_nosupport(e): raise RuntimeError("Libvirt version does not support " "storage cloning.") raise except Exception, e: raise RuntimeError("Couldn't create storage volume " "'%s': '%s'" % (self.name, str(e))) finally: self._install_finished = True def _progress_thread(self, meter): lookup_attempts = 10 vol = None if not meter: return while lookup_attempts > 0: try: vol = self.pool.storageVolLookupByName(self.name) break except: lookup_attempts -= 1 time.sleep(.2) if self._install_finished: break else: continue break if vol is None: logging.debug("Couldn't lookup 
storage volume in prog thread.") return while not self._install_finished: ignore, ignore, alloc = vol.info() meter.update(alloc) time.sleep(1) def is_size_conflict(self): """ Report if requested size exceeds its pool's available amount @returns: 2 element tuple: 1. True if collision is fatal, false otherwise 2. String message if some collision was encountered. @rtype: 2 element C{tuple}: (C{bool}, C{str}) """ # pool info is [pool state, capacity, allocation, available] avail = self.pool.info()[3] if self.allocation > avail: return (True, _("There is not enough free space on the storage " "pool to create the volume. " "(%d M requested allocation > %d M available)" % ((self.allocation / (1024 * 1024)), (avail / (1024 * 1024))))) elif self.capacity > avail: return (False, _("The requested volume capacity will exceed the " "available pool space when the volume is fully " "allocated. " "(%d M requested capacity > %d M available)" % ((self.capacity / (1024 * 1024)), (avail / (1024 * 1024))))) return (False, "") class FileVolume(StorageVolume): """ Build and install xml for use on pools which use file based storage """ _file_type = VIR_STORAGE_VOL_FILE formats = ["raw", "bochs", "cloop", "cow", "dmg", "iso", "qcow", "qcow2", "qed", "vmdk", "vpc"] create_formats = ["raw", "cow", "qcow", "qcow2", "qed", "vmdk", "vpc"] # Register applicable property methods from parent class perms = property(StorageObject.get_perms, StorageObject.set_perms) format = property(StorageVolume.get_format, StorageVolume.set_format) def __init__(self, name, capacity, pool=None, pool_name=None, conn=None, format="raw", allocation=None, perms=None): # pylint: disable=W0622 # Redefining built-in 'format', but it matches the XML so keep it StorageVolume.__init__(self, name=name, pool=pool, pool_name=pool_name, allocation=allocation, capacity=capacity, conn=conn) self.format = format if perms: self.perms = perms def _get_target_xml(self): return " <format type='%s'/>\n" % self.format + \ "%s" % 
self._get_perms_xml() def _get_source_xml(self): return "" class DiskVolume(StorageVolume): """ Build and install xml volumes for use on physical disk pools """ _file_type = VIR_STORAGE_VOL_BLOCK # Register applicable property methods from parent class perms = property(StorageObject.get_perms, StorageObject.set_perms) def __init__(self, name, capacity, pool=None, pool_name=None, conn=None, allocation=None, perms=None): StorageVolume.__init__(self, name=name, pool=pool, pool_name=pool_name, allocation=allocation, capacity=capacity, conn=conn) if perms: self.perms = perms def _get_target_xml(self): return "%s" % self._get_perms_xml() def _get_source_xml(self): return "" class LogicalVolume(StorageVolume): """ Build and install logical volumes for lvm pools """ _file_type = VIR_STORAGE_VOL_BLOCK # Register applicable property methods from parent class perms = property(StorageObject.get_perms, StorageObject.set_perms) def __init__(self, name, capacity, pool=None, pool_name=None, conn=None, allocation=None, perms=None): if allocation and allocation != capacity: logging.warn(_("Sparse logical volumes are not supported, " "setting allocation equal to capacity")) StorageVolume.__init__(self, name=name, pool=pool, pool_name=pool_name, allocation=capacity, capacity=capacity, conn=conn) if perms: self.perms = perms def set_capacity(self, capacity): super(LogicalVolume, self).set_capacity(capacity) self.allocation = capacity capacity = property(StorageVolume.get_capacity, set_capacity) def set_allocation(self, allocation): if allocation != self.capacity: logging.warn(_("Sparse logical volumes are not supported, " "setting allocation equal to capacity")) super(LogicalVolume, self).set_allocation(self.capacity) capacity = property(StorageVolume.get_allocation, set_allocation) def _get_target_xml(self): return "%s" % self._get_perms_xml() def _get_source_xml(self): return "" class CloneVolume(StorageVolume): """ Build and install a volume that is a clone of an existing volume """ 
format = property(StorageVolume.get_format, StorageVolume.set_format) def __init__(self, name, input_vol): if not isinstance(input_vol, libvirt.virStorageVol): raise ValueError(_("input_vol must be a virStorageVol")) pool = input_vol.storagePoolLookupByVolume() # Populate some basic info xml = input_vol.XMLDesc(0) typ = input_vol.info()[0] cap = int(util.get_xml_path(xml, "/volume/capacity")) alc = int(util.get_xml_path(xml, "/volume/allocation")) fmt = util.get_xml_path(xml, "/volume/target/format/@type") StorageVolume.__init__(self, name=name, pool=pool, pool_name=pool.name(), allocation=alc, capacity=cap) self.input_vol = input_vol self._file_type = typ self._format = fmt def _get_target_xml(self): return "" def _get_source_xml(self): return "" def get_xml_config(self): xml = self.input_vol.XMLDesc(0) newxml = util.set_xml_path(xml, "/volume/name", self.name) return newxml # class iSCSIVolume(StorageVolume): # """ # Build and install xml for use on iSCSI device pools # """ # _file_type = VIR_STORAGE_VOL_BLOCK # # def __init__(self, *args, **kwargs): # raise NotImplementedError
gpl-2.0
7,016,576,030,948,345,000
34.163462
104
0.562053
false
4.203793
false
false
false
openstack/mistral-dashboard
mistraldashboard/action_executions/views.py
1
5486
# Copyright 2016 - Nokia. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from django.urls import reverse from django.urls import reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views import generic from horizon import forms from horizon import tables from mistraldashboard.action_executions import forms as action_execution_forms from mistraldashboard.action_executions import tables as mistral_tables from mistraldashboard import api from mistraldashboard.default import utils from mistraldashboard import forms as mistral_forms def get_single_action_execution_data(request, **kwargs): action_execution_id = kwargs['action_execution_id'] action_execution = api.action_execution_get( request, action_execution_id ) return action_execution class OverviewView(generic.TemplateView): template_name = 'mistral/action_executions/detail.html' page_title = _("Action Execution Details") workflow_url = 'horizon:mistral:workflows:detail' task_execution_url = 'horizon:mistral:tasks:detail' def get_context_data(self, **kwargs): context = super(OverviewView, self).get_context_data(**kwargs) action_execution = get_single_action_execution_data( self.request, **kwargs ) if action_execution.workflow_name: action_execution.workflow_url = reverse( self.workflow_url, args=[action_execution.workflow_name]) if action_execution.task_execution_id: action_execution.task_execution_url = reverse( self.task_execution_url, args=[action_execution.task_execution_id] ) if 
action_execution.input: action_execution.input = utils.prettyprint(action_execution.input) if action_execution.output: action_execution.output = utils.prettyprint( action_execution.output ) if action_execution.state: action_execution.state = utils.label(action_execution.state) action_execution.accepted = utils.booleanfield( action_execution.accepted ) breadcrumb = [(action_execution.id, reverse( 'horizon:mistral:action_executions:detail', args=[action_execution.id] ))] context["custom_breadcrumb"] = breadcrumb context['action_execution'] = action_execution return context class CodeView(forms.ModalFormView): template_name = 'mistral/default/code.html' modal_header = _("Code view") form_id = "code_view" form_class = mistral_forms.EmptyForm cancel_label = "OK" cancel_url = reverse_lazy("horizon:mistral:action_executions:index") page_title = _("Code view") def get_context_data(self, **kwargs): context = super(CodeView, self).get_context_data(**kwargs) column = self.kwargs['column'] action_execution = get_single_action_execution_data( self.request, **self.kwargs ) io = {} if column == 'input': io['name'] = _('Input') io['value'] = utils.prettyprint(action_execution.input) elif column == 'output': io['name'] = _('Output') io['value'] = ( utils.prettyprint(action_execution.output) if action_execution.output else _("No available output yet") ) context['io'] = io return context class IndexView(tables.DataTableView): table_class = mistral_tables.ActionExecutionsTable template_name = 'mistral/action_executions/index.html' def get_data(self): return api.action_executions_list(self.request) class UpdateView(forms.ModalFormView): template_name = 'mistral/action_executions/update.html' modal_header = _("Update Action Execution") form_id = "update_action_execution" form_class = action_execution_forms.UpdateForm submit_label = _("Update") success_url = reverse_lazy("horizon:mistral:action_executions:index") submit_url = "horizon:mistral:action_executions:update" cancel_url = 
"horizon:mistral:action_executions:index" page_title = _("Update Action Execution") def get_initial(self): return {"action_execution_id": self.kwargs["action_execution_id"]} def get_context_data(self, **kwargs): context = super(UpdateView, self).get_context_data(**kwargs) context['submit_url'] = reverse( self.submit_url, args=[self.kwargs["action_execution_id"]] ) return context class FilteredByTaskView(tables.DataTableView): table_class = mistral_tables.ActionExecutionsTable template_name = 'mistral/action_executions/filtered.html' data = {} def get_data(self, **kwargs): task_id = self.kwargs['task_id'] data = api.action_executions_list(self.request, task_id) return data
apache-2.0
1,265,832,847,031,783,200
33.2875
78
0.660955
false
4.016105
false
false
false
super7ramp/vertaal
versioncontrol/lib/types/subversion.py
2
8277
# -*- coding: utf-8 -*- """Copyright (c) 2009-2012 Sergio Gabriel Teves All rights reserved. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import os import os.path import string from django.utils.translation import ugettext as _ from django.utils.encoding import smart_text import versioncontrol.lib.browser as browser from versioncontrol.lib.support import svnclient import logging logger = logging.getLogger('vertaal.vcs') def need_repo(fn): def repo_fn(self, *args, **kw): try: info = self.client.info() if self.client.parseurl(info['url']) <> self.client.parseurl(self.url): self._switch_url() except svnclient.ClientError: self.init_repo() return fn(self, *args, **kw) return repo_fn def need_update(fn): def repo_fn(self, *args, **kw): self.update() return fn(self, *args, **kw) return repo_fn def encode_text(text, encoding='utf-8'): xv = filter(lambda x: x in string.printable, text) return smart_text(xv, encoding=encoding) # try: # return smart_text(text, encoding=encoding) # except UnicodeDecodeError: # xv = filter(lambda x: x in string.printable, text) # return smart_text(xv, encoding=encoding) class SubversionBrowser(browser.RepositoryBrowser): update_completed = 'update' relocate_error_msg = 'is already a working copy for a different URL' def __init__(self, location, url, folder, branch='trunk', auth=None): super(SubversionBrowser, self).__init__(location, url, folder, branch, auth) self.client = 
svnclient.Client(location=self._normalizePath(location)) self.client.set_trust_server_cert(True) if auth: self.set_login(auth) self.relocated = False def set_login(self, auth): self.client.set_username(auth.get_user()) self.client.set_password(auth.get_password()) def _notify(self, arg_dict): msg = None try: logger.debug(arg_dict) if arg_dict['action'] == self.update_completed: msg = _('At revision %s.') % arg_dict['revision'] elif arg_dict.has_key('path'): if arg_dict['action'] == svnclient.notify_action.added: self._send_callback(self.callback_on_file_add,arg_dict['path']) elif arg_dict['action'] == svnclient.notify_action.deleted: self._send_callback(self.callback_on_file_delete,arg_dict['path']) elif arg_dict['action'] == svnclient.notify_action.updated: self._send_callback(self.callback_on_file_update,arg_dict['path']) elif arg_dict['action'] == svnclient.notify_action.replaced: self._send_callback(self.callback_on_file_update,arg_dict['path']) msg = '%s %s' % (arg_dict['action'], os.path.basename(arg_dict['path'])) except KeyError: logger.error(arg_dict) if msg: self._send_callback(self.callback_on_action_notify, msg) def _parse_event_list(self, eventList): for event in eventList: self._notify(event) @property def _remote_path(self): if self.branch == u'trunk': repo_path = self.branch else: repo_path = "branches/%s" % self.branch return "%s/%s" % (repo_path, self.folder) def _switch_url(self): current_url = self.client.parseurl(self.client.get_remote_url()) new_url = self.client.parseurl(self.url) self._send_callback(self.callback_on_action_notify, _('URL has changed. 
Relocate from %(prior)s to %(actual)s') % {'prior': current_url, 'actual': new_url}) self.client.relocate(current_url, new_url) def _normalizePath(self, path): return os.path.normpath(os.path.normcase(path)); def init_repo(self): logger.debug("init") self._send_callback(self.callback_on_action_notify,_('Initializing repository %s') % self._remote_path) try: logger.debug("check path %s" % self.location) info = self.client.info() if self.client.parseurl(info['url']) <> self.client.parseurl(self.url): self._switch_url() except svnclient.ClientError, e: logger.debug(e) logger.debug("Checkout %s on %s" % (self.url + self._remote_path, self.location)) rev = 0 try: rev, eventList = self.client.checkout(url = self.url + self._remote_path) self._parse_event_list(eventList) except svnclient.ClientError, e: # TODO handle relocate error more effectivly if e.message.find(self.relocate_error_msg): logger.debug("Must relocate") self._switch_url() rev = self.update() else: raise logger.debug("end") self._notify({'action': self.update_completed, 'revision': rev}) return rev @need_repo def cleanup(self): self.client.cleanup() @need_repo def update(self): self.cleanup() self._send_callback(self.callback_on_action_notify,_('Updating repository %s') % self._remote_path) rev = 0 try: rev, eventList = self.client.update(force=True) self._parse_event_list(eventList) except svnclient.ClientError, e: #if self._checkerror(e, 155000): # relocate logger.debug("Must relocate") self._switch_url() rev, eventList = self.client.update(force=True) self._parse_event_list(eventList) self._notify({'action': self.update_completed, 'revision': rev}) return rev @need_repo def revert(self, path=None): try: if path is not None: self.client.revert([path]) else: filelist = self.client.status() if filelist is not None: files = [] for event in filelist: files.append(event['path']) self.client.revert(files) except Exception, e: logger.error("Revert %s failed: %s" % (self.location, str(e))) pass def 
_checkin(self, msg): filelist = self.client.status() files = [] if filelist is not None: for event in filelist: if event['action'] == svnclient.notify_action.noadded: files.append(event['path']) if len(files) > 0: self.client.add(files) rev = self.client.commit(message=encode_text(msg)) return rev def submit(self, auth, files, msg): if auth: self.set_login(auth) logger.debug("Perform submit %s (%s) [%s]" % (self.location, files, msg)) self._send_callback(self.callback_on_action_notify,_('Checking in')) rev = 0 try: rev = self._checkin(msg) except svnclient.ClientError, e: logger.debug(str(e)) logger.warn(str(e)) self.cleanup() try: rev = self._checkin(msg) except: raise except: raise self._notify({'action': self.update_completed, 'revision': rev}) return rev
gpl-3.0
-4,671,312,419,987,366,000
36.116592
115
0.567718
false
4.027737
false
false
false
UAVCAN/gui_tool
uavcan_gui_tool/widgets/subscriber.py
1
11412
# # Copyright (C) 2016 UAVCAN Development Team <uavcan.org> # # This software is distributed under the terms of the MIT License. # # Author: Pavel Kirienko <pavel.kirienko@zubax.com> # import time import uavcan import logging import queue from PyQt5.QtWidgets import QWidget, QDialog, QPlainTextEdit, QSpinBox, QHBoxLayout, QVBoxLayout, QComboBox, \ QCompleter, QLabel from PyQt5.QtCore import Qt, QTimer from . import CommitableComboBoxWithHistory, make_icon_button, get_monospace_font, show_error, FilterBar logger = logging.getLogger(__name__) class QuantityDisplay(QWidget): def __init__(self, parent, quantity_name, units_of_measurement): super(QuantityDisplay, self).__init__(parent) self._label = QLabel('?', self) layout = QHBoxLayout(self) layout.addStretch(1) layout.addWidget(QLabel(quantity_name, self)) layout.addWidget(self._label) layout.addWidget(QLabel(units_of_measurement, self)) layout.addStretch(1) layout.setContentsMargins(0, 0, 0, 0) self.setLayout(layout) def set(self, value): self._label.setText(str(value)) class RateEstimator: def __init__(self, update_interval=0.5, averaging_period=4): self._update_interval = update_interval self._estimate_lifetime = update_interval * averaging_period self._averaging_period = averaging_period self._hist = [] self._checkpoint_ts = 0 self._events_since_checkpoint = 0 self._estimate_expires_at = time.monotonic() def register_event(self, timestamp): self._events_since_checkpoint += 1 dt = timestamp - self._checkpoint_ts if dt >= self._update_interval: # Resetting the stat if expired mono_ts = time.monotonic() expired = mono_ts > self._estimate_expires_at self._estimate_expires_at = mono_ts + self._estimate_lifetime if expired: self._hist = [] elif len(self._hist) >= self._averaging_period: self._hist.pop() # Updating the history self._hist.insert(0, self._events_since_checkpoint / dt) self._checkpoint_ts = timestamp self._events_since_checkpoint = 0 def get_rate_with_timestamp(self): if time.monotonic() <= 
self._estimate_expires_at: return (sum(self._hist) / len(self._hist)), self._checkpoint_ts class SubscriberWindow(QDialog): WINDOW_NAME_PREFIX = 'Subscriber' def __init__(self, parent, node, active_data_type_detector): super(SubscriberWindow, self).__init__(parent) self.setWindowTitle(self.WINDOW_NAME_PREFIX) self.setAttribute(Qt.WA_DeleteOnClose) # This is required to stop background timers! self._node = node self._active_data_type_detector = active_data_type_detector self._active_data_type_detector.message_types_updated.connect(self._update_data_type_list) self._message_queue = queue.Queue() self._subscriber_handle = None self._update_timer = QTimer(self) self._update_timer.setSingleShot(False) self._update_timer.timeout.connect(self._do_redraw) self._update_timer.start(100) self._log_viewer = QPlainTextEdit(self) self._log_viewer.setReadOnly(True) self._log_viewer.setLineWrapMode(QPlainTextEdit.NoWrap) self._log_viewer.setFont(get_monospace_font()) self._log_viewer.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn) try: self._log_viewer.setPlaceholderText('Received messages will be printed here in YAML format') except AttributeError: # Old PyQt pass self._num_rows_spinbox = QSpinBox(self) self._num_rows_spinbox.setToolTip('Number of rows to display; large number will impair performance') self._num_rows_spinbox.valueChanged.connect( lambda: self._log_viewer.setMaximumBlockCount(self._num_rows_spinbox.value())) self._num_rows_spinbox.setMinimum(1) self._num_rows_spinbox.setMaximum(1000000) self._num_rows_spinbox.setValue(100) self._num_errors = 0 self._num_messages_total = 0 self._num_messages_past_filter = 0 self._msgs_per_sec_estimator = RateEstimator() self._num_messages_total_label = QuantityDisplay(self, 'Total', 'msgs') self._num_messages_past_filter_label = QuantityDisplay(self, 'Accepted', 'msgs') self._msgs_per_sec_label = QuantityDisplay(self, 'Accepting', 'msg/sec') self._type_selector = CommitableComboBoxWithHistory(self) 
self._type_selector.setToolTip('Name of the message type to subscribe to') self._type_selector.setInsertPolicy(QComboBox.NoInsert) completer = QCompleter(self._type_selector) completer.setCaseSensitivity(Qt.CaseSensitive) completer.setModel(self._type_selector.model()) self._type_selector.setCompleter(completer) self._type_selector.on_commit = self._do_start self._type_selector.setFont(get_monospace_font()) self._type_selector.setSizeAdjustPolicy(QComboBox.AdjustToContents) self._type_selector.setFocus(Qt.OtherFocusReason) self._active_filter = None self._filter_bar = FilterBar(self) self._filter_bar.on_filter = self._install_filter self._start_stop_button = make_icon_button('video-camera', 'Begin subscription', self, checkable=True, on_clicked=self._toggle_start_stop) self._pause_button = make_icon_button('pause', 'Pause updates, non-displayed messages will be queued in memory', self, checkable=True) self._clear_button = make_icon_button('trash-o', 'Clear output and reset stat counters', self, on_clicked=self._do_clear) self._show_all_message_types = make_icon_button('puzzle-piece', 'Show all known message types, not only those that are ' 'currently being exchanged over the bus', self, checkable=True, on_clicked=self._update_data_type_list) layout = QVBoxLayout(self) controls_layout = QHBoxLayout(self) controls_layout.addWidget(self._start_stop_button) controls_layout.addWidget(self._pause_button) controls_layout.addWidget(self._clear_button) controls_layout.addWidget(self._filter_bar.add_filter_button) controls_layout.addWidget(self._show_all_message_types) controls_layout.addWidget(self._type_selector, 1) controls_layout.addWidget(self._num_rows_spinbox) layout.addLayout(controls_layout) layout.addWidget(self._filter_bar) layout.addWidget(self._log_viewer, 1) stats_layout = QHBoxLayout(self) stats_layout.addWidget(self._num_messages_total_label) stats_layout.addWidget(self._num_messages_past_filter_label) stats_layout.addWidget(self._msgs_per_sec_label) 
layout.addLayout(stats_layout) self.setLayout(layout) # Initial updates self._update_data_type_list() def _install_filter(self, f): self._active_filter = f def _apply_filter(self, yaml_message): """This function will throw if the filter expression is malformed!""" if self._active_filter is None: return True return self._active_filter.match(yaml_message) def _on_message(self, e): # Global statistics self._num_messages_total += 1 # Rendering and filtering try: text = uavcan.to_yaml(e) if not self._apply_filter(text): return except Exception as ex: self._num_errors += 1 text = '!!! [%d] MESSAGE PROCESSING FAILED: %s' % (self._num_errors, ex) else: self._num_messages_past_filter += 1 self._msgs_per_sec_estimator.register_event(e.transfer.ts_monotonic) # Sending the text for later rendering try: self._message_queue.put_nowait(text) except queue.Full: pass def _toggle_start_stop(self): try: if self._subscriber_handle is None: self._do_start() else: self._do_stop() finally: self._start_stop_button.setChecked(self._subscriber_handle is not None) def _do_stop(self): if self._subscriber_handle is not None: self._subscriber_handle.remove() self._subscriber_handle = None self._pause_button.setChecked(False) self.setWindowTitle(self.WINDOW_NAME_PREFIX) def _do_start(self): self._do_stop() self._do_clear() try: selected_type = self._type_selector.currentText().strip() if not selected_type: return data_type = uavcan.TYPENAMES[selected_type] except Exception as ex: show_error('Subscription error', 'Could not load requested data type', ex, self) return try: self._subscriber_handle = self._node.add_handler(data_type, self._on_message) except Exception as ex: show_error('Subscription error', 'Could not create requested subscription', ex, self) return self.setWindowTitle('%s [%s]' % (self.WINDOW_NAME_PREFIX, selected_type)) self._start_stop_button.setChecked(True) def _do_redraw(self): self._num_messages_total_label.set(self._num_messages_total) 
self._num_messages_past_filter_label.set(self._num_messages_past_filter) estimated_rate = self._msgs_per_sec_estimator.get_rate_with_timestamp() self._msgs_per_sec_label.set('N/A' if estimated_rate is None else ('%.0f' % estimated_rate[0])) if self._pause_button.isChecked(): return self._log_viewer.setUpdatesEnabled(False) while True: try: text = self._message_queue.get_nowait() except queue.Empty: break else: self._log_viewer.appendPlainText(text + '\n') self._log_viewer.setUpdatesEnabled(True) def _update_data_type_list(self): logger.info('Updating data type list') if self._show_all_message_types.isChecked(): items = self._active_data_type_detector.get_names_of_all_message_types_with_data_type_id() else: items = self._active_data_type_detector.get_names_of_active_messages() self._type_selector.clear() self._type_selector.addItems(items) def _do_clear(self): self._num_messages_total = 0 self._num_messages_past_filter = 0 self._do_redraw() self._log_viewer.clear() def closeEvent(self, qcloseevent): try: self._subscriber_handle.close() except Exception: pass super(SubscriberWindow, self).closeEvent(qcloseevent) @staticmethod def spawn(parent, node, active_data_type_detector): SubscriberWindow(parent, node, active_data_type_detector).show()
mit
6,253,379,612,114,448,000
38.082192
120
0.61926
false
3.961125
false
false
false
Kefkius/electrum-frc
gui/qt/main_window.py
1
114345
#!/usr/bin/env python # # Electrum - lightweight Bitcoin client # Copyright (C) 2012 thomasv@gitorious # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import sys, time, datetime, re, threading from electrum_frc.i18n import _, set_language from electrum_frc.util import print_error, print_msg import os.path, json, ast, traceback import webbrowser import shutil import StringIO import PyQt4 from PyQt4.QtGui import * from PyQt4.QtCore import * import PyQt4.QtCore as QtCore from electrum_frc.bitcoin import MIN_RELAY_TX_FEE, is_valid from electrum_frc.plugins import run_hook import icons_rc from electrum_frc.util import format_satoshis, NotEnoughFunds from electrum_frc import Transaction from electrum_frc import mnemonic from electrum_frc import util, bitcoin, commands, Interface, Wallet from electrum_frc import SimpleConfig, Wallet, WalletStorage from electrum_frc import Imported_Wallet from amountedit import AmountEdit, BTCAmountEdit, MyLineEdit from network_dialog import NetworkDialog from qrcodewidget import QRCodeWidget, QRDialog from qrtextedit import ScanQRTextEdit, ShowQRTextEdit from decimal import Decimal import httplib import socket import webbrowser import csv from electrum_frc import ELECTRUM_VERSION import re from util import MyTreeWidget, HelpButton, EnterButton, line_dialog, text_dialog, ok_cancel_buttons, close_button, WaitingDialog from util import filename_field, 
ok_cancel_buttons2, address_field from util import MONOSPACE_FONT class StatusBarButton(QPushButton): def __init__(self, icon, tooltip, func): QPushButton.__init__(self, icon, '') self.setToolTip(tooltip) self.setFlat(True) self.setMaximumWidth(25) self.clicked.connect(func) self.func = func self.setIconSize(QSize(25,25)) def keyPressEvent(self, e): if e.key() == QtCore.Qt.Key_Return: apply(self.func,()) default_column_widths = { "history":[40,140,350,140], "contacts":[350,330], "receive": [370,200,130] } # status of payment requests PR_UNPAID = 0 PR_EXPIRED = 1 PR_SENT = 2 # sent but not propagated PR_PAID = 3 # send and propagated PR_ERROR = 4 # could not parse pr_icons = { PR_UNPAID:":icons/unpaid.png", PR_PAID:":icons/confirmed.png", PR_EXPIRED:":icons/expired.png" } pr_tooltips = { PR_UNPAID:_('Unpaid'), PR_PAID:_('Paid'), PR_EXPIRED:_('Expired') } class ElectrumWindow(QMainWindow): labelsChanged = pyqtSignal() def __init__(self, config, network, gui_object): QMainWindow.__init__(self) self.config = config self.network = network self.gui_object = gui_object self.tray = gui_object.tray self.go_lite = gui_object.go_lite self.lite = None self.create_status_bar() self.need_update = threading.Event() self.decimal_point = config.get('decimal_point', 8) self.num_zeros = int(config.get('num_zeros',0)) self.invoices = {} self.completions = QStringListModel() self.tabs = tabs = QTabWidget(self) self.column_widths = self.config.get("column_widths_2", default_column_widths ) tabs.addTab(self.create_history_tab(), _('History') ) tabs.addTab(self.create_send_tab(), _('Send') ) tabs.addTab(self.create_receive_tab(), _('Receive') ) tabs.addTab(self.create_addresses_tab(), _('Addresses') ) tabs.addTab(self.create_contacts_tab(), _('Contacts') ) tabs.addTab(self.create_invoices_tab(), _('Invoices') ) tabs.addTab(self.create_console_tab(), _('Console') ) tabs.setMinimumSize(600, 400) tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding) self.setCentralWidget(tabs) 
try: self.setGeometry(*self.config.get("winpos-qt")) except: self.setGeometry(100, 100, 840, 400) if self.config.get("is_maximized"): self.showMaximized() self.setWindowIcon(QIcon(":icons/electrum.png")) self.init_menubar() QShortcut(QKeySequence("Ctrl+W"), self, self.close) QShortcut(QKeySequence("Ctrl+Q"), self, self.close) QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet) QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() - 1 )%tabs.count() )) QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() + 1 )%tabs.count() )) for i in range(tabs.count()): QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: tabs.setCurrentIndex(i)) self.connect(self, QtCore.SIGNAL('update_status'), self.update_status) self.connect(self, QtCore.SIGNAL('banner_signal'), lambda: self.console.showMessage(self.network.banner) ) self.connect(self, QtCore.SIGNAL('transaction_signal'), lambda: self.notify_transactions() ) self.connect(self, QtCore.SIGNAL('payment_request_ok'), self.payment_request_ok) self.connect(self, QtCore.SIGNAL('payment_request_error'), self.payment_request_error) self.labelsChanged.connect(self.update_tabs) self.history_list.setFocus(True) # network callbacks if self.network: self.network.register_callback('updated', lambda: self.need_update.set()) self.network.register_callback('banner', lambda: self.emit(QtCore.SIGNAL('banner_signal'))) self.network.register_callback('status', lambda: self.emit(QtCore.SIGNAL('update_status'))) self.network.register_callback('new_transaction', lambda: self.emit(QtCore.SIGNAL('transaction_signal'))) self.network.register_callback('stop', self.close) # set initial message self.console.showMessage(self.network.banner) self.wallet = None self.payment_request = None self.qr_window = None self.not_enough_funds = False self.pluginsdialog = None def update_account_selector(self): # account selector accounts = self.wallet.get_account_names() 
self.account_selector.clear() if len(accounts) > 1: self.account_selector.addItems([_("All accounts")] + accounts.values()) self.account_selector.setCurrentIndex(0) self.account_selector.show() else: self.account_selector.hide() def close_wallet(self): self.wallet.stop_threads() self.hide() run_hook('close_wallet') def load_wallet(self, wallet): import electrum_frc self.wallet = wallet self.update_wallet_format() # address used to create a dummy transaction and estimate transaction fee a = self.wallet.addresses(False) self.dummy_address = a[0] if a else None self.invoices = self.wallet.storage.get('invoices', {}) self.accounts_expanded = self.wallet.storage.get('accounts_expanded',{}) self.current_account = self.wallet.storage.get("current_account", None) title = 'Electrum-FRC ' + self.wallet.electrum_version + ' - ' + os.path.basename(self.wallet.storage.path) if self.wallet.is_watching_only(): title += ' [%s]' % (_('watching only')) self.setWindowTitle( title ) self.update_history_tab() self.show() self.update_wallet() # Once GUI has been initialized check if we want to announce something since the callback has been called before the GUI was initialized self.notify_transactions() self.update_account_selector() # update menus self.new_account_menu.setEnabled(self.wallet.can_create_accounts()) self.private_keys_menu.setEnabled(not self.wallet.is_watching_only()) self.password_menu.setEnabled(self.wallet.can_change_password()) self.seed_menu.setEnabled(self.wallet.has_seed()) self.mpk_menu.setEnabled(self.wallet.is_deterministic()) self.import_menu.setEnabled(self.wallet.can_import()) self.export_menu.setEnabled(self.wallet.can_export()) self.update_lock_icon() self.update_buttons_on_seed() self.update_console() self.clear_receive_tab() self.update_receive_tab() run_hook('load_wallet', wallet) def update_wallet_format(self): # convert old-format imported keys if self.wallet.imported_keys: password = self.password_dialog(_("Please enter your password in order to 
update imported keys")) try: self.wallet.convert_imported_keys(password) except: self.show_message("error") def open_wallet(self): wallet_folder = self.wallet.storage.path filename = unicode( QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder) ) if not filename: return try: storage = WalletStorage({'wallet_path': filename}) except Exception as e: self.show_message(str(e)) return if not storage.file_exists: self.show_message(_("File not found") + ' ' + filename) return # read wizard action try: wallet = Wallet(storage) except BaseException as e: QMessageBox.warning(None, _('Warning'), str(e), _('OK')) return action = wallet.get_action() # ask for confirmation if action is not None: if not self.question(_("This file contains an incompletely created wallet.\nDo you want to complete its creation now?")): return # close current wallet self.close_wallet() # run wizard if action is not None: import installwizard wizard = installwizard.InstallWizard(self.config, self.network, storage) try: wallet = wizard.run(action) except BaseException as e: traceback.print_exc(file=sys.stdout) QMessageBox.information(None, _('Error'), str(e), _('OK')) return if not wallet: return else: wallet.start_threads(self.network) # load new wallet in gui self.load_wallet(wallet) def backup_wallet(self): import shutil path = self.wallet.storage.path wallet_folder = os.path.dirname(path) filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder) ) if not filename: return new_path = os.path.join(wallet_folder, filename) if new_path != path: try: shutil.copy2(path, new_path) QMessageBox.information(None,"Wallet backup created", _("A copy of your wallet file was created in")+" '%s'" % str(new_path)) except (IOError, os.error), reason: QMessageBox.critical(None,"Unable to create backup", _("Electrum-FRC was unable to copy your wallet file to the specified location.")+"\n" + str(reason)) def new_wallet(self): import 
installwizard wallet_folder = os.path.dirname(os.path.abspath(self.wallet.storage.path)) i = 1 while True: filename = "wallet_%d"%i if filename in os.listdir(wallet_folder): i += 1 else: break filename = line_dialog(self, _('New Wallet'), _('Enter file name') + ':', _('OK'), filename) if not filename: return full_path = os.path.join(wallet_folder, filename) storage = WalletStorage({'wallet_path': full_path}) if storage.file_exists: QMessageBox.critical(None, "Error", _("File exists")) return if self.wallet: self.close_wallet() wizard = installwizard.InstallWizard(self.config, self.network, storage) wallet = wizard.run('new') if wallet: self.load_wallet(wallet) def init_menubar(self): menubar = QMenuBar() file_menu = menubar.addMenu(_("&File")) file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open) file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New) file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs) file_menu.addAction(_("&Quit"), self.close) wallet_menu = menubar.addMenu(_("&Wallet")) wallet_menu.addAction(_("&New contact"), self.new_contact_dialog) self.new_account_menu = wallet_menu.addAction(_("&New account"), self.new_account_dialog) wallet_menu.addSeparator() self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog) self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog) self.mpk_menu = wallet_menu.addAction(_("&Master Public Keys"), self.show_master_public_keys) wallet_menu.addSeparator() labels_menu = wallet_menu.addMenu(_("&Labels")) labels_menu.addAction(_("&Import"), self.do_import_labels) labels_menu.addAction(_("&Export"), self.do_export_labels) self.private_keys_menu = wallet_menu.addMenu(_("&Private keys")) self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog) self.import_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey) self.export_menu = 
self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog) wallet_menu.addAction(_("&Export History"), self.export_history_dialog) tools_menu = menubar.addMenu(_("&Tools")) # Settings / Preferences are all reserved keywords in OSX using this as work around tools_menu.addAction(_("Electrum-FRC preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog) tools_menu.addAction(_("&Network"), self.run_network_dialog) tools_menu.addAction(_("&Plugins"), self.plugins_dialog) tools_menu.addSeparator() tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message) tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message) tools_menu.addSeparator() csv_transaction_menu = tools_menu.addMenu(_("&Create transaction")) csv_transaction_menu.addAction(_("&From CSV file"), self.do_process_from_csv_file) csv_transaction_menu.addAction(_("&From CSV text"), self.do_process_from_csv_text) raw_transaction_menu = tools_menu.addMenu(_("&Load transaction")) raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file) raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text) raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid) raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode) self.raw_transaction_menu = raw_transaction_menu help_menu = menubar.addMenu(_("&Help")) help_menu.addAction(_("&About"), self.show_about) help_menu.addAction(_("&Official website"), lambda: webbrowser.open("http://electrum.org")) help_menu.addSeparator() help_menu.addAction(_("&Documentation"), lambda: webbrowser.open("http://electrum.orain.org/")).setShortcut(QKeySequence.HelpContents) help_menu.addAction(_("&Report Bug"), self.show_report_bug) self.setMenuBar(menubar) def show_about(self): QMessageBox.about(self, "Electrum-FRC", _("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" + _("Electrum-FRC's focus is speed, with low resource usage and 
def show_report_bug(self):
    """Point the user at the upstream GitHub issue tracker."""
    QMessageBox.information(self, "Electrum-FRC - " + _("Reporting Bugs"),
        _("Please report any bugs as issues on github:")+" <a href=\"https://github.com/spesmilo/electrum/issues\">https://github.com/spesmilo/electrum/issues</a>")

def notify_transactions(self):
    """Pop desktop notifications for incoming transactions queued by the network layer."""
    if not self.network or not self.network.is_connected():
        return
    print_error("Notifying GUI")
    if len(self.network.pending_transactions_for_notifications) > 0:
        # Combine the transactions if there are more then three
        tx_amount = len(self.network.pending_transactions_for_notifications)
        if(tx_amount >= 3):
            total_amount = 0
            for tx in self.network.pending_transactions_for_notifications:
                is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                if(v > 0):
                    total_amount += v
            self.notify(_("%(txs)s new transactions received. Total amount received in the new transactions %(amount)s %(unit)s")
                        % { 'txs' : tx_amount, 'amount' : self.format_amount(total_amount), 'unit' : self.base_unit()})
            self.network.pending_transactions_for_notifications = []
        else:
            for tx in self.network.pending_transactions_for_notifications:
                if tx:
                    self.network.pending_transactions_for_notifications.remove(tx)
                    is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                    if(v > 0):
                        self.notify(_("New transaction received. %(amount)s %(unit)s") % { 'amount' : self.format_amount(v), 'unit' : self.base_unit()})

def notify(self, message):
    """Show a tray balloon message, if a system tray icon exists."""
    if self.tray:
        self.tray.showMessage("Electrum-FRC", message, QSystemTrayIcon.Information, 20000)

# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
    """Open-file dialog that remembers the last used directory in config."""
    directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
    fileName = unicode( QFileDialog.getOpenFileName(self, title, directory, filter) )
    if fileName and directory != os.path.dirname(fileName):
        self.config.set_key('io_dir', os.path.dirname(fileName), True)
    return fileName

def getSaveFileName(self, title, filename, filter = ""):
    """Save-file dialog that remembers the last used directory in config."""
    directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
    path = os.path.join( directory, filename )
    fileName = unicode( QFileDialog.getSaveFileName(self, title, path, filter) )
    if fileName and directory != os.path.dirname(fileName):
        self.config.set_key('io_dir', os.path.dirname(fileName), True)
    return fileName

def close(self):
    """Close the main window, tearing down the QR sub-window first."""
    if self.qr_window:
        self.qr_window.close()
    QMainWindow.close(self)
    run_hook('close_main_window')

def connect_slots(self, sender):
    """Wire the app-level timer signal into this window."""
    self.connect(sender, QtCore.SIGNAL('timersignal'), self.timer_actions)
    self.previous_payto_e = ''

def timer_actions(self):
    """Periodic refresh driven by the timer signal."""
    if self.need_update.is_set():
        self.update_wallet()
        self.need_update.clear()
    run_hook('timer_actions')

def format_amount(self, x, is_diff=False, whitespaces=False):
    """Render an integer amount using the configured decimal point and zero padding."""
    return format_satoshis(x, is_diff, self.num_zeros, self.decimal_point, whitespaces)

def get_decimal_point(self):
    return self.decimal_point

def base_unit(self):
    """Return the display unit implied by the configured decimal point."""
    assert self.decimal_point in [2, 5, 8]
    if self.decimal_point == 2:
        return 'uFRC'
    if self.decimal_point == 5:
        return 'mFRC'
    if self.decimal_point == 8:
        return 'FRC'
    raise Exception('Unknown base unit')

def update_status(self):
    """Refresh the status-bar balance text and the connection icon."""
    if not self.wallet:
        return
    if self.network is None or not self.network.is_running():
        text = _("Offline")
        icon = QIcon(":icons/status_disconnected.png")
    elif self.network.is_connected():
        server_lag = self.network.get_local_height() - self.network.get_server_height()
        if not self.wallet.up_to_date:
            text = _("Synchronizing...")
            icon = QIcon(":icons/status_waiting.png")
        elif server_lag > 1:
            text = _("Server is lagging (%d blocks)"%server_lag)
            icon = QIcon(":icons/status_lagging.png")
        else:
            use_height = self.network.get_local_height()
            c, u = self.wallet.get_account_balance(self.current_account, use_height)
            text = _( "Balance" ) + ": %s "%( self.format_amount(c) ) + self.base_unit()
            if u:
                text += " [%s unconfirmed]"%( self.format_amount(u,True).strip() )
            # append fiat balance and price from exchange rate plugin
            r = {}
            run_hook('get_fiat_status_text', c+u, r)
            quote = r.get(0)
            if quote:
                text += "%s"%quote
            if self.tray:
                self.tray.setToolTip(text)
            icon = QIcon(":icons/status_connected.png")
    else:
        text = _("Not connected")
        icon = QIcon(":icons/status_disconnected.png")
    self.balance_label.setText(text)
    self.status_button.setIcon( icon )
def update_wallet(self):
    """Refresh status, and the tab views once the wallet is synced (or we are offline)."""
    self.update_status()
    if self.wallet.up_to_date or not self.network or not self.network.is_connected():
        self.update_tabs()

def update_tabs(self):
    """Redraw every data tab."""
    self.update_history_tab()
    self.update_receive_tab()
    self.update_address_tab()
    self.update_contacts_tab()
    self.update_completions()
    self.update_invoices_tab()

def create_history_tab(self):
    """Build the history tree widget and hook up its signals."""
    self.history_list = l = MyTreeWidget(self)
    l.setColumnCount(5)
    for i,width in enumerate(self.column_widths['history']):
        l.setColumnWidth(i, width)
    l.setHeaderLabels( [ '', _('Date'), _('Description') , _('Amount'), _('Balance')] )
    l.itemDoubleClicked.connect(self.tx_label_clicked)
    l.itemChanged.connect(self.tx_label_changed)
    l.customContextMenuRequested.connect(self.create_history_menu)
    return l

def create_history_menu(self, position):
    """Context menu for a history entry (copy id, details, edit label, block explorer)."""
    self.history_list.selectedIndexes()
    item = self.history_list.currentItem()
    be = self.config.get('block_explorer', 'Coinplorer')
    if be == 'Coinplorer':
        block_explorer = 'https://coinplorer.com/FRC/Transactions/'
    #elif be == 'Blockr.io':
    #    block_explorer = 'https://blockr.io/tx/info/'
    #elif be == 'Insight.is':
    #    block_explorer = 'http://live.insight.is/tx/'
    #elif be == "Blocktrail.com":
    #    block_explorer = 'https://www.blocktrail.com/BTC/tx/'
    if not item:
        return
    tx_hash = str(item.data(0, Qt.UserRole).toString())
    if not tx_hash:
        return
    menu = QMenu()
    menu.addAction(_("Copy ID to Clipboard"), lambda: self.app.clipboard().setText(tx_hash))
    menu.addAction(_("Details"), lambda: self.show_transaction(self.wallet.transactions.get(tx_hash)))
    menu.addAction(_("Edit description"), lambda: self.tx_label_clicked(item,2))
    menu.addAction(_("View on block explorer"), lambda: webbrowser.open(block_explorer + tx_hash))
    # BUG FIX: `position` is in history_list viewport coordinates (this handler is
    # connected to history_list's customContextMenuRequested), so it must be mapped
    # through history_list, not contacts_list, or the menu pops up at the wrong spot.
    menu.exec_(self.history_list.viewport().mapToGlobal(position))

def show_transaction(self, tx):
    """Open the transaction-details dialog."""
    import transaction_dialog
    d = transaction_dialog.TxDialog(tx, self)
    d.exec_()

def tx_label_clicked(self, item, column):
    """Start inline editing of a history row's description column."""
    if column==2 and item.isSelected():
        self.is_edit=True
        item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
        self.history_list.editItem( item, column )
        item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
        self.is_edit=False

def tx_label_changed(self, item, column):
    """Persist an edited transaction label; grey out default labels."""
    if self.is_edit:
        return
    self.is_edit=True
    tx_hash = str(item.data(0, Qt.UserRole).toString())
    tx = self.wallet.transactions.get(tx_hash)
    text = unicode( item.text(2) )
    self.wallet.set_label(tx_hash, text)
    if text:
        item.setForeground(2, QBrush(QColor('black')))
    else:
        text = self.wallet.get_default_label(tx_hash)
        item.setText(2, text)
        item.setForeground(2, QBrush(QColor('gray')))
    self.is_edit=False

def edit_label(self, is_recv):
    """Start inline editing of the label column in the address or contacts list."""
    l = self.address_list if is_recv else self.contacts_list
    item = l.currentItem()
    item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
    l.editItem( item, 1 )
    item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
def address_label_clicked(self, item, column, l, column_addr, column_label):
    """Begin inline editing of an address label, if the row is marked editable."""
    if column == column_label and item.isSelected():
        is_editable = item.data(0, 32).toBool()
        if not is_editable:
            return
        addr = unicode( item.text(column_addr) )
        label = unicode( item.text(column_label) )
        item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
        l.editItem( item, column )
        item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)

def address_label_changed(self, item, column, l, column_addr, column_label):
    """Persist an edited address label and refresh dependent views."""
    if column == column_label:
        addr = unicode( item.text(column_addr) )
        text = unicode( item.text(column_label) )
        is_editable = item.data(0, 32).toBool()
        if not is_editable:
            return
        changed = self.wallet.set_label(addr, text)
        if changed:
            self.update_history_tab()
            self.update_completions()
        self.current_item_changed(item)
    run_hook('item_changed', item, column)

def current_item_changed(self, a):
    run_hook('current_item_changed', a)

def format_time(self, timestamp):
    """Render a unix timestamp as 'YYYY-MM-DD HH:MM' (minute precision)."""
    try:
        time_str = datetime.datetime.fromtimestamp( timestamp).isoformat(' ')[:-3]
    except Exception:
        time_str = _("error")
    return time_str

def update_history_tab(self):
    """Rebuild the history list from the wallet's transaction history."""
    self.history_list.clear()
    for item in self.wallet.get_tx_history(self.current_account):
        tx_hash, conf, is_mine, value, fee, balance, timestamp = item
        time_str = _("unknown")
        if conf > 0:
            time_str = self.format_time(timestamp)
        # pick an icon matching the confirmation state
        if conf == -1:
            time_str = 'unverified'
            icon = QIcon(":icons/unconfirmed.png")
        elif conf == 0:
            time_str = 'pending'
            icon = QIcon(":icons/unconfirmed.png")
        elif conf < 6:
            icon = QIcon(":icons/clock%d.png"%conf)
        else:
            icon = QIcon(":icons/confirmed.png")
        if value is not None:
            v_str = self.format_amount(value, True, whitespaces=True)
        else:
            v_str = '--'
        balance_str = self.format_amount(balance, whitespaces=True)
        if tx_hash:
            label, is_default_label = self.wallet.get_label(tx_hash)
        else:
            label = _('Pruned transaction outputs')
            is_default_label = False
        item = QTreeWidgetItem( [ '', time_str, label, v_str, balance_str] )
        item.setFont(2, QFont(MONOSPACE_FONT))
        item.setFont(3, QFont(MONOSPACE_FONT))
        item.setFont(4, QFont(MONOSPACE_FONT))
        if value < 0:
            item.setForeground(3, QBrush(QColor("#BC1E1E")))
        if tx_hash:
            item.setData(0, Qt.UserRole, tx_hash)
            item.setToolTip(0, "%d %s\nTxId:%s" % (conf, _('Confirmations'), tx_hash) )
        if is_default_label:
            item.setForeground(2, QBrush(QColor('grey')))
        item.setIcon(0, icon)
        self.history_list.insertTopLevelItem(0,item)
    self.history_list.setCurrentItem(self.history_list.topLevelItem(0))
    run_hook('history_tab_update')

def create_receive_tab(self):
    """Build the Receive tab: address/message/amount fields, QR widget, saved requests."""
    w = QWidget()
    grid = QGridLayout(w)
    grid.setColumnMinimumWidth(3, 300)
    grid.setColumnStretch(5, 1)

    self.receive_address_e = QLineEdit()
    self.receive_address_e.setReadOnly(True)
    grid.addWidget(QLabel(_('Receiving address')), 0, 0)
    grid.addWidget(self.receive_address_e, 0, 1, 1, 3)
    self.receive_address_e.textChanged.connect(self.update_receive_qr)

    self.copy_button = QPushButton()
    self.copy_button.setIcon(QIcon(":icons/copy.png"))
    self.copy_button.clicked.connect(lambda: self.app.clipboard().setText(self.receive_address_e.text()))
    grid.addWidget(self.copy_button, 0, 4)

    self.receive_message_e = QLineEdit()
    grid.addWidget(QLabel(_('Message')), 1, 0)
    grid.addWidget(self.receive_message_e, 1, 1, 1, 3)
    self.receive_message_e.textChanged.connect(self.update_receive_qr)

    self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)
    grid.addWidget(QLabel(_('Requested amount')), 2, 0)
    grid.addWidget(self.receive_amount_e, 2, 1, 1, 2)
    self.receive_amount_e.textChanged.connect(self.update_receive_qr)

    self.save_request_button = QPushButton(_('Save'))
    self.save_request_button.clicked.connect(self.save_payment_request)
    grid.addWidget(self.save_request_button, 3, 1)
    clear_button = QPushButton(_('New'))
    clear_button.clicked.connect(self.new_receive_address)
    grid.addWidget(clear_button, 3, 2)
    grid.setRowStretch(4, 1)

    self.receive_qr = QRCodeWidget(fixedSize=200)
    grid.addWidget(self.receive_qr, 0, 5, 5, 2)
    self.receive_qr.mousePressEvent = lambda x: self.toggle_qr_window()
    grid.setRowStretch(5, 1)

    self.receive_requests_label = QLabel(_('Saved Requests'))
    self.receive_list = MyTreeWidget(self)
    self.receive_list.customContextMenuRequested.connect(self.receive_list_menu)
    self.receive_list.currentItemChanged.connect(self.receive_item_changed)
    self.receive_list.itemClicked.connect(self.receive_item_changed)
    self.receive_list.setHeaderLabels( [_('Date'), _('Account'), _('Address'), _('Message'), _('Amount')] )
    self.receive_list.setSortingEnabled(True)
    self.receive_list.setColumnWidth(0, 180)
    self.receive_list.hideColumn(1)     # the update will show it if necessary
    self.receive_list.setColumnWidth(2, 340)
    h = self.receive_list.header()
    h.setStretchLastSection(False)
    h.setResizeMode(3, QHeaderView.Stretch)
    grid.addWidget(self.receive_requests_label, 6, 0)
    grid.addWidget(self.receive_list, 7, 0, 1, 6)
    return w
def receive_item_changed(self, item):
    """Load the clicked saved request back into the receive-tab fields."""
    if item is None:
        return
    addr = str(item.text(2))
    req = self.receive_requests[addr]
    time, amount, message = req['time'], req['amount'], req['msg']
    self.receive_address_e.setText(addr)
    self.receive_message_e.setText(message)
    self.receive_amount_e.setAmount(amount)

def receive_list_delete(self, item):
    """Remove a saved payment request and refresh the tab."""
    addr = str(item.text(2))
    self.receive_requests.pop(addr)
    self.wallet.storage.put('receive_requests2', self.receive_requests)
    self.update_receive_tab()
    self.clear_receive_tab()

def receive_list_menu(self, position):
    """Context menu for a saved request row (copy address / delete)."""
    item = self.receive_list.itemAt(position)
    menu = QMenu()
    menu.addAction(_("Copy to clipboard"), lambda: self.app.clipboard().setText(str(item.text(2))))
    menu.addAction(_("Delete"), lambda: self.receive_list_delete(item))
    menu.exec_(self.receive_list.viewport().mapToGlobal(position))

def save_payment_request(self):
    """Store the current receive-tab fields as a saved payment request."""
    timestamp = int(time.time())
    addr = str(self.receive_address_e.text())
    amount = self.receive_amount_e.get_amount()
    message = unicode(self.receive_message_e.text())
    if not message and not amount:
        QMessageBox.warning(self, _('Error'), _('No message or amount'), _('OK'))
        return
    self.receive_requests = self.wallet.storage.get('receive_requests2',{})
    self.receive_requests[addr] = {'time':timestamp, 'amount':amount, 'msg':message}
    self.wallet.storage.put('receive_requests2', self.receive_requests)
    self.update_receive_tab()

def get_receive_address(self):
    """Return the first unused receiving address without a pending request, or None."""
    domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
    for addr in domain:
        if not self.wallet.history.get(addr) and addr not in self.receive_requests.keys():
            return addr

def new_receive_address(self):
    """Show a fresh receiving address, creating one (after confirmation) if needed."""
    addr = self.get_receive_address()
    if addr is None:
        if isinstance(self.wallet, Imported_Wallet):
            self.show_message(_('No more addresses in your wallet.'))
            return
        if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):
            return
        addr = self.wallet.create_new_address(self.current_account, False)
    self.set_receive_address(addr)
def set_receive_address(self, addr):
    """Display `addr` in the receive tab and reset message/amount fields."""
    self.receive_address_e.setText(addr)
    self.receive_message_e.setText('')
    self.receive_amount_e.setAmount(None)

def clear_receive_tab(self):
    """Reset the receive tab to the first unused address (or blank if none)."""
    self.receive_requests = self.wallet.storage.get('receive_requests2',{})
    domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
    for addr in domain:
        if not self.wallet.history.get(addr) and addr not in self.receive_requests.keys():
            break
    else:
        addr = ''
    self.receive_address_e.setText(addr)
    self.receive_message_e.setText('')
    self.receive_amount_e.setAmount(None)

def toggle_qr_window(self):
    """Show/hide the detached QR window, remembering its geometry."""
    import qrwindow
    if not self.qr_window:
        self.qr_window = qrwindow.QR_Window(self)
        self.qr_window.setVisible(True)
        self.qr_window_geometry = self.qr_window.geometry()
    else:
        if not self.qr_window.isVisible():
            self.qr_window.setVisible(True)
            self.qr_window.setGeometry(self.qr_window_geometry)
        else:
            self.qr_window_geometry = self.qr_window.geometry()
            self.qr_window.setVisible(False)
    self.update_receive_qr()

def receive_at(self, addr):
    """Switch to the receive tab showing `addr` (no-op for invalid addresses)."""
    if not bitcoin.is_address(addr):
        return
    self.tabs.setCurrentIndex(2)
    self.receive_address_e.setText(addr)

def update_receive_tab(self):
    """Refresh saved-request visibility, current address, and the request list."""
    self.receive_requests = self.wallet.storage.get('receive_requests2',{})
    # hide receive tab if no receive requests available
    b = len(self.receive_requests) > 0
    self.receive_list.setVisible(b)
    self.receive_requests_label.setVisible(b)
    # check if it is necessary to show the account
    self.receive_list.setColumnHidden(1, len(self.wallet.get_accounts()) == 1)
    # update the receive address if necessary
    current_address = self.receive_address_e.text()
    domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
    if not current_address in domain:
        addr = self.get_receive_address()
        if addr:
            self.set_receive_address(addr)
    # clear the list and fill it again
    self.receive_list.clear()
    for address, req in self.receive_requests.viewitems():
        timestamp, amount, message = req['time'], req['amount'], req['msg']
        # only show requests for the current account
        if address not in domain:
            continue
        date = self.format_time(timestamp)
        account = self.wallet.get_account_name(self.wallet.get_account_from_address(address))
        item = QTreeWidgetItem( [ date, account, address, message, self.format_amount(amount) if amount else ""])
        item.setFont(2, QFont(MONOSPACE_FONT))
        self.receive_list.addTopLevelItem(item)
def update_receive_qr(self):
    """Regenerate the receive QR code (freicoin: URI) from the current fields."""
    import urlparse, urllib
    addr = str(self.receive_address_e.text())
    amount = self.receive_amount_e.get_amount()
    message = unicode(self.receive_message_e.text()).encode('utf8')
    self.save_request_button.setEnabled((amount is not None) or (message != ""))
    if addr:
        query = []
        if amount:
            query.append('amount=%s'%format_satoshis(amount))
        if message:
            query.append('message=%s'%urllib.quote(message))
        p = urlparse.ParseResult(scheme='freicoin', netloc='', path=addr, params='', query='&'.join(query), fragment='')
        url = urlparse.urlunparse(p)
    else:
        url = ""
    self.receive_qr.setData(url)
    if self.qr_window:
        self.qr_window.set_content(addr, amount, message, url)

def create_send_tab(self):
    """Build the Send tab: pay-to/description/amount/fee fields plus live validation."""
    w = QWidget()
    self.send_grid = grid = QGridLayout(w)
    grid.setSpacing(8)
    grid.setColumnMinimumWidth(3,300)
    grid.setColumnStretch(5,1)
    grid.setRowStretch(8, 1)

    from paytoedit import PayToEdit
    self.amount_e = BTCAmountEdit(self.get_decimal_point)
    self.payto_e = PayToEdit(self)
    self.payto_help = HelpButton(_('Recipient of the funds.') + '\n\n' + _('You may enter a Freicoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a Freicoin address)'))
    grid.addWidget(QLabel(_('Pay to')), 1, 0)
    grid.addWidget(self.payto_e, 1, 1, 1, 3)
    grid.addWidget(self.payto_help, 1, 4)

    completer = QCompleter()
    completer.setCaseSensitivity(False)
    self.payto_e.setCompleter(completer)
    completer.setModel(self.completions)

    self.message_e = MyLineEdit()
    self.message_help = HelpButton(_('Description of the transaction (not mandatory).') + '\n\n' + _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.'))
    grid.addWidget(QLabel(_('Description')), 2, 0)
    grid.addWidget(self.message_e, 2, 1, 1, 3)
    grid.addWidget(self.message_help, 2, 4)

    self.from_label = QLabel(_('From'))
    grid.addWidget(self.from_label, 3, 0)
    self.from_list = MyTreeWidget(self)
    self.from_list.setColumnCount(2)
    self.from_list.setColumnWidth(0, 350)
    self.from_list.setColumnWidth(1, 50)
    self.from_list.setHeaderHidden(True)
    self.from_list.setMaximumHeight(80)
    self.from_list.setContextMenuPolicy(Qt.CustomContextMenu)
    self.from_list.customContextMenuRequested.connect(self.from_list_menu)
    grid.addWidget(self.from_list, 3, 1, 1, 3)
    self.set_pay_from([])

    self.amount_help = HelpButton(_('Amount to be sent.') + '\n\n' \
                                  + _('The amount will be displayed in red if you do not have enough funds in your wallet. Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') \
                                  + '\n\n' + _('Keyboard shortcut: type "!" to send all your coins.'))
    grid.addWidget(QLabel(_('Amount')), 4, 0)
    grid.addWidget(self.amount_e, 4, 1, 1, 2)
    grid.addWidget(self.amount_help, 4, 3)

    self.fee_e_label = QLabel(_('Fee'))
    self.fee_e = BTCAmountEdit(self.get_decimal_point)
    grid.addWidget(self.fee_e_label, 5, 0)
    grid.addWidget(self.fee_e, 5, 1, 1, 2)
    msg = _('Freicoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
          + _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
          + _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
    self.fee_e_help = HelpButton(msg)
    grid.addWidget(self.fee_e_help, 5, 3)
    self.update_fee_edit()

    self.send_button = EnterButton(_("Send"), self.do_send)
    grid.addWidget(self.send_button, 6, 1)
    b = EnterButton(_("Clear"), self.do_clear)
    grid.addWidget(b, 6, 2)
    self.payto_sig = QLabel('')
    grid.addWidget(self.payto_sig, 7, 0, 1, 4)
    w.setLayout(grid)

    def on_shortcut():
        # '!' shortcut: send the entire spendable balance minus the estimated fee
        sendable = self.get_sendable_balance()
        inputs = self.get_coins()
        for i in inputs:
            self.wallet.add_input_info(i)
        addr = self.payto_e.payto_address if self.payto_e.payto_address else self.dummy_address
        output = ('address', addr, sendable)
        dummy_tx = Transaction(inputs, [output])
        fee = self.wallet.estimated_fee(dummy_tx)
        self.amount_e.setAmount(max(0,sendable-fee))
        self.amount_e.textEdited.emit("")
        self.fee_e.setAmount(fee)

    self.amount_e.shortcut.connect(on_shortcut)

    def text_edited(is_fee):
        # revalidate amount/fee on every keystroke
        outputs = self.payto_e.get_outputs()
        amount = self.amount_e.get_amount()
        fee = self.fee_e.get_amount() if is_fee else None
        if amount is None:
            self.fee_e.setAmount(None)
            self.not_enough_funds = False
        else:
            if not outputs:
                addr = self.payto_e.payto_address if self.payto_e.payto_address else self.dummy_address
                outputs = [('address', addr, amount)]
            try:
                tx = self.wallet.make_unsigned_transaction(outputs, fee, coins = self.get_coins())
                self.not_enough_funds = False
            except NotEnoughFunds:
                self.not_enough_funds = True
            if not is_fee:
                fee = None if self.not_enough_funds else self.wallet.get_tx_fee(tx)
                self.fee_e.setAmount(fee)

    self.payto_e.textChanged.connect(lambda:text_edited(False))
    self.amount_e.textEdited.connect(lambda:text_edited(False))
    self.fee_e.textEdited.connect(lambda:text_edited(True))

    def entry_changed():
        # colour the fields red and show a status message when funds are insufficient
        if not self.not_enough_funds:
            palette = QPalette()
            palette.setColor(self.amount_e.foregroundRole(), QColor('black'))
            text = ""
        else:
            palette = QPalette()
            palette.setColor(self.amount_e.foregroundRole(), QColor('red'))
            text = _( "Not enough funds" )
            c, u = self.wallet.get_frozen_balance()
            if c+u:
                text += ' (' + self.format_amount(c+u).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
        self.statusBar().showMessage(text)
        self.amount_e.setPalette(palette)
        self.fee_e.setPalette(palette)

    self.amount_e.textChanged.connect(entry_changed)
    self.fee_e.textChanged.connect(entry_changed)

    run_hook('create_send_tab', grid)
    return w
def update_fee_edit(self):
    """Show/hide the fee field depending on the 'can_edit_fees' setting."""
    b = self.config.get('can_edit_fees', False)
    self.fee_e.setVisible(b)
    self.fee_e_label.setVisible(b)
    self.fee_e_help.setVisible(b)

def from_list_delete(self, item):
    """Remove a coin from the manual 'pay from' selection."""
    i = self.from_list.indexOfTopLevelItem(item)
    self.pay_from.pop(i)
    self.redraw_from_list()

def from_list_menu(self, position):
    item = self.from_list.itemAt(position)
    menu = QMenu()
    menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
    menu.exec_(self.from_list.viewport().mapToGlobal(position))

def set_pay_from(self, domain = None):
    """Set the coin selection for spending; empty list means 'use all coins'."""
    self.pay_from = [] if domain == [] else self.wallet.get_unspent_coins(domain, height=self.network.get_local_height())
    self.redraw_from_list()

def redraw_from_list(self):
    """Redraw the 'From' coin list; hidden when no manual selection exists."""
    self.from_list.clear()
    self.from_label.setHidden(len(self.pay_from) == 0)
    self.from_list.setHidden(len(self.pay_from) == 0)

    def format(x):
        h = x.get('prevout_hash')
        return h[0:8] + '...' + h[-8:] + ":%d"%x.get('prevout_n') + u'\t' + "%s"%x.get('address')

    for item in self.pay_from:
        self.from_list.addTopLevelItem(QTreeWidgetItem( [format(item), self.format_amount(item['value']) ]))

def update_completions(self):
    """Refresh the pay-to autocompletion entries from labelled contacts."""
    l = []
    for addr,label in self.wallet.labels.items():
        if addr in self.wallet.addressbook:
            l.append( label + ' <' + addr + '>')
    run_hook('update_completions', l)
    self.completions.setStringList(l)

def protected(func):
    # decorator: route the call through do_protect to ask for the password first
    return lambda s, *args: s.do_protect(func, args)

def read_send_tab(self):
    """Validate the send-tab fields; return (outputs, fee, label, coins) or None."""
    if self.payment_request and self.payment_request.has_expired():
        QMessageBox.warning(self, _('Error'), _('Payment request has expired'), _('OK'))
        return
    label = unicode( self.message_e.text() )
    if self.payment_request:
        outputs = self.payment_request.get_outputs()
    else:
        errors = self.payto_e.get_errors()
        if errors:
            self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
            return
        outputs = self.payto_e.get_outputs()
    if not outputs:
        QMessageBox.warning(self, _('Error'), _('No outputs'), _('OK'))
        return
    for _type, addr, amount in outputs:
        if addr is None:
            QMessageBox.warning(self, _('Error'), _('Freicoin Address is None'), _('OK'))
            return
        if _type == 'address' and not bitcoin.is_address(addr):
            QMessageBox.warning(self, _('Error'), _('Invalid Freicoin Address'), _('OK'))
            return
        if amount is None:
            QMessageBox.warning(self, _('Error'), _('Invalid Amount'), _('OK'))
            return
    fee = self.fee_e.get_amount()
    if fee is None:
        QMessageBox.warning(self, _('Error'), _('Invalid Fee'), _('OK'))
        return
    amount = sum(map(lambda x:x[2], outputs))
    confirm_amount = self.config.get('confirm_amount', 100000000)
    if amount >= confirm_amount:
        o = '\n'.join(map(lambda x:x[1], outputs))
        if not self.question(_("send %(amount)s to %(address)s?")%{ 'amount' : self.format_amount(amount) + ' '+ self.base_unit(), 'address' : o}):
            return
    coins = self.get_coins()
    return outputs, fee, label, coins

def do_send(self):
    """Build the transaction from the send tab, confirm the fee, and hand off to send_tx."""
    if run_hook('before_send'):
        return
    r = self.read_send_tab()
    if not r:
        return
    outputs, fee, label, coins = r
    try:
        tx = self.wallet.make_unsigned_transaction(outputs, fee, None, coins = coins)
        if not tx:
            raise BaseException(_("Insufficient funds"))
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        self.show_message(str(e))
        return
    if tx.get_fee() < MIN_RELAY_TX_FEE and tx.requires_fee(self.wallet.verifier):
        QMessageBox.warning(self, _('Error'), _("This transaction requires a higher fee, or it will not be propagated by the network."), _('OK'))
        return
    if not self.config.get('can_edit_fees', False):
        if not self.question(_("A fee of %(fee)s will be added to this transaction.\nProceed?")%{ 'fee' : self.format_amount(fee) + ' '+ self.base_unit()}):
            return
    else:
        confirm_fee = self.config.get('confirm_fee', 100000)
        if fee >= confirm_fee:
            if not self.question(_("The fee for this transaction seems unusually high.\nAre you really sure you want to pay %(fee)s in fees?")%{ 'fee' : self.format_amount(fee) + ' '+ self.base_unit()}):
                return
    self.send_tx(tx, label)

@protected
def send_tx(self, tx, label, password):
    """Sign `tx` in a worker thread, then broadcast (or show it if incomplete)."""
    self.send_button.setDisabled(True)
    # call hook to see if plugin needs gui interaction
    run_hook('send_tx', tx)

    def sign_thread():
        if self.wallet.is_watching_only():
            return tx
        self.wallet.sign_transaction(tx, password)
        return tx

    def sign_done(tx):
        if not tx.is_complete() or self.config.get('show_before_broadcast'):
            self.show_transaction(tx)
            self.do_clear()
            return
        if label:
            self.wallet.set_label(tx.hash(), label)
        self.broadcast_transaction(tx)

    # keep a reference to WaitingDialog or the gui might crash
    self.waiting_dialog = WaitingDialog(self, 'Signing..', sign_thread, sign_done, lambda: self.send_button.setDisabled(False))
    self.waiting_dialog.start()

def broadcast_transaction(self, tx):
    """Broadcast `tx` from a worker thread, handling BIP70 request bookkeeping."""

    def broadcast_thread():
        # non-GUI thread
        pr = self.payment_request
        if pr is None:
            return self.wallet.sendtx(tx)
        if pr.has_expired():
            self.payment_request = None
            return False, _("Payment request has expired")
        status, msg = self.wallet.sendtx(tx)
        if not status:
            return False, msg
        self.invoices[pr.get_id()] = (pr.get_domain(), pr.get_memo(), pr.get_amount(), pr.get_expiration_date(), PR_PAID, tx.hash())
        self.wallet.storage.put('invoices', self.invoices)
        self.payment_request = None
        refund_address = self.wallet.addresses()[0]
        ack_status, ack_msg = pr.send_ack(str(tx), refund_address)
        if ack_status:
            msg = ack_msg
        return status, msg

    def broadcast_done(status, msg):
        # GUI thread
        if status:
            QMessageBox.information(self, '', _('Payment sent.') + '\n' + msg, _('OK'))
            self.update_invoices_tab()
            self.do_clear()
        else:
            QMessageBox.warning(self, _('Error'), msg, _('OK'))
        self.send_button.setDisabled(False)

    self.waiting_dialog = WaitingDialog(self, 'Broadcasting..', broadcast_thread, broadcast_done)
    self.waiting_dialog.start()
def prepare_for_payment_request(self):
    """Freeze the send-tab fields while a BIP70 payment request is being fetched."""
    self.tabs.setCurrentIndex(1)
    self.payto_e.is_pr = True
    for e in [self.payto_e, self.amount_e, self.message_e]:
        e.setFrozen(True)
    for h in [self.payto_help, self.amount_help, self.message_help]:
        h.hide()
    self.payto_e.setText(_("please wait..."))
    return True

def payment_request_ok(self):
    """Populate the send tab from a verified payment request; record it as an invoice."""
    pr = self.payment_request
    pr_id = pr.get_id()
    if pr_id not in self.invoices:
        self.invoices[pr_id] = (pr.get_domain(), pr.get_memo(), pr.get_amount(), pr.get_expiration_date(), PR_UNPAID, None)
        self.wallet.storage.put('invoices', self.invoices)
        self.update_invoices_tab()
    else:
        print_error('invoice already in list')
    status = self.invoices[pr_id][4]
    if status == PR_PAID:
        self.do_clear()
        self.show_message("invoice already paid")
        self.payment_request = None
        return
    self.payto_help.show()
    self.payto_help.set_alt(lambda: self.show_pr_details(pr))
    if not pr.has_expired():
        self.payto_e.setGreen()
    else:
        self.payto_e.setExpired()
    self.payto_e.setText(pr.domain)
    self.amount_e.setText(self.format_amount(pr.get_amount()))
    self.message_e.setText(pr.get_memo())
    # signal to set fee
    self.amount_e.textEdited.emit("")

def payment_request_error(self):
    """Reset the send tab after a payment request failed to verify."""
    self.do_clear()
    self.show_message(self.payment_request.error)
    self.payment_request = None

def pay_from_URI(self, URI):
    """Fill the send tab from a 'freicoin:' URI, or fetch its BIP70 request.

    Fixes two defects in the original:
    - util.parse_URI() was called once *outside* the try/except and again
      inside it; the unguarded first call raised before the handler existed,
      so invalid URIs crashed instead of showing the warning dialog.
    - threading.Thread(...).start() was assigned to self.pr_thread, storing
      None; the thread handle is now kept before starting it.
    """
    if not URI:
        return
    try:
        address, amount, label, message, request_url = util.parse_URI(URI)
    except Exception as e:
        QMessageBox.warning(self, _('Error'), _('Invalid freicoin URI:') + '\n' + str(e), _('OK'))
        return
    self.tabs.setCurrentIndex(1)
    if not request_url:
        if label:
            if self.wallet.labels.get(address) != label:
                if self.question(_('Save label "%(label)s" for address %(address)s ?'%{'label':label,'address':address})):
                    if address not in self.wallet.addressbook and not self.wallet.is_mine(address):
                        self.wallet.addressbook.append(address)
                        self.wallet.set_label(address, label)
        else:
            label = self.wallet.labels.get(address)
        if address:
            self.payto_e.setText(label + ' <'+ address +'>' if label else address)
        if message:
            self.message_e.setText(message)
        if amount:
            self.amount_e.setAmount(amount)
            self.amount_e.textEdited.emit("")
        return

    from electrum_frc import paymentrequest
    def payment_request():
        self.payment_request = paymentrequest.PaymentRequest(self.config)
        self.payment_request.read(request_url)
        if self.payment_request.verify():
            self.emit(SIGNAL('payment_request_ok'))
        else:
            self.emit(SIGNAL('payment_request_error'))
    # keep the Thread object (start() returns None)
    self.pr_thread = threading.Thread(target=payment_request)
    self.pr_thread.start()
    self.prepare_for_payment_request()

def do_clear(self):
    """Reset the entire send tab to its idle state."""
    self.not_enough_funds = False
    self.payto_e.is_pr = False
    self.payto_sig.setVisible(False)
    for e in [self.payto_e, self.message_e, self.amount_e, self.fee_e]:
        e.setText('')
        e.setFrozen(False)
    for h in [self.payto_help, self.amount_help, self.message_help]:
        h.show()
    self.payto_help.set_alt(None)
    self.set_pay_from([])
    self.update_status()
    run_hook('do_clear')

def set_addrs_frozen(self, addrs, freeze):
    """Freeze or unfreeze the given addresses and redraw the address tab."""
    for addr in addrs:
        if not addr:
            continue
        if addr in self.wallet.frozen_addresses and not freeze:
            self.wallet.unfreeze(addr)
        elif addr not in self.wallet.frozen_addresses and freeze:
            self.wallet.freeze(addr)
    self.update_address_tab()
def create_list_tab(self, headers):
    "generic tab creation method"
    l = MyTreeWidget(self)
    l.setColumnCount( len(headers) )
    l.setHeaderLabels( headers )
    w = QWidget()
    vbox = QVBoxLayout()
    w.setLayout(vbox)
    vbox.setMargin(0)
    vbox.setSpacing(0)
    vbox.addWidget(l)
    buttons = QWidget()
    vbox.addWidget(buttons)
    return l, w

def create_addresses_tab(self):
    """Build the Addresses tab list and its signal wiring."""
    l, w = self.create_list_tab([ _('Address'), _('Label'), _('Balance'), _('Tx')])
    for i,width in enumerate(self.column_widths['receive']):
        l.setColumnWidth(i, width)
    l.setContextMenuPolicy(Qt.CustomContextMenu)
    l.customContextMenuRequested.connect(self.create_receive_menu)
    l.setSelectionMode(QAbstractItemView.ExtendedSelection)
    l.itemDoubleClicked.connect(lambda a, b: self.address_label_clicked(a,b,l,0,1))
    l.itemChanged.connect(lambda a,b: self.address_label_changed(a,b,l,0,1))
    l.currentItemChanged.connect(lambda a,b: self.current_item_changed(a))
    self.address_list = l
    return w

def save_column_widths(self):
    """Persist the user-resized column widths of the three list views."""
    self.column_widths["receive"] = []
    for i in range(self.address_list.columnCount() -1):
        self.column_widths["receive"].append(self.address_list.columnWidth(i))
    self.column_widths["history"] = []
    for i in range(self.history_list.columnCount() - 1):
        self.column_widths["history"].append(self.history_list.columnWidth(i))
    self.column_widths["contacts"] = []
    for i in range(self.contacts_list.columnCount() - 1):
        self.column_widths["contacts"].append(self.contacts_list.columnWidth(i))
    self.config.set_key("column_widths_2", self.column_widths, True)

def create_contacts_tab(self):
    """Build the Contacts tab list and its signal wiring."""
    l, w = self.create_list_tab([_('Address'), _('Label'), _('Tx')])
    l.setContextMenuPolicy(Qt.CustomContextMenu)
    l.customContextMenuRequested.connect(self.create_contact_menu)
    for i,width in enumerate(self.column_widths['contacts']):
        l.setColumnWidth(i, width)
    l.itemDoubleClicked.connect(lambda a, b: self.address_label_clicked(a,b,l,0,1))
    l.itemChanged.connect(lambda a,b: self.address_label_changed(a,b,l,0,1))
    self.contacts_list = l
    return w

def create_invoices_tab(self):
    """Build the Invoices tab list and its signal wiring."""
    l, w = self.create_list_tab([_('Date'), _('Requestor'), _('Memo'), _('Amount'), _('Status')])
    l.setColumnWidth(0, 150)
    l.setColumnWidth(1, 150)
    l.setColumnWidth(3, 150)
    l.setColumnWidth(4, 40)
    h = l.header()
    h.setStretchLastSection(False)
    h.setResizeMode(2, QHeaderView.Stretch)
    l.setContextMenuPolicy(Qt.CustomContextMenu)
    l.customContextMenuRequested.connect(self.create_invoice_menu)
    self.invoices_list = l
    return w

def update_invoices_tab(self):
    """Rebuild the invoice list from storage, marking stale entries expired."""
    invoices = self.wallet.storage.get('invoices', {})
    l = self.invoices_list
    l.clear()
    for key, value in sorted(invoices.items(), key=lambda x: -x[1][3]):
        domain, memo, amount, expiration_date, status, tx_hash = value
        if status == PR_UNPAID and expiration_date and expiration_date < time.time():
            status = PR_EXPIRED
        date_str = datetime.datetime.fromtimestamp(expiration_date).isoformat(' ')[:-3]
        item = QTreeWidgetItem( [ date_str, domain, memo, self.format_amount(amount, whitespaces=True), ''] )
        icon = QIcon(pr_icons.get(status))
        item.setIcon(4, icon)
        item.setToolTip(4, pr_tooltips.get(status,''))
        item.setData(0, 32, key)
        item.setFont(1, QFont(MONOSPACE_FONT))
        item.setFont(3, QFont(MONOSPACE_FONT))
        l.addTopLevelItem(item)
    l.setCurrentItem(l.topLevelItem(0))

def delete_imported_key(self, addr):
    """Remove an imported key from the wallet after confirmation."""
    if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
        self.wallet.delete_imported_key(addr)
        self.update_address_tab()
        self.update_history_tab()

def edit_account_label(self, k):
    """Prompt for and store a new label for account `k`."""
    text, ok = QInputDialog.getText(self, _('Rename account'), _('Name') + ':', text = self.wallet.labels.get(k,''))
    if ok:
        label = unicode(text)
        self.wallet.set_label(k,label)
        self.update_address_tab()

def account_set_expanded(self, item, k, b):
    """Expand/collapse an account row and remember the state."""
    item.setExpanded(b)
    self.accounts_expanded[k] = b

def create_account_menu(self, position, k, item):
    """Context menu for an account header row in the address list."""
    menu = QMenu()
    exp = item.isExpanded()
    menu.addAction(_("Minimize") if exp else _("Maximize"), lambda: self.account_set_expanded(item, k, not exp))
    menu.addAction(_("Rename"), lambda: self.edit_account_label(k))
    if self.wallet.seed_version > 4:
        menu.addAction(_("View details"), lambda: self.show_account_details(k))
    if self.wallet.account_is_pending(k):
        menu.addAction(_("Delete"), lambda: self.delete_pending_account(k))
    menu.exec_(self.address_list.viewport().mapToGlobal(position))
menu.addAction(_("Minimize") if exp else _("Maximize"), lambda: self.account_set_expanded(item, k, not exp)) menu.addAction(_("Rename"), lambda: self.edit_account_label(k)) if self.wallet.seed_version > 4: menu.addAction(_("View details"), lambda: self.show_account_details(k)) if self.wallet.account_is_pending(k): menu.addAction(_("Delete"), lambda: self.delete_pending_account(k)) menu.exec_(self.address_list.viewport().mapToGlobal(position)) def delete_pending_account(self, k): self.wallet.delete_pending_account(k) self.update_address_tab() self.update_account_selector() def create_receive_menu(self, position): # fixme: this function apparently has a side effect. # if it is not called the menu pops up several times #self.address_list.selectedIndexes() selected = self.address_list.selectedItems() multi_select = len(selected) > 1 addrs = [unicode(item.text(0)) for item in selected] if not multi_select: item = self.address_list.itemAt(position) if not item: return addr = addrs[0] if not is_valid(addr): k = str(item.data(0,32).toString()) if k: self.create_account_menu(position, k, item) else: item.setExpanded(not item.isExpanded()) return menu = QMenu() if not multi_select: menu.addAction(_("Copy to clipboard"), lambda: self.app.clipboard().setText(addr)) menu.addAction(_("Request payment"), lambda: self.receive_at(addr)) menu.addAction(_("Edit label"), lambda: self.edit_label(True)) menu.addAction(_("Public keys"), lambda: self.show_public_keys(addr)) if self.wallet.can_export(): menu.addAction(_("Private key"), lambda: self.show_private_key(addr)) if not self.wallet.is_watching_only(): menu.addAction(_("Sign/verify message"), lambda: self.sign_verify_message(addr)) menu.addAction(_("Encrypt/decrypt message"), lambda: self.encrypt_message(addr)) if self.wallet.is_imported(addr): menu.addAction(_("Remove from wallet"), lambda: self.delete_imported_key(addr)) if any(addr not in self.wallet.frozen_addresses for addr in addrs): menu.addAction(_("Freeze"), lambda: 
self.set_addrs_frozen(addrs, True)) if any(addr in self.wallet.frozen_addresses for addr in addrs): menu.addAction(_("Unfreeze"), lambda: self.set_addrs_frozen(addrs, False)) def can_send(addr): return addr not in self.wallet.frozen_addresses and self.wallet.get_addr_balance(addr) != (0, 0) if any(can_send(addr) for addr in addrs): menu.addAction(_("Send From"), lambda: self.send_from_addresses(addrs)) run_hook('receive_menu', menu, addrs) menu.exec_(self.address_list.viewport().mapToGlobal(position)) def get_sendable_balance(self): return sum(map(lambda x:x['value'], self.get_coins())) def get_coins(self): if self.pay_from: return self.pay_from else: domain = self.wallet.get_account_addresses(self.current_account) for i in self.wallet.frozen_addresses: if i in domain: domain.remove(i) return self.wallet.get_unspent_coins(domain, height=self.network.get_local_height()) def send_from_addresses(self, addrs): self.set_pay_from( addrs ) self.tabs.setCurrentIndex(1) def payto(self, addr): if not addr: return label = self.wallet.labels.get(addr) m_addr = label + ' <' + addr + '>' if label else addr self.tabs.setCurrentIndex(1) self.payto_e.setText(m_addr) self.amount_e.setFocus() def delete_contact(self, x): if self.question(_("Do you want to remove")+" %s "%x +_("from your list of contacts?")): self.wallet.delete_contact(x) self.wallet.set_label(x, None) self.update_history_tab() self.update_contacts_tab() self.update_completions() def create_contact_menu(self, position): item = self.contacts_list.itemAt(position) menu = QMenu() if not item: menu.addAction(_("New contact"), lambda: self.new_contact_dialog()) else: addr = unicode(item.text(0)) label = unicode(item.text(1)) is_editable = item.data(0,32).toBool() payto_addr = item.data(0,33).toString() menu.addAction(_("Copy to Clipboard"), lambda: self.app.clipboard().setText(addr)) menu.addAction(_("Pay to"), lambda: self.payto(payto_addr)) menu.addAction(_("QR code"), lambda: self.show_qrcode("freicoin:" + addr, 
_("Address"))) if is_editable: menu.addAction(_("Edit label"), lambda: self.edit_label(False)) menu.addAction(_("Delete"), lambda: self.delete_contact(addr)) run_hook('create_contact_menu', menu, item) menu.exec_(self.contacts_list.viewport().mapToGlobal(position)) def delete_invoice(self, key): self.invoices.pop(key) self.wallet.storage.put('invoices', self.invoices) self.update_invoices_tab() def show_invoice(self, key): from electrum_frc.paymentrequest import PaymentRequest domain, memo, value, expiration, status, tx_hash = self.invoices[key] pr = PaymentRequest(self.config) pr.read_file(key) pr.domain = domain pr.verify() self.show_pr_details(pr, tx_hash) def show_pr_details(self, pr, tx_hash=None): msg = 'Domain: ' + pr.domain msg += '\nStatus: ' + pr.get_status() msg += '\nMemo: ' + pr.get_memo() msg += '\nPayment URL: ' + pr.payment_url msg += '\n\nOutputs:\n' + '\n'.join(map(lambda x: x[1] + ' ' + self.format_amount(x[2])+ self.base_unit(), pr.get_outputs())) if tx_hash: msg += '\n\nTransaction ID: ' + tx_hash QMessageBox.information(self, 'Invoice', msg , 'OK') def do_pay_invoice(self, key): from electrum_frc.paymentrequest import PaymentRequest domain, memo, value, expiration, status, tx_hash = self.invoices[key] pr = PaymentRequest(self.config) pr.read_file(key) pr.domain = domain self.payment_request = pr self.prepare_for_payment_request() if pr.verify(): self.payment_request_ok() else: self.payment_request_error() def create_invoice_menu(self, position): item = self.invoices_list.itemAt(position) if not item: return key = str(item.data(0, 32).toString()) domain, memo, value, expiration, status, tx_hash = self.invoices[key] menu = QMenu() menu.addAction(_("Details"), lambda: self.show_invoice(key)) if status == PR_UNPAID: menu.addAction(_("Pay Now"), lambda: self.do_pay_invoice(key)) menu.addAction(_("Delete"), lambda: self.delete_invoice(key)) menu.exec_(self.invoices_list.viewport().mapToGlobal(position)) def update_address_tab(self): l = 
self.address_list # extend the syntax for consistency l.addChild = l.addTopLevelItem l.insertChild = l.insertTopLevelItem l.clear() accounts = self.wallet.get_accounts() if self.current_account is None: account_items = sorted(accounts.items()) else: account_items = [(self.current_account, accounts.get(self.current_account))] for k, account in account_items: if len(accounts) > 1: name = self.wallet.get_account_name(k) c,u = self.wallet.get_account_balance(k) account_item = QTreeWidgetItem( [ name, '', self.format_amount(c+u), ''] ) l.addTopLevelItem(account_item) account_item.setExpanded(self.accounts_expanded.get(k, True)) account_item.setData(0, 32, k) else: account_item = l sequences = [0,1] if account.has_change() else [0] for is_change in sequences: if len(sequences) > 1: name = _("Receiving") if not is_change else _("Change") seq_item = QTreeWidgetItem( [ name, '', '', '', ''] ) account_item.addChild(seq_item) if not is_change: seq_item.setExpanded(True) else: seq_item = account_item used_item = QTreeWidgetItem( [ _("Used"), '', '', '', ''] ) used_flag = False addr_list = account.get_addresses(is_change) for address in addr_list: num, is_used = self.wallet.is_used(address) label = self.wallet.labels.get(address,'') c, u = self.wallet.get_addr_balance(address) balance = self.format_amount(c + u) item = QTreeWidgetItem( [ address, label, balance, "%d"%num] ) item.setFont(0, QFont(MONOSPACE_FONT)) item.setData(0, 32, True) # label can be edited if address in self.wallet.frozen_addresses: item.setBackgroundColor(0, QColor('lightblue')) if self.wallet.is_beyond_limit(address, account, is_change): item.setBackgroundColor(0, QColor('red')) if is_used: if not used_flag: seq_item.insertChild(0, used_item) used_flag = True used_item.addChild(item) else: seq_item.addChild(item) # we use column 1 because column 0 may be hidden l.setCurrentItem(l.topLevelItem(0),1) def update_contacts_tab(self): l = self.contacts_list l.clear() for address in self.wallet.addressbook: label 
= self.wallet.labels.get(address,'') n = self.wallet.get_num_tx(address) item = QTreeWidgetItem( [ address, label, "%d"%n] ) item.setFont(0, QFont(MONOSPACE_FONT)) # 32 = label can be edited (bool) item.setData(0,32, True) # 33 = payto string item.setData(0,33, address) l.addTopLevelItem(item) run_hook('update_contacts_tab', l) l.setCurrentItem(l.topLevelItem(0)) def create_console_tab(self): from console import Console self.console = console = Console() return console def update_console(self): console = self.console console.history = self.config.get("console-history",[]) console.history_index = len(console.history) console.updateNamespace({'wallet' : self.wallet, 'network' : self.network, 'gui':self}) console.updateNamespace({'util' : util, 'bitcoin':bitcoin}) c = commands.Commands(self.wallet, self.network, lambda: self.console.set_json(True)) methods = {} def mkfunc(f, method): return lambda *args: apply( f, (method, args, self.password_dialog )) for m in dir(c): if m[0]=='_' or m in ['network','wallet']: continue methods[m] = mkfunc(c._run, m) console.updateNamespace(methods) def change_account(self,s): if s == _("All accounts"): self.current_account = None else: accounts = self.wallet.get_account_names() for k, v in accounts.items(): if v == s: self.current_account = k self.update_history_tab() self.update_status() self.update_address_tab() self.update_receive_tab() def create_status_bar(self): sb = QStatusBar() sb.setFixedHeight(35) qtVersion = qVersion() self.balance_label = QLabel("") sb.addWidget(self.balance_label) from version_getter import UpdateLabel self.updatelabel = UpdateLabel(self.config, sb) self.account_selector = QComboBox() self.account_selector.setSizeAdjustPolicy(QComboBox.AdjustToContents) self.connect(self.account_selector,SIGNAL("activated(QString)"),self.change_account) sb.addPermanentWidget(self.account_selector) if (int(qtVersion[0]) >= 4 and int(qtVersion[2]) >= 7): sb.addPermanentWidget( StatusBarButton( 
QIcon(":icons/switchgui.png"), _("Switch to Lite Mode"), self.go_lite ) ) self.lock_icon = QIcon() self.password_button = StatusBarButton( self.lock_icon, _("Password"), self.change_password_dialog ) sb.addPermanentWidget( self.password_button ) sb.addPermanentWidget( StatusBarButton( QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) ) self.seed_button = StatusBarButton( QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog ) sb.addPermanentWidget( self.seed_button ) self.status_button = StatusBarButton( QIcon(":icons/status_disconnected.png"), _("Network"), self.run_network_dialog ) sb.addPermanentWidget( self.status_button ) run_hook('create_status_bar', sb) self.setStatusBar(sb) def update_lock_icon(self): icon = QIcon(":icons/lock.png") if self.wallet.use_encryption else QIcon(":icons/unlock.png") self.password_button.setIcon( icon ) def update_buttons_on_seed(self): self.seed_button.setVisible(self.wallet.has_seed()) self.password_button.setVisible(self.wallet.can_change_password()) self.send_button.setText(_("Create unsigned transaction") if self.wallet.is_watching_only() else _("Send")) def change_password_dialog(self): from password_dialog import PasswordDialog d = PasswordDialog(self.wallet, self) d.run() self.update_lock_icon() def new_contact_dialog(self): d = QDialog(self) d.setWindowTitle(_("New Contact")) vbox = QVBoxLayout(d) vbox.addWidget(QLabel(_('New Contact')+':')) grid = QGridLayout() line1 = QLineEdit() line2 = QLineEdit() grid.addWidget(QLabel(_("Address")), 1, 0) grid.addWidget(line1, 1, 1) grid.addWidget(QLabel(_("Name")), 2, 0) grid.addWidget(line2, 2, 1) vbox.addLayout(grid) vbox.addLayout(ok_cancel_buttons(d)) if not d.exec_(): return address = str(line1.text()) label = unicode(line2.text()) if not is_valid(address): QMessageBox.warning(self, _('Error'), _('Invalid Address'), _('OK')) return self.wallet.add_contact(address) if label: self.wallet.set_label(address, label) self.update_contacts_tab() 
self.update_history_tab() self.update_completions() self.tabs.setCurrentIndex(3) @protected def new_account_dialog(self, password): dialog = QDialog(self) dialog.setModal(1) dialog.setWindowTitle(_("New Account")) vbox = QVBoxLayout() vbox.addWidget(QLabel(_('Account name')+':')) e = QLineEdit() vbox.addWidget(e) msg = _("Note: Newly created accounts are 'pending' until they receive freicoins.") + " " \ + _("You will need to wait for 2 confirmations until the correct balance is displayed and more addresses are created for that account.") l = QLabel(msg) l.setWordWrap(True) vbox.addWidget(l) vbox.addLayout(ok_cancel_buttons(dialog)) dialog.setLayout(vbox) r = dialog.exec_() if not r: return name = str(e.text()) self.wallet.create_pending_account(name, password) self.update_address_tab() self.update_account_selector() self.tabs.setCurrentIndex(3) def show_master_public_keys(self): dialog = QDialog(self) dialog.setModal(1) dialog.setWindowTitle(_("Master Public Keys")) mpk_dict = self.wallet.get_master_public_keys() vbox = QVBoxLayout() # only show the combobox in case multiple accounts are available if len(mpk_dict) > 1: gb = QGroupBox(_("Master Public Keys")) vbox.addWidget(gb) group = QButtonGroup() first_button = None for name in sorted(mpk_dict.keys()): b = QRadioButton(gb) b.setText(name) group.addButton(b) vbox.addWidget(b) if not first_button: first_button = b mpk_text = ShowQRTextEdit() mpk_text.setMaximumHeight(170) vbox.addWidget(mpk_text) def show_mpk(b): name = str(b.text()) mpk = mpk_dict.get(name, "") mpk_text.setText(mpk) mpk_text.selectAll() # for easy copying group.buttonReleased.connect(show_mpk) first_button.setChecked(True) show_mpk(first_button) #combobox.currentIndexChanged[str].connect(lambda acc: show_mpk(acc)) elif len(mpk_dict) == 1: mpk = mpk_dict.values()[0] mpk_text = ShowQRTextEdit(text=mpk) mpk_text.setMaximumHeight(170) mpk_text.selectAll() # for easy copying vbox.addWidget(mpk_text) vbox.addLayout(close_button(dialog)) 
dialog.setLayout(vbox) dialog.exec_() @protected def show_seed_dialog(self, password): if not self.wallet.has_seed(): QMessageBox.information(self, _('Message'), _('This wallet has no seed'), _('OK')) return try: mnemonic = self.wallet.get_mnemonic(password) except BaseException as e: QMessageBox.warning(self, _('Error'), str(e), _('OK')) return from seed_dialog import SeedDialog d = SeedDialog(self, mnemonic, self.wallet.has_imported_keys()) d.exec_() def show_qrcode(self, data, title = _("QR code")): if not data: return d = QRDialog(data, self, title) d.exec_() def do_protect(self, func, args): if self.wallet.use_encryption: password = self.password_dialog() if not password: return else: password = None if args != (False,): args = (self,) + args + (password,) else: args = (self,password) apply( func, args) def show_public_keys(self, address): if not address: return try: pubkey_list = self.wallet.get_public_keys(address) except Exception as e: traceback.print_exc(file=sys.stdout) self.show_message(str(e)) return d = QDialog(self) d.setMinimumSize(600, 200) d.setModal(1) d.setWindowTitle(_("Public key")) vbox = QVBoxLayout() vbox.addWidget( QLabel(_("Address") + ': ' + address)) vbox.addWidget( QLabel(_("Public key") + ':')) keys = ShowQRTextEdit(text='\n'.join(pubkey_list)) vbox.addWidget(keys) vbox.addLayout(close_button(d)) d.setLayout(vbox) d.exec_() @protected def show_private_key(self, address, password): if not address: return try: pk_list = self.wallet.get_private_key(address, password) except Exception as e: traceback.print_exc(file=sys.stdout) self.show_message(str(e)) return d = QDialog(self) d.setMinimumSize(600, 200) d.setModal(1) d.setWindowTitle(_("Private key")) vbox = QVBoxLayout() vbox.addWidget( QLabel(_("Address") + ': ' + address)) vbox.addWidget( QLabel(_("Private key") + ':')) keys = ShowQRTextEdit(text='\n'.join(pk_list)) vbox.addWidget(keys) vbox.addLayout(close_button(d)) d.setLayout(vbox) d.exec_() @protected def do_sign(self, address, 
message, signature, password): message = unicode(message.toPlainText()) message = message.encode('utf-8') try: sig = self.wallet.sign_message(str(address.text()), message, password) signature.setText(sig) except Exception as e: self.show_message(str(e)) def do_verify(self, address, message, signature): message = unicode(message.toPlainText()) message = message.encode('utf-8') if bitcoin.verify_message(address.text(), str(signature.toPlainText()), message): self.show_message(_("Signature verified")) else: self.show_message(_("Error: wrong signature")) def sign_verify_message(self, address=''): d = QDialog(self) d.setModal(1) d.setWindowTitle(_('Sign/verify Message')) d.setMinimumSize(410, 290) layout = QGridLayout(d) message_e = QTextEdit() layout.addWidget(QLabel(_('Message')), 1, 0) layout.addWidget(message_e, 1, 1) layout.setRowStretch(2,3) address_e = QLineEdit() address_e.setText(address) layout.addWidget(QLabel(_('Address')), 2, 0) layout.addWidget(address_e, 2, 1) signature_e = QTextEdit() layout.addWidget(QLabel(_('Signature')), 3, 0) layout.addWidget(signature_e, 3, 1) layout.setRowStretch(3,1) hbox = QHBoxLayout() b = QPushButton(_("Sign")) b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e)) hbox.addWidget(b) b = QPushButton(_("Verify")) b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e)) hbox.addWidget(b) b = QPushButton(_("Close")) b.clicked.connect(d.accept) hbox.addWidget(b) layout.addLayout(hbox, 4, 1) d.exec_() @protected def do_decrypt(self, message_e, pubkey_e, encrypted_e, password): try: decrypted = self.wallet.decrypt_message(str(pubkey_e.text()), str(encrypted_e.toPlainText()), password) message_e.setText(decrypted) except BaseException as e: traceback.print_exc(file=sys.stdout) self.show_warning(str(e)) def do_encrypt(self, message_e, pubkey_e, encrypted_e): message = unicode(message_e.toPlainText()) message = message.encode('utf-8') try: encrypted = bitcoin.encrypt_message(message, 
str(pubkey_e.text())) encrypted_e.setText(encrypted) except BaseException as e: traceback.print_exc(file=sys.stdout) self.show_warning(str(e)) def encrypt_message(self, address = ''): d = QDialog(self) d.setModal(1) d.setWindowTitle(_('Encrypt/decrypt Message')) d.setMinimumSize(610, 490) layout = QGridLayout(d) message_e = QTextEdit() layout.addWidget(QLabel(_('Message')), 1, 0) layout.addWidget(message_e, 1, 1) layout.setRowStretch(2,3) pubkey_e = QLineEdit() if address: pubkey = self.wallet.get_public_keys(address)[0] pubkey_e.setText(pubkey) layout.addWidget(QLabel(_('Public key')), 2, 0) layout.addWidget(pubkey_e, 2, 1) encrypted_e = QTextEdit() layout.addWidget(QLabel(_('Encrypted')), 3, 0) layout.addWidget(encrypted_e, 3, 1) layout.setRowStretch(3,1) hbox = QHBoxLayout() b = QPushButton(_("Encrypt")) b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e)) hbox.addWidget(b) b = QPushButton(_("Decrypt")) b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e)) hbox.addWidget(b) b = QPushButton(_("Close")) b.clicked.connect(d.accept) hbox.addWidget(b) layout.addLayout(hbox, 4, 1) d.exec_() def question(self, msg): return QMessageBox.question(self, _('Message'), msg, QMessageBox.Yes | QMessageBox.No, QMessageBox.No) == QMessageBox.Yes def show_message(self, msg): QMessageBox.information(self, _('Message'), msg, _('OK')) def show_warning(self, msg): QMessageBox.warning(self, _('Warning'), msg, _('OK')) def password_dialog(self, msg=None): d = QDialog(self) d.setModal(1) d.setWindowTitle(_("Enter Password")) pw = QLineEdit() pw.setEchoMode(2) vbox = QVBoxLayout() if not msg: msg = _('Please enter your password') vbox.addWidget(QLabel(msg)) grid = QGridLayout() grid.setSpacing(8) grid.addWidget(QLabel(_('Password')), 1, 0) grid.addWidget(pw, 1, 1) vbox.addLayout(grid) vbox.addLayout(ok_cancel_buttons(d)) d.setLayout(vbox) run_hook('password_dialog', pw, grid, 1) if not d.exec_(): return return unicode(pw.text()) def 
tx_from_text(self, txt): "json or raw hexadecimal" try: txt.decode('hex') is_hex = True except: is_hex = False if is_hex: try: return Transaction.deserialize(txt) except: traceback.print_exc(file=sys.stdout) QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum-FRC was unable to parse your transaction")) return try: tx_dict = json.loads(str(txt)) assert "hex" in tx_dict.keys() tx = Transaction.deserialize(tx_dict["hex"]) #if tx_dict.has_key("input_info"): # input_info = json.loads(tx_dict['input_info']) # tx.add_input_info(input_info) return tx except Exception: traceback.print_exc(file=sys.stdout) QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum-FRC was unable to parse your transaction")) def read_tx_from_qrcode(self): from electrum_frc import qrscanner try: data = qrscanner.scan_qr(self.config) except BaseException, e: QMessageBox.warning(self, _('Error'), _(e), _('OK')) return if not data: return # if the user scanned a bitcoin URI if data.startswith("freicoin:"): self.pay_from_URI(data) return # else if the user scanned an offline signed tx # transactions are binary, but qrcode seems to return utf8... 
z = data.decode('utf8') data = ''.join(chr(ord(b)) for b in z).encode('hex') tx = self.tx_from_text(data) if not tx: return self.show_transaction(tx) def read_tx_from_file(self): fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn") if not fileName: return try: with open(fileName, "r") as f: file_content = f.read() except (ValueError, IOError, os.error), reason: QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum-FRC was unable to open your transaction file") + "\n" + str(reason)) return self.tx_from_text(file_content) @protected def sign_raw_transaction(self, tx, password): try: self.wallet.sign_transaction(tx, password) except Exception as e: traceback.print_exc(file=sys.stdout) QMessageBox.warning(self, _("Error"), str(e)) def do_process_from_text(self): text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction")) if not text: return tx = self.tx_from_text(text) if tx: self.show_transaction(tx) def do_process_from_file(self): tx = self.read_tx_from_file() if tx: self.show_transaction(tx) def do_process_from_txid(self): from electrum_frc import transaction txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':') if ok and txid: r = self.network.synchronous_get([ ('blockchain.transaction.get',[str(txid)]) ])[0] if r: tx = transaction.Transaction.deserialize(r) if tx: self.show_transaction(tx) else: self.show_message("unknown transaction") def do_process_from_csvReader(self, csvReader): outputs = [] errors = [] errtext = "" try: for position, row in enumerate(csvReader): address = row[0] if not bitcoin.is_address(address): errors.append((position, address)) continue amount = Decimal(row[1]) amount = int(100000000*amount) outputs.append(('address', address, amount)) except (ValueError, IOError, os.error), reason: QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum-FRC was unable to open your transaction 
file") + "\n" + str(reason)) return if errors != []: for x in errors: errtext += "CSV Row " + str(x[0]+1) + ": " + x[1] + "\n" QMessageBox.critical(None, _("Invalid Addresses"), _("ABORTING! Invalid Addresses found:") + "\n\n" + errtext) return try: tx = self.wallet.make_unsigned_transaction(outputs, None, None) except Exception as e: self.show_message(str(e)) return self.show_transaction(tx) def do_process_from_csv_file(self): fileName = self.getOpenFileName(_("Select your transaction CSV"), "*.csv") if not fileName: return try: with open(fileName, "r") as f: csvReader = csv.reader(f) self.do_process_from_csvReader(csvReader) except (ValueError, IOError, os.error), reason: QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum-FRC was unable to open your transaction file") + "\n" + str(reason)) return def do_process_from_csv_text(self): text = text_dialog(self, _('Input CSV'), _("Please enter a list of outputs.") + '\n' \ + _("Format: address, amount. One output per line"), _("Load CSV")) if not text: return f = StringIO.StringIO(text) csvReader = csv.reader(f) self.do_process_from_csvReader(csvReader) @protected def export_privkeys_dialog(self, password): if self.wallet.is_watching_only(): self.show_message(_("This is a watching-only wallet")) return try: self.wallet.check_password(password) except Exception as e: QMessageBox.warning(self, _('Error'), str(e), _('OK')) return d = QDialog(self) d.setWindowTitle(_('Private keys')) d.setMinimumSize(850, 300) vbox = QVBoxLayout(d) msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."), _("Exposing a single private key can compromise your entire wallet!"), _("In particular, DO NOT use 'redeem private key' services proposed by third parties.")) vbox.addWidget(QLabel(msg)) e = QTextEdit() e.setReadOnly(True) vbox.addWidget(e) defaultname = 'electrum-private-keys.csv' select_msg = _('Select file to export your private keys to') hbox, filename_e, csv_button = 
filename_field(self, self.config, defaultname, select_msg) vbox.addLayout(hbox) h, b = ok_cancel_buttons2(d, _('Export')) b.setEnabled(False) vbox.addLayout(h) private_keys = {} addresses = self.wallet.addresses(True) done = False def privkeys_thread(): for addr in addresses: time.sleep(0.1) if done: break private_keys[addr] = "\n".join(self.wallet.get_private_key(addr, password)) d.emit(SIGNAL('computing_privkeys')) d.emit(SIGNAL('show_privkeys')) def show_privkeys(): s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items())) e.setText(s) b.setEnabled(True) d.connect(d, QtCore.SIGNAL('computing_privkeys'), lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses)))) d.connect(d, QtCore.SIGNAL('show_privkeys'), show_privkeys) threading.Thread(target=privkeys_thread).start() if not d.exec_(): done = True return filename = filename_e.text() if not filename: return try: self.do_export_privkeys(filename, private_keys, csv_button.isChecked()) except (IOError, os.error), reason: export_error_label = _("Electrum-FRC was unable to produce a private key-export.") QMessageBox.critical(None, _("Unable to create csv"), export_error_label + "\n" + str(reason)) except Exception as e: self.show_message(str(e)) return self.show_message(_("Private keys exported.")) def do_export_privkeys(self, fileName, pklist, is_csv): with open(fileName, "w+") as f: if is_csv: transaction = csv.writer(f) transaction.writerow(["address", "private_key"]) for addr, pk in pklist.items(): transaction.writerow(["%34s"%addr,pk]) else: import json f.write(json.dumps(pklist, indent = 4)) def do_import_labels(self): labelsFile = self.getOpenFileName(_("Open labels file"), "*.dat") if not labelsFile: return try: f = open(labelsFile, 'r') data = f.read() f.close() for key, value in json.loads(data).items(): self.wallet.set_label(key, value) QMessageBox.information(None, _("Labels imported"), _("Your labels were imported from")+" '%s'" % str(labelsFile)) except (IOError, 
os.error), reason: QMessageBox.critical(None, _("Unable to import labels"), _("Electrum-FRC was unable to import your labels.")+"\n" + str(reason)) def do_export_labels(self): labels = self.wallet.labels try: fileName = self.getSaveFileName(_("Select file to save your labels"), 'electrum_labels.dat', "*.dat") if fileName: with open(fileName, 'w+') as f: json.dump(labels, f) QMessageBox.information(None, _("Labels exported"), _("Your labels where exported to")+" '%s'" % str(fileName)) except (IOError, os.error), reason: QMessageBox.critical(None, _("Unable to export labels"), _("Electrum-FRC was unable to export your labels.")+"\n" + str(reason)) def export_history_dialog(self): d = QDialog(self) d.setWindowTitle(_('Export History')) d.setMinimumSize(400, 200) vbox = QVBoxLayout(d) defaultname = os.path.expanduser('~/electrum-history.csv') select_msg = _('Select file to export your wallet transactions to') hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg) vbox.addLayout(hbox) vbox.addStretch(1) h, b = ok_cancel_buttons2(d, _('Export')) vbox.addLayout(h) run_hook('export_history_dialog', self,hbox) self.update() if not d.exec_(): return filename = filename_e.text() if not filename: return try: self.do_export_history(self.wallet, filename, csv_button.isChecked()) except (IOError, os.error), reason: export_error_label = _("Electrum-FRC was unable to produce a transaction export.") QMessageBox.critical(self, _("Unable to export history"), export_error_label + "\n" + str(reason)) return QMessageBox.information(self,_("History exported"), _("Your wallet history has been successfully exported.")) def do_export_history(self, wallet, fileName, is_csv): history = wallet.get_tx_history() lines = [] for item in history: tx_hash, confirmations, is_mine, value, fee, balance, timestamp = item if confirmations: if timestamp is not None: try: time_string = datetime.datetime.fromtimestamp(timestamp).isoformat(' ')[:-3] except [RuntimeError, 
TypeError, NameError] as reason: time_string = "unknown" pass else: time_string = "unknown" else: time_string = "pending" if value is not None: value_string = format_satoshis(value, True) else: value_string = '--' if fee is not None: fee_string = format_satoshis(fee, True) else: fee_string = '0' if tx_hash: label, is_default_label = wallet.get_label(tx_hash) label = label.encode('utf-8') else: label = "" balance_string = format_satoshis(balance, False) if is_csv: lines.append([tx_hash, label, confirmations, value_string, fee_string, balance_string, time_string]) else: lines.append({'txid':tx_hash, 'date':"%16s"%time_string, 'label':label, 'value':value_string}) with open(fileName, "w+") as f: if is_csv: transaction = csv.writer(f, lineterminator='\n') transaction.writerow(["transaction_hash","label", "confirmations", "value", "fee", "balance", "timestamp"]) for line in lines: transaction.writerow(line) else: import json f.write(json.dumps(lines, indent = 4)) def sweep_key_dialog(self): d = QDialog(self) d.setWindowTitle(_('Sweep private keys')) d.setMinimumSize(600, 300) vbox = QVBoxLayout(d) vbox.addWidget(QLabel(_("Enter private keys"))) keys_e = QTextEdit() keys_e.setTabChangesFocus(True) vbox.addWidget(keys_e) h, address_e = address_field(self.wallet.addresses(False)) vbox.addLayout(h) vbox.addStretch(1) hbox, button = ok_cancel_buttons2(d, _('Sweep')) vbox.addLayout(hbox) button.setEnabled(False) def get_address(): addr = str(address_e.text()) if bitcoin.is_address(addr): return addr def get_pk(): pk = str(keys_e.toPlainText()).strip() if Wallet.is_private_key(pk): return pk.split() f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None) keys_e.textChanged.connect(f) address_e.textChanged.connect(f) if not d.exec_(): return fee = self.wallet.fee_per_kb tx = Transaction.sweep(get_pk(), self.network, get_address(), fee) self.show_transaction(tx) @protected def do_import_privkey(self, password): if not self.wallet.has_imported_keys(): r 
= QMessageBox.question(None, _('Warning'), '<b>'+_('Warning') +':\n</b><br/>'+ _('Imported keys are not recoverable from seed.') + ' ' \ + _('If you ever need to restore your wallet from its seed, these keys will be lost.') + '<p>' \ + _('Are you sure you understand what you are doing?'), 3, 4) if r == 4: return text = text_dialog(self, _('Import private keys'), _("Enter private keys")+':', _("Import")) if not text: return text = str(text).split() badkeys = [] addrlist = [] for key in text: try: addr = self.wallet.import_key(key, password) except Exception as e: badkeys.append(key) continue if not addr: badkeys.append(key) else: addrlist.append(addr) if addrlist: QMessageBox.information(self, _('Information'), _("The following addresses were added") + ':\n' + '\n'.join(addrlist)) if badkeys: QMessageBox.critical(self, _('Error'), _("The following inputs could not be imported") + ':\n'+ '\n'.join(badkeys)) self.update_address_tab() self.update_history_tab() def settings_dialog(self): self.need_restart = False d = QDialog(self) d.setWindowTitle(_('Electrum-FRC Settings')) d.setModal(1) vbox = QVBoxLayout() grid = QGridLayout() grid.setColumnStretch(0,1) widgets = [] lang_label = QLabel(_('Language') + ':') lang_help = HelpButton(_('Select which language is used in the GUI (after restart).')) lang_combo = QComboBox() from electrum_frc.i18n import languages lang_combo.addItems(languages.values()) try: index = languages.keys().index(self.config.get("language",'')) except Exception: index = 0 lang_combo.setCurrentIndex(index) if not self.config.is_modifiable('language'): for w in [lang_combo, lang_label]: w.setEnabled(False) def on_lang(x): lang_request = languages.keys()[lang_combo.currentIndex()] if lang_request != self.config.get('language'): self.config.set_key("language", lang_request, True) self.need_restart = True lang_combo.currentIndexChanged.connect(on_lang) widgets.append((lang_label, lang_combo, lang_help)) nz_label = QLabel(_('Zeros after decimal point') + 
':') nz_help = HelpButton(_('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')) nz = QSpinBox() nz.setMinimum(0) nz.setMaximum(self.decimal_point) nz.setValue(self.num_zeros) if not self.config.is_modifiable('num_zeros'): for w in [nz, nz_label]: w.setEnabled(False) def on_nz(): value = nz.value() if self.num_zeros != value: self.num_zeros = value self.config.set_key('num_zeros', value, True) self.update_history_tab() self.update_address_tab() nz.valueChanged.connect(on_nz) widgets.append((nz_label, nz, nz_help)) fee_label = QLabel(_('Transaction fee per kb') + ':') fee_help = HelpButton(_('Fee per kilobyte of transaction.') + '\n' \ + _('Recommended value') + ': ' + self.format_amount(bitcoin.RECOMMENDED_FEE) + ' ' + self.base_unit()) fee_e = BTCAmountEdit(self.get_decimal_point) fee_e.setAmount(self.wallet.fee_per_kb) if not self.config.is_modifiable('fee_per_kb'): for w in [fee_e, fee_label]: w.setEnabled(False) def on_fee(): fee = fee_e.get_amount() self.wallet.set_fee(fee) fee_e.editingFinished.connect(on_fee) widgets.append((fee_label, fee_e, fee_help)) units = ['FRC', 'mFRC', 'uFRC'] unit_label = QLabel(_('Base unit') + ':') unit_combo = QComboBox() unit_combo.addItems(units) unit_combo.setCurrentIndex(units.index(self.base_unit())) msg = _('Base unit of your wallet.')\ + '\n1FRC=1000mFRC.\n' \ + _(' These settings affects the fields in the Send tab')+' ' unit_help = HelpButton(msg) def on_unit(x): unit_result = units[unit_combo.currentIndex()] if self.base_unit() == unit_result: return if unit_result == 'FRC': self.decimal_point = 8 elif unit_result == 'mFRC': self.decimal_point = 5 elif unit_result == 'uFRC': self.decimal_point = 2 else: raise Exception('Unknown base unit') self.config.set_key('decimal_point', self.decimal_point, True) self.update_history_tab() self.update_receive_tab() self.update_address_tab() self.update_invoices_tab() fee_e.setAmount(self.wallet.fee_per_kb) 
self.update_status() unit_combo.currentIndexChanged.connect(on_unit) widgets.append((unit_label, unit_combo, unit_help)) block_explorers = ['Coinplorer'] block_ex_label = QLabel(_('Online Block Explorer') + ':') block_ex_combo = QComboBox() block_ex_combo.addItems(block_explorers) block_ex_combo.setCurrentIndex(block_explorers.index(self.config.get('block_explorer', 'Coinplorer'))) block_ex_help = HelpButton(_('Choose which online block explorer to use for functions that open a web browser')) def on_be(x): be_result = block_explorers[block_ex_combo.currentIndex()] self.config.set_key('block_explorer', be_result, True) block_ex_combo.currentIndexChanged.connect(on_be) widgets.append((block_ex_label, block_ex_combo, block_ex_help)) from electrum_frc import qrscanner system_cameras = qrscanner._find_system_cameras() qr_combo = QComboBox() qr_combo.addItem("Default","default") for camera, device in system_cameras.items(): qr_combo.addItem(camera, device) #combo.addItem("Manually specify a device", config.get("video_device")) index = qr_combo.findData(self.config.get("video_device")) qr_combo.setCurrentIndex(index) qr_label = QLabel(_('Video Device') + ':') qr_combo.setEnabled(qrscanner.zbar is not None) qr_help = HelpButton(_("Install the zbar package to enable this.\nOn linux, type: 'apt-get install python-zbar'")) on_video_device = lambda x: self.config.set_key("video_device", str(qr_combo.itemData(x).toString()), True) qr_combo.currentIndexChanged.connect(on_video_device) widgets.append((qr_label, qr_combo, qr_help)) usechange_cb = QCheckBox(_('Use change addresses')) usechange_cb.setChecked(self.wallet.use_change) usechange_help = HelpButton(_('Using change addresses makes it more difficult for other people to track your transactions.')) if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False) def on_usechange(x): usechange_result = x == Qt.Checked if self.wallet.use_change != usechange_result: self.wallet.use_change = usechange_result 
self.wallet.storage.put('use_change', self.wallet.use_change) usechange_cb.stateChanged.connect(on_usechange) widgets.append((usechange_cb, None, usechange_help)) showtx_cb = QCheckBox(_('Show transaction before broadcast')) showtx_cb.setChecked(self.config.get('show_before_broadcast', False)) showtx_cb.stateChanged.connect(lambda x: self.config.set_key('show_before_broadcast', showtx_cb.isChecked())) showtx_help = HelpButton(_('Display the details of your transactions before broadcasting it.')) widgets.append((showtx_cb, None, showtx_help)) can_edit_fees_cb = QCheckBox(_('Set transaction fees manually')) can_edit_fees_cb.setChecked(self.config.get('can_edit_fees', False)) def on_editfees(x): self.config.set_key('can_edit_fees', x == Qt.Checked) self.update_fee_edit() can_edit_fees_cb.stateChanged.connect(on_editfees) can_edit_fees_help = HelpButton(_('This option lets you edit fees in the send tab.')) widgets.append((can_edit_fees_cb, None, can_edit_fees_help)) for a,b,c in widgets: i = grid.rowCount() if b: grid.addWidget(a, i, 0) grid.addWidget(b, i, 1) else: grid.addWidget(a, i, 0, 1, 2) grid.addWidget(c, i, 2) vbox.addLayout(grid) vbox.addStretch(1) vbox.addLayout(close_button(d)) d.setLayout(vbox) # run the dialog d.exec_() run_hook('close_settings_dialog') if self.need_restart: QMessageBox.warning(self, _('Success'), _('Please restart Electrum-FRC to activate the new GUI settings'), _('OK')) def run_network_dialog(self): if not self.network: QMessageBox.warning(self, _('Offline'), _('You are using Electrum-FRC in offline mode.\nRestart Electrum-FRC if you want to get connected.'), _('OK')) return NetworkDialog(self.wallet.network, self.config, self).do_exec() def closeEvent(self, event): self.config.set_key("is_maximized", self.isMaximized()) if not self.isMaximized(): g = self.geometry() self.config.set_key("winpos-qt", [g.left(),g.top(),g.width(),g.height()]) self.save_column_widths() self.config.set_key("console-history", self.console.history[-50:], True) 
self.wallet.storage.put('accounts_expanded', self.accounts_expanded) event.accept() def plugins_dialog(self): from electrum_frc.plugins import plugins self.pluginsdialog = d = QDialog(self) d.setWindowTitle(_('Electrum-FRC Plugins')) d.setModal(1) vbox = QVBoxLayout(d) # plugins scroll = QScrollArea() scroll.setEnabled(True) scroll.setWidgetResizable(True) scroll.setMinimumSize(400,250) vbox.addWidget(scroll) w = QWidget() scroll.setWidget(w) w.setMinimumHeight(len(plugins)*35) grid = QGridLayout() grid.setColumnStretch(0,1) w.setLayout(grid) def do_toggle(cb, p, w): if p.is_enabled(): if p.disable(): p.close() else: if p.enable(): p.load_wallet(self.wallet) p.init_qt(self.gui_object) r = p.is_enabled() cb.setChecked(r) if w: w.setEnabled(r) def mk_toggle(cb, p, w): return lambda: do_toggle(cb,p,w) for i, p in enumerate(plugins): try: cb = QCheckBox(p.fullname()) cb.setDisabled(not p.is_available()) cb.setChecked(p.is_enabled()) grid.addWidget(cb, i, 0) if p.requires_settings(): w = p.settings_widget(self) w.setEnabled( p.is_enabled() ) grid.addWidget(w, i, 1) else: w = None cb.clicked.connect(mk_toggle(cb,p,w)) grid.addWidget(HelpButton(p.description()), i, 2) except Exception: print_msg("Error: cannot display plugin", p) traceback.print_exc(file=sys.stdout) grid.setRowStretch(i+1,1) vbox.addLayout(close_button(d)) d.exec_() def show_account_details(self, k): account = self.wallet.accounts[k] d = QDialog(self) d.setWindowTitle(_('Account Details')) d.setModal(1) vbox = QVBoxLayout(d) name = self.wallet.get_account_name(k) label = QLabel('Name: ' + name) vbox.addWidget(label) vbox.addWidget(QLabel(_('Address type') + ': ' + account.get_type())) vbox.addWidget(QLabel(_('Derivation') + ': ' + k)) vbox.addWidget(QLabel(_('Master Public Key:'))) text = QTextEdit() text.setReadOnly(True) text.setMaximumHeight(170) vbox.addWidget(text) mpk_text = '\n'.join( account.get_master_pubkeys() ) text.setText(mpk_text) vbox.addLayout(close_button(d)) d.exec_()
gpl-3.0
3,402,617,299,634,712,000
38.375
458
0.586077
false
3.877285
true
false
false
lazyparser/opentuner
examples/unitary/input_generator.py
5
2819
import numpy as np import math import random def generate_random_Ugoal_HARD(N, **kwargs): # N is the length of random matrix multiplication yielding Ugoal # N ~ 100 should be enough # This method is hard because it creates Ugoal over the whole space # Ugoal 2x2 unitary matrix # create identity matrix Ugoal = np.eye(2) # create all N random angles in 2*pi*[0,1) seq_angle = 2.0 * math.pi * np.random.rand(1, N) # determine random operator help2 = np.random.randint(3, size=(1, N)) for k in range(N): hlp = seq_angle[0][k]; if help2[0][k] == 0: Ugoal = X_Mat(hlp) * Ugoal elif help2[0][k] == 1: Ugoal = Y_Mat(hlp) * Ugoal else: Ugoal = Z_Mat(hlp) * Ugoal return Ugoal def generate_random_Ugoal_EASY(N, alpha): # N is the length of random matrix multiplication yielding Ugoal # N ~ 100 should be enough # alpha is the used angle between rotation axes # This method is easy because it creates Ugoal over the whole space # Ugoal 2x2 unitary matrix # create identity matrix Ugoal = np.eye(2) # create all N random angles in 2*pi*[0,1) seq_angle = 2.0 * math.pi * np.random.rand(1, N) # determine random operator help2 = np.random.randint(2, size=(1, N)) for k in range(N): hlp = seq_angle[0][k]; if help2[0][k] == 0: Ugoal = Z_Mat(hlp) * Ugoal else: Ugoal = W_Mat(hlp, alpha) * Ugoal return Ugoal def generate_random_Ugoal_RANDOM(**kwargs): # Random guess with the following parametrization for U # U = @(q1, q2, q3) [ # [ cos(q1)*exp( i*q2 ), sin(q1)*exp( i*q3 )]; # [-sin(q1)*exp(-i*q3 ), cos(q1)*exp(-i*q2 )] # ]; # create random angles q1 = random.uniform(0.0, 0.5 * math.pi) q2 = random.uniform(0.0, 2.0 * math.pi) q3 = random.uniform(0.0, 2.0 * math.pi) return np.matrix([ [math.cos(q1) * my_cexp(q2), math.sin(q1) * my_cexp(q3)], [-math.sin(q1) * my_cexp(-q3), math.cos(q1) * my_cexp(-q2)]]) def my_cexp(x): return math.cos(x) + 1j * math.sin(x) def X_Mat(a): return np.matrix([[math.cos(a / 2.0), -1j * math.sin(a / 2.0)], [-1j * math.sin(a / 2.0), math.cos(a / 2.0)]]) def Y_Mat(a): return 
np.matrix([[math.cos(a / 2.0), -math.sin(a / 2.0)], [math.sin(a / 2.0), math.cos(a / 2.0)]]) def Z_Mat(a): return np.matrix([[math.cos(-a / 2.0) + 1j * math.sin(-a / 2.0), 0], [0, math.cos(a / 2.0) + 1j * math.sin(a / 2.0)]]) def W_Mat(a, alpha): return np.matrix([[math.cos(a / 2) - 1j * math.cos(alpha) * math.sin(a / 2.0), -math.sin(a / 2.0) * math.sin(alpha)], [math.sin(a / 2.0) * math.sin(alpha), math.cos(a / 2.0) + 1j * math.cos(alpha) * math.sin( a / 2.0)]])
mit
-9,196,424,694,489,939,000
27.19
80
0.56332
false
2.684762
false
false
false
paulgclark/waveconverter
src/iqFileArgParse.py
1
3934
import re def fileNameTextToFloat(valStr, unitStr): # if there's a 'p' character, then we have to deal with decimal vals if 'p' in valStr: regex = re.compile(r"([0-9]+)p([0-9]+)") wholeVal = regex.findall(valStr)[0][0] decimalVal = regex.findall(valStr)[0][1] baseVal = 1.0*int(wholeVal) + 1.0*int(decimalVal)/10**len(decimalVal) else: baseVal = 1.0*int(valStr) if unitStr == "G": multiplier = 1e9 elif unitStr == "M": multiplier = 1e6 elif unitStr == "k": multiplier = 1e3 else: multiplier = 1.0 return baseVal * multiplier class iqFileObject(): def __init__(self, prefix = None, centerFreq = None, sampRate = None, fileName = None): # if no file name is specified, store the parameters if fileName is None: self.prefix = prefix self.centerFreq = centerFreq self.sampRate = sampRate # if the file name is specified, we must derive the parameters # from the file name else: # first check if we have a simple file name or a name+path regex = re.compile(r"\/") if regex.match(fileName): # separate the filename from the rest of the path regex = re.compile(r"\/([a-zA-Z0-9_.]+)$") justName = regex.findall(fileName)[0] else: justName = fileName # get the substrings representing the values regex = re.compile(r"_c([0-9p]+)([GMK])_s([0-9p]+)([GMk])\.iq$") paramList = regex.findall(justName) try: centerValStr = paramList[0][0] centerUnitStr = paramList[0][1] sampValStr = paramList[0][2] sampUnitStr = paramList[0][3] self.centerFreq = fileNameTextToFloat(centerValStr, centerUnitStr) self.sampRate = fileNameTextToFloat(sampValStr, sampUnitStr) except: return def fileName(self): tempStr = self.prefix # add center frequency # first determine if we should use k, M, G or nothing # then divide by the appropriate unit if self.centerFreq > 1e9: unitMag = 'G' wholeVal = int(1.0*self.centerFreq/1e9) decimalVal = (1.0*self.centerFreq - 1e9*wholeVal) decimalVal = int(decimalVal/1e7) elif self.centerFreq > 1e6: unitMag = 'M' wholeVal = int(1.0*self.centerFreq/1e6) decimalVal = (1.0*self.centerFreq - 
1e6*wholeVal) decimalVal = int(decimalVal/1e4) elif self.centerFreq > 1e3: unitMag = 'k' wholeVal = int(1.0*self.centerFreq/1e3) decimalVal = (1.0*self.centerFreq - 1e3*wholeVal) decimalVal = int(decimalVal/1e1) else: unitMag = '' value = int(self.centerFreq) if decimalVal == 0: tempStr += "_c{}{}".format(wholeVal, unitMag) else: tempStr += "_c{}p{}{}".format(wholeVal, decimalVal, unitMag) # do the same thing for the sample rate if self.sampRate > 1e6: unitMag = 'M' wholeVal = int(1.0*self.sampRate/1e6) decimalVal = (1.0*self.sampRate - 1e6*wholeVal) decimalVal = int(decimalVal/1e4) elif self.sampRate > 1e3: unitMag = 'k' wholeVal = int(1.0*self.sampRate/1e3) decimalVal = (1.0*self.sampRate - 1e3*wholeVal) value = self.sampRate/1e1 else: unitMag = '' value = int(self.sampRate) if decimalVal == 0: tempStr += "_s{}{}".format(wholeVal, unitMag) else: tempStr += "_s{}p{}{}".format(wholeVal, decimalVal, unitMag) tempStr += ".iq" return tempStr
mit
1,827,736,291,478,683,100
35.766355
82
0.5394
false
3.632502
false
false
false
citrusbyte/pimotion
pimotion/cloudapp/__init__.py
1
1040
import requests from requests.auth import HTTPDigestAuth import json from cloudapp.exceptions import CloudAppHttpError class CloudAppAPI: def __init__(self, username, password): self.username = username self.password = password def upload(self, path): data = self.__get('http://my.cl.ly/items/new') # TODO: Do something with data['uploads_remaining'] url = data['url'] params = data['params'] headers = {'accept': 'application/json'} response = requests.post(url, files={'file': open(path, 'rb')}, data=params, allow_redirects=False) uri = response.headers['location'] data = self.__get(uri) return data['download_url'] def __get(self, uri): headers = {'accept': 'application/json'} r = requests.get(uri, auth=HTTPDigestAuth(self.username, self.password), headers=headers) if r.status_code != 200: raise CloudAppHttpError(response=r) return json.loads(r.text)
mit
-6,981,160,494,641,404,000
32.548387
107
0.613462
false
4.046693
false
false
false
edx/course-discovery
course_discovery/apps/edx_catalog_extensions/migrations/0002_create_professional_certificate_program_type.py
1
1143
# Generated by Django 1.9.11 on 2016-12-19 19:51 from django.db import migrations PAID_SEAT_TYPES = ('credit', 'professional', 'verified',) PROGRAM_TYPE = 'Professional Certificate' def add_program_type(apps, schema_editor): SeatType = apps.get_model('course_metadata', 'SeatType') ProgramType = apps.get_model('course_metadata', 'ProgramType') seat_types = SeatType.objects.filter(slug__in=PAID_SEAT_TYPES) program_type, __ = ProgramType.objects.update_or_create(name=PROGRAM_TYPE) program_type.applicable_seat_types.clear() program_type.applicable_seat_types.add(*seat_types) program_type.save() def drop_program_type(apps, schema_editor): ProgramType = apps.get_model('course_metadata', 'ProgramType') ProgramType.objects.filter(name=PROGRAM_TYPE).delete() class Migration(migrations.Migration): dependencies = [ ('edx_catalog_extensions', '0001_squashed_0003_create_publish_to_marketing_site_flag'), ] operations = [ migrations.RunPython(add_program_type, drop_program_type) ] run_before = [ ('course_metadata', '0247_auto_20200428_1910') ]
agpl-3.0
-5,059,052,367,050,130,000
28.307692
95
0.699038
false
3.313043
false
false
false
goldeneye-source/ges-python
ges/GamePlay/CaptureTheFlag.py
1
28725
################ Copyright 2005-2016 Team GoldenEye: Source ################# # # This file is part of GoldenEye: Source's Python Library. # # GoldenEye: Source's Python Library is free software: you can redistribute # it and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation, either version 3 of the License, # or(at your option) any later version. # # GoldenEye: Source's Python Library is distributed in the hope that it will # be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. # # You should have received a copy of the GNU General Public License # along with GoldenEye: Source's Python Library. # If not, see <http://www.gnu.org/licenses/>. ############################################################################# from . import GEScenario, GEScenarioHelp from .Utils import OppositeTeam, _ from .Utils.GEOvertime import GEOvertime from .Utils.GETimer import EndRoundCallback, TimerTracker, Timer from .Utils.GEWarmUp import GEWarmUp import GEEntity, GEPlayer, GEUtil, GEWeapon, GEMPGameRules as GERules, GEGlobal as Glb USING_API = Glb.API_VERSION_1_2_0 FL_DEBUG = False class Token: def __init__( self ): self._ent = GEEntity.EntityHandle( None ) self.next_drop_time = 0 #Drop-spam prevention self.bothered = False #If we've been dropped by a member of the enemy team and are waiting to be rescued. 
def SetTokenEnt( self, token ): self._ent = GEEntity.EntityHandle( token ) def GetTokenEnt( self ): return self._ent.Get() def GetOwner( self ): ent = self.GetTokenEnt() if ent is not None: return ent.GetPlayerOwner() else: return None # ----------------------- # BEGIN CAPTURE THE KEY # CLASS DEFINITION # ----------------------- class CaptureTheFlag( GEScenario ): # Static Defines ( PROBAR_MI6, PROBAR_JANUS, PROBAR_OVERRIDE ) = range( 1, 4 ) ( MSG_JANUS_CHANNEL, MSG_MI6_CHANNEL, MSG_MISC_CHANNEL ) = range( 3 ) MSG_JANUS_YPOS = 0.71 MSG_MI6_YPOS = 0.75 MSG_MISC_YPOS = 0.67 OVERTIME_DELAY = 3.0 CVAR_CAPOVERRIDE = "ctf_capture_override_time" CVAR_CAPPOINTS = "ctf_player_capture_points" CVAR_SPEEDMULT = "ctf_player_speed_mult" CVAR_WARMUP = "ctf_warmup_time" CVAR_ALLOWTOSS = "ctf_allow_flag_toss" CVAR_FLAGMI6 = "ctf_flag_skin_mi6" CVAR_FLAGJANUS = "ctf_flag_skin_janus" TOKEN_MI6 = 'token_mi6' TOKEN_JANUS = 'token_janus' COLOR_NEUTRAL = GEUtil.Color( 220, 220, 220, 255 ) COLOR_JANUS_GLOW = GEUtil.Color( 14, 139, 237, 200 ) COLOR_JANUS_RADAR = GEUtil.Color( 94, 171, 231, 255 ) COLOR_JANUS_OBJ_COLD = GEUtil.Color( 94, 171, 231, 150 ) COLOR_JANUS_OBJ_HOT = GEUtil.Color( 94, 171, 231, 235 ) COLOR_MI6_GLOW = GEUtil.Color( 224, 18, 18, 200 ) COLOR_MI6_RADAR = GEUtil.Color( 206, 43, 43, 255 ) COLOR_MI6_OBJ_COLD = GEUtil.Color( 206, 43, 43, 150 ) COLOR_MI6_OBJ_HOT = GEUtil.Color( 206, 43, 43, 235 ) ( COLOR_RADAR, COLOR_OBJ_COLD, COLOR_OBJ_HOT ) = range( 3 ) TAG = "[CTF]" def __init__( self ): super( CaptureTheFlag, self ).__init__() # Create trackers and utilities self.warmupTimer = GEWarmUp( self ) self.timerTracker = TimerTracker( self ) self.overtime = GEOvertime() # Initialize base elements self.game_tokens = { Glb.TEAM_MI6 : Token(), Glb.TEAM_JANUS : Token() } self.game_timers = {} def GetPrintName( self ): return "#GES_GP_CAPTUREKEY_NAME" def GetScenarioHelp( self, help_obj ): assert isinstance( help_obj, GEScenarioHelp ) help_obj.SetInfo( "#GES_GPH_CTK_TAGLINE", 
"http://wiki.geshl2.com/Capture_The_Key" ) help_obj.SetDescription( "#GES_GP_CAPTUREKEY_HELP" ) pane = help_obj.AddPane( "ctk" ) help_obj.AddHelp( pane, "ctk_objectives", "#GES_GPH_CTK_OBJECTIVES" ) help_obj.AddHelp( pane, "ctk_radar", "#GES_GPH_CTK_RADAR" ) def GetGameDescription( self ): return "Capture The Flag" def GetTeamPlay( self ): return Glb.TEAMPLAY_ALWAYS def OnLoadGamePlay( self ): # Precache our models GEUtil.PrecacheModel( "models/gameplay/capturepoint.mdl" ) # Ensure our sounds are precached GEUtil.PrecacheSound( "GEGamePlay.Overtime" ) GEUtil.PrecacheSound( "GEGamePlay.Token_Grab" ) GEUtil.PrecacheSound( "GEGamePlay.Token_Grab_Enemy" ) GEUtil.PrecacheSound( "GEGamePlay.Token_Capture_Friend" ) GEUtil.PrecacheSound( "GEGamePlay.Token_Capture_Enemy" ) GEUtil.PrecacheSound( "GEGamePlay.Token_Drop_Friend" ) GEUtil.PrecacheSound( "GEGamePlay.Token_Drop_Enemy" ) # Setup our tokens tokenmgr = GERules.GetTokenMgr() # MI6 Team Token Definition tokenmgr.SetupToken( self.TOKEN_MI6, limit=1, team=Glb.TEAM_MI6, location=Glb.SPAWN_TOKEN | Glb.SPAWN_OTHERTEAM, glow_color=self.COLOR_MI6_GLOW, glow_dist=450.0, allow_switch=False, respawn_delay=20, view_model="models/weapons/tokens/v_flagtoken.mdl", world_model="models/weapons/tokens/w_flagtoken.mdl", print_name="#GES_GP_CTK_BRIEFCASE" ) # Janus Team Token Definition tokenmgr.SetupToken( self.TOKEN_JANUS, limit=1, team=Glb.TEAM_JANUS, location=Glb.SPAWN_TOKEN | Glb.SPAWN_OTHERTEAM, glow_color=self.COLOR_JANUS_GLOW, glow_dist=450.0, allow_switch=False, respawn_delay=20, view_model="models/weapons/tokens/v_flagtoken.mdl", world_model="models/weapons/tokens/w_flagtoken.mdl", print_name="#GES_GP_CTK_KEY" ) # Setup the capture areas tokenmgr.SetupCaptureArea( "capture_mi6", model="models/gameplay/capturepoint.mdl", skin=1, limit=1, location=Glb.SPAWN_CAPAREA | Glb.SPAWN_MYTEAM, rqd_token=self.TOKEN_MI6, rqd_team=Glb.TEAM_MI6 ) tokenmgr.SetupCaptureArea( "capture_janus", model="models/gameplay/capturepoint.mdl", skin=2, 
limit=1, location=Glb.SPAWN_CAPAREA | Glb.SPAWN_MYTEAM, rqd_token=self.TOKEN_JANUS, rqd_team=Glb.TEAM_JANUS ) # Reset variables self.game_inWaitTime = True self.game_inWarmup = True self.game_inOvertime = False self.game_inOvertimeDelay = False self.game_canFinishRound = True # Clear the timer list self.timerTracker.RemoveTimer() # Create the MI6 delay timer self.game_timers[Glb.TEAM_MI6] = self.timerTracker.CreateTimer( "ctk_mi6" ) self.game_timers[Glb.TEAM_MI6].SetAgeRate( 1.2, 1.5 ) self.game_timers[Glb.TEAM_MI6].SetUpdateCallback( self.ctk_OnTokenTimerUpdate, 0.1 ) # Create the Janus delay timer self.game_timers[Glb.TEAM_JANUS] = self.timerTracker.CreateTimer( "ctk_janus" ) self.game_timers[Glb.TEAM_JANUS].SetAgeRate( 1.2, 1.5 ) self.game_timers[Glb.TEAM_JANUS].SetUpdateCallback( self.ctk_OnTokenTimerUpdate, 0.1 ) self.rules_flagmi6 = 1 self.rules_flagjanus = 2 # CVars self.CreateCVar( self.CVAR_CAPOVERRIDE, "0", "Sets the amount of seconds that a player has to stay on a capture point to capture if both tokens are held" ) self.CreateCVar( self.CVAR_CAPPOINTS, "5", "Sets the amount of points a player recieves on token capture" ) self.CreateCVar( self.CVAR_SPEEDMULT, "0.95", "Speed multiplier for a player holding a token [0.5 - 1.5]" ) self.CreateCVar( self.CVAR_WARMUP, "15", "Seconds of warmup time before the match begins (set to 0 to disable)" ) self.CreateCVar( self.CVAR_ALLOWTOSS, "0", "Allow players to toss the flag with !voodoo. WILL ALLOW GRIEFING ON SOME MAPS." ) self.CreateCVar( self.CVAR_FLAGMI6, "1", "Skin of the MI6 flag." ) self.CreateCVar( self.CVAR_FLAGJANUS, "2", "Skin of the Janus flag." 
) self.rules_overrideTime = 0 self.rules_playerCapPoints = 5 self.rules_speedMultiplier = 0.95 self.rules_warmupTime = 15 self.rules_allowToss = False # Make sure we don't start out in wait time or have a warmup if we changed gameplay mid-match if GERules.GetNumActivePlayers() >= 2: self.game_inWaitTime = False self.warmupTimer.StartWarmup(0) GERules.GetRadar().SetForceRadar( True ) GERules.SetSpawnInvulnTime( 5, False ) GERules.EnableSuperfluousAreas() GERules.SetAllowTeamSpawns( True ) def OnUnloadGamePlay(self): super( CaptureTheFlag, self ).OnUnloadGamePlay() self.game_timers = None self.warmupTimer = None self.timerTracker = None self.overtime = None def OnCVarChanged( self, name, oldvalue, newvalue ): if name == self.CVAR_CAPOVERRIDE: overridetime = float( newvalue ) self.rules_overrideTime = 0 if overridetime < 0 else overridetime elif name == self.CVAR_CAPPOINTS: points = int( newvalue ) self.rules_playerCapPoints = 0 if points < 0 else points elif name == self.CVAR_SPEEDMULT: self.rules_speedMultiplier = float( newvalue ) elif name == self.CVAR_ALLOWTOSS: self.rules_allowToss = True if int( newvalue ) > 0 else False elif name == self.CVAR_FLAGMI6 and int( newvalue ) != self.rules_flagmi6: self.rules_flagmi6 = int( newvalue ) GEWeapon.ToGEWeapon(self.game_tokens[Glb.TEAM_JANUS].GetTokenEnt()).SetSkin( self.rules_flagmi6 ) elif name == self.CVAR_FLAGJANUS and int( newvalue ) != self.rules_flagjanus: self.rules_flagjanus = int( newvalue ) GEWeapon.ToGEWeapon( self.game_tokens[Glb.TEAM_MI6].GetTokenEnt() ).SetSkin( self.rules_flagjanus ) elif name == self.CVAR_WARMUP: self.rules_warmupTime = int( newvalue ) if self.warmupTimer.IsInWarmup(): self.warmupTimer.StartWarmup( self.rules_warmupTime ) if self.rules_warmupTime <= 0: GERules.EndRound( False ) def OnRoundBegin( self ): GERules.ResetAllPlayersScores() # This makes sure players get a new set of bars on spawn self.game_inOvertime = False self.game_canFinishRound = True if not self.game_inWaitTime and not 
self.warmupTimer.HadWarmup(): self.warmupTimer.StartWarmup( self.rules_warmupTime ) def CanRoundEnd( self ): if self.warmupTimer.IsInWarmup(): return False # No overtime with only 1 player. if GERules.GetNumActivePlayers() < 2: self.game_canFinishRound = True return True # See if any tokens are picked, if so we postpone the ending # We can only break overtime if a token is dropped or captured if not self.game_inOvertime and not GERules.IsIntermission(): mi6Score = GERules.GetTeam( Glb.TEAM_MI6 ).GetRoundScore() janusScore = GERules.GetTeam( Glb.TEAM_JANUS ).GetRoundScore() # Only go into overtime if our match scores are close and we have a token in hand if abs( mi6Score - janusScore ) <= 0 and ( self.ctk_IsTokenHeld( Glb.TEAM_MI6 ) or self.ctk_IsTokenHeld( Glb.TEAM_JANUS ) ): GEUtil.HudMessage( None, "#GES_GPH_OVERTIME_TITLE", -1, self.MSG_MISC_YPOS, self.COLOR_NEUTRAL, 4.0, self.MSG_MISC_CHANNEL ) GEUtil.PopupMessage( None, "#GES_GPH_OVERTIME_TITLE", "#GES_GPH_OVERTIME" ) GEUtil.EmitGameplayEvent( "ctf_overtime" ) GEUtil.PlaySoundTo( None, "GEGamePlay.Overtime", True ) self.game_inOvertime = True self.game_inOvertimeDelay = False self.game_canFinishRound = False # Ensure overtime never lasts longer than 5 minutes self.overtime.StartOvertime( 300.0 ) elif not self.game_inOvertimeDelay and not GERules.IsIntermission(): # Exit overtime if a team is eliminated, awarding the other team a point if GERules.GetNumInRoundTeamPlayers( Glb.TEAM_MI6 ) == 0: GERules.GetTeam( Glb.TEAM_JANUS ).AddRoundScore( 1 ) GEUtil.HudMessage( None, _( "#GES_GP_CTK_OVERTIME_SCORE", "Janus" ), -1, -1, self.COLOR_NEUTRAL, 5.0 ) self.timerTracker.OneShotTimer( self.OVERTIME_DELAY, EndRoundCallback ) self.game_inOvertimeDelay = True elif GERules.GetNumInRoundTeamPlayers( Glb.TEAM_JANUS ) == 0: GERules.GetTeam( Glb.TEAM_MI6 ).AddRoundScore( 1 ) GEUtil.HudMessage( None, _( "#GES_GP_CTK_OVERTIME_SCORE", "MI6" ), -1, -1, self.COLOR_NEUTRAL, 5.0 ) self.timerTracker.OneShotTimer( self.OVERTIME_DELAY, 
EndRoundCallback ) self.game_inOvertimeDelay = True elif not self.overtime.CheckOvertime(): # Overtime failsafe tripped, end the round now return True return self.game_canFinishRound def OnPlayerSpawn( self, player ): player.SetSpeedMultiplier( 1.0 ) player.SetScoreBoardColor( Glb.SB_COLOR_NORMAL ) def OnThink( self ): # Enter "wait time" if we only have 1 player if GERules.GetNumActivePlayers() < 2 and self.warmupTimer.HadWarmup(): # Check overtime fail safe if self.game_inOvertime: self.game_canFinishRound = True # Restart the round and count the scores if we were previously not in wait time if not self.game_inWaitTime: GERules.EndRound() self.game_inWaitTime = True return # Restart the round (not counting scores) if we were in wait time if self.game_inWaitTime: GEUtil.HudMessage( None, "#GES_GP_GETREADY", -1, -1, GEUtil.CColor( 255, 255, 255, 255 ), 2.5 ) GERules.EndRound( False ) self.game_inWaitTime = False def OnPlayerKilled( self, victim, killer, weapon ): assert isinstance( victim, GEPlayer.CGEMPPlayer ) assert isinstance( killer, GEPlayer.CGEMPPlayer ) # In warmup? No victim? if self.warmupTimer.IsInWarmup() or not victim: return victimTeam = victim.GetTeamNumber() killerTeam = killer.GetTeamNumber() if killer else -1 # Only need to do this for killer since not having a victim aborts early. # death by world if not killer: victim.AddRoundScore( -1 ) elif victim == killer or victimTeam == killerTeam: # Suicide or team kill killer.AddRoundScore( -1 ) else: # Check to see if this was a kill against a token bearer (defense). We know we have a killer and a victim but there might not be a weapon. 
if victim == self.game_tokens[victimTeam].GetOwner(): clr_hint = '^i' if killerTeam == Glb.TEAM_MI6 else '^r' GEUtil.EmitGameplayEvent( "ctf_tokendefended", str( killer.GetUserID() ), str( victim.GetUserID() ), str( victimTeam ), weapon.GetClassname().lower() if weapon else "weapon_none", True ) GEUtil.PostDeathMessage( _( "#GES_GP_CTK_DEFENDED", clr_hint, killer.GetCleanPlayerName(), self.ctk_TokenName( victimTeam ) ) ) killer.AddRoundScore( 2 ) else: killer.AddRoundScore( 1 ) def CanPlayerRespawn( self, player ): if self.game_inOvertime: GEUtil.PopupMessage( player, "#GES_GPH_ELIMINATED_TITLE", "#GES_GPH_ELIMINATED" ) player.SetScoreBoardColor( Glb.SB_COLOR_ELIMINATED ) return False return True def OnCaptureAreaSpawned( self, area ): team = area.GetTeamNumber() tknName = self.TOKEN_MI6 if team == Glb.TEAM_MI6 else self.TOKEN_JANUS GERules.GetRadar().AddRadarContact( area, Glb.RADAR_TYPE_OBJECTIVE, True, "sprites/hud/radar/capture_point", self.ctk_GetColor( OppositeTeam(team) ) ) GERules.GetRadar().SetupObjective( area, team, tknName, "#GES_GP_CTK_OBJ_CAPTURE", self.ctk_GetColor( OppositeTeam(team), self.COLOR_OBJ_HOT ), 0, True ) def OnCaptureAreaRemoved( self, area ): GERules.GetRadar().DropRadarContact( area ) def OnCaptureAreaEntered( self, area, player, token ): assert isinstance( area, GEEntity.CBaseEntity ) assert isinstance( player, GEPlayer.CGEMPPlayer ) assert isinstance( token, GEWeapon.CGEWeapon ) if token is None: return # If the other team has our token, we have to wait a set period tokenteam = token.GetTeamNumber() otherteam = OppositeTeam( player.GetTeamNumber() ) if self.ctk_IsTokenHeld( otherteam ) and self.rules_overrideTime >= 0: if self.rules_overrideTime > 0 and not self.game_inOvertime: # Can't capture if other team has your token in overtime. 
timer = self.game_timers[tokenteam] if timer.state is Timer.STATE_STOP: GEUtil.InitHudProgressBar( player, self.PROBAR_OVERRIDE, "#GES_GP_CTK_CAPTURE_OVR", Glb.HUDPB_SHOWBAR, self.rules_overrideTime, -1, 0.6, 120, 16, GEUtil.CColor( 220, 220, 220, 240 ) ) timer.Start( self.rules_overrideTime ) else: GEUtil.HudMessage( player, "#GES_GP_CTK_CAPTURE_DENY", -1, 0.6, GEUtil.CColor( 220, 220, 220, 240 ), 2.0, self.MSG_MISC_CHANNEL ) else: self.ctk_CaptureToken( token, player ) def OnCaptureAreaExited( self, area, player ): assert isinstance( area, GEEntity.CBaseEntity ) assert isinstance( player, GEPlayer.CGEMPPlayer ) tokenteam = player.GetTeamNumber() self.game_timers[tokenteam].Pause() def OnTokenSpawned( self, token ): tokenTeam = token.GetTeamNumber() GERules.GetRadar().AddRadarContact( token, Glb.RADAR_TYPE_TOKEN, True, "", self.ctk_GetColor( tokenTeam ) ) GERules.GetRadar().SetupObjective( token, Glb.TEAM_NONE, "", self.ctk_TokenName( tokenTeam ), self.ctk_GetColor( tokenTeam, self.COLOR_OBJ_COLD ) ) self.game_tokens[tokenTeam].SetTokenEnt( token ) self.game_tokens[tokenTeam].bothered = False if token.GetTeamNumber() == Glb.TEAM_MI6: token.SetSkin( self.rules_flagjanus ) else: token.SetSkin( self.rules_flagmi6 ) def OnTokenPicked( self, token, player ): tokenTeam = token.GetTeamNumber() otherTeam = OppositeTeam( tokenTeam ) if self.game_tokens[tokenTeam].bothered: self.game_tokens[tokenTeam].next_drop_time = GEUtil.GetTime() + 8.0 else: self.game_tokens[tokenTeam].next_drop_time = GEUtil.GetTime() self.game_tokens[tokenTeam].bothered = True GERules.GetRadar().DropRadarContact( token ) GERules.GetRadar().AddRadarContact( player, Glb.RADAR_TYPE_PLAYER, True, "sprites/hud/radar/run", self.ctk_GetColor( OppositeTeam(player.GetTeamNumber()) ) ) GERules.GetRadar().SetupObjective( player, Glb.TEAM_NONE, "", self.ctk_TokenName( tokenTeam ), self.ctk_GetColor( tokenTeam, self.COLOR_OBJ_HOT ) ) GEUtil.EmitGameplayEvent( "ctf_tokenpicked", str( player.GetUserID() ), str( 
tokenTeam ) ) # Token bearers move faster player.SetSpeedMultiplier( self.rules_speedMultiplier ) player.SetScoreBoardColor( Glb.SB_COLOR_WHITE ) msgFriend = _( "#GES_GP_CTK_PICKED_FRIEND", player.GetCleanPlayerName(), self.ctk_TokenName( tokenTeam ) ) msgEnemy = _( "#GES_GP_CTK_PICKED_FOE", player.GetCleanPlayerName(), self.ctk_TokenName( tokenTeam ) ) self.ctk_PostMessage( msgFriend, tokenTeam, otherTeam ) self.ctk_PostMessage( msgEnemy, otherTeam, otherTeam ) GEUtil.PlaySoundTo( tokenTeam, "GEGamePlay.Token_Grab", False ) GEUtil.PlaySoundTo( otherTeam, "GEGamePlay.Token_Grab_Enemy", False ) def OnTokenDropped( self, token, player ): tokenTeam = token.GetTeamNumber() otherTeam = OppositeTeam( tokenTeam ) # Stop the override timer and force remove just in case GEUtil.RemoveHudProgressBar( player, self.PROBAR_OVERRIDE ) self.game_timers[tokenTeam].Stop() # Remove the victim's objective status. GERules.GetRadar().DropRadarContact( player ) GERules.GetRadar().ClearObjective( player ) GEUtil.EmitGameplayEvent( "ctf_tokendropped", str( player.GetUserID() ), str( tokenTeam ) ) GERules.GetRadar().AddRadarContact( token, Glb.RADAR_TYPE_TOKEN, True, "", self.ctk_GetColor( tokenTeam ) ) GERules.GetRadar().SetupObjective( token, Glb.TEAM_NONE, "", self.ctk_TokenName( tokenTeam ), self.ctk_GetColor( tokenTeam, self.COLOR_OBJ_COLD ) ) player.SetSpeedMultiplier( 1.0 ) player.SetScoreBoardColor( Glb.SB_COLOR_NORMAL ) msg = _( "#GES_GP_CTK_DROPPED", player.GetCleanPlayerName(), self.ctk_TokenName( tokenTeam ) ) self.ctk_PostMessage( msg, tokenTeam, otherTeam ) self.ctk_PostMessage( msg, otherTeam, otherTeam ) GEUtil.PlaySoundTo( tokenTeam, "GEGamePlay.Token_Drop_Friend", True ) GEUtil.PlaySoundTo( otherTeam, "GEGamePlay.Token_Drop_Enemy", True ) def OnEnemyTokenTouched( self, token, player ): tokenTeam = token.GetTeamNumber() otherTeam = OppositeTeam( tokenTeam ) if self.game_tokens[tokenTeam].bothered: GERules.GetTokenMgr().RemoveTokenEnt( token, False ) player.AddRoundScore( 2 
) msgFriend = _( "#GES_GP_CTK_RETURNED_FRIEND", player.GetCleanPlayerName(), self.ctk_TokenName( tokenTeam ) ) msgEnemy = _( "#GES_GP_CTK_RETURNED_FOE", player.GetCleanPlayerName(), self.ctk_TokenName( tokenTeam ) ) self.ctk_PostMessage( msgFriend, otherTeam, tokenTeam ) self.ctk_PostMessage( msgEnemy, tokenTeam, tokenTeam ) GEUtil.PlaySoundTo( tokenTeam, "GEGamePlay.Token_Drop_Enemy", False ) GEUtil.PlaySoundTo( otherTeam, "GEGamePlay.Token_Drop_Friend", False ) def OnTokenRemoved( self, token ): tokenTeam = token.GetTeamNumber() GERules.GetRadar().DropRadarContact( token ) self.game_tokens[tokenTeam].SetTokenEnt( None ) def OnPlayerSay( self, player, text ): team = player.GetTeamNumber() # If the player issues !voodoo they will drop their token if text.lower() == Glb.SAY_COMMAND1 and team != Glb.TEAM_SPECTATOR: if self.rules_allowToss: tokendef = self.game_tokens[team] if player == tokendef.GetOwner(): if GEUtil.GetTime() >= tokendef.next_drop_time: GERules.GetTokenMgr().TransferToken( tokendef.GetTokenEnt(), None ) else: timeleft = max( 1, int( tokendef.next_drop_time - GEUtil.GetTime() ) ) GEUtil.HudMessage( player, _( "#GES_GP_CTK_TOKEN_DROP", timeleft ), -1, self.MSG_MISC_YPOS, self.COLOR_NEUTRAL, 2.0, self.MSG_MISC_CHANNEL ) else: GEUtil.HudMessage( player, "#GES_GP_CTK_TOKEN_DROP_NOFLAG", -1, self.MSG_MISC_YPOS, self.COLOR_NEUTRAL, 2.0, self.MSG_MISC_CHANNEL ) else: GEUtil.HudMessage( player, "#GES_GP_CTK_TOKEN_DROP_DISABLED", -1, self.MSG_MISC_YPOS, self.COLOR_NEUTRAL, 2.0, self.MSG_MISC_CHANNEL ) return True return False #-------------------# # Utility Functions # #-------------------# def ctk_GetColor( self, team, color_type=0 ): if team == Glb.TEAM_JANUS: if color_type == CaptureTheFlag.COLOR_RADAR: return self.COLOR_JANUS_RADAR elif color_type == CaptureTheFlag.COLOR_OBJ_COLD: return self.COLOR_JANUS_OBJ_COLD else: return self.COLOR_JANUS_OBJ_HOT elif team == Glb.TEAM_MI6: if color_type == CaptureTheFlag.COLOR_RADAR: return self.COLOR_MI6_RADAR elif 
color_type == CaptureTheFlag.COLOR_OBJ_COLD: return self.COLOR_MI6_OBJ_COLD else: return self.COLOR_MI6_OBJ_HOT else: return self.COLOR_NEUTRAL def ctk_CaptureToken( self, token, holder ): assert isinstance( token, GEWeapon.CGEWeapon ) assert isinstance( holder, GEPlayer.CGEMPPlayer ) tokenTeam = token.GetTeamNumber() otherTeam = OppositeTeam( tokenTeam ) GERules.GetRadar().DropRadarContact( token ) GERules.GetRadar().DropRadarContact( holder ) holder.SetSpeedMultiplier( 1.0 ) holder.SetScoreBoardColor( Glb.SB_COLOR_NORMAL ) # Check overtime requirements if self.game_inOvertime: if not self.game_inOvertimeDelay: self.game_inOvertimeDelay = True self.timerTracker.OneShotTimer( self.OVERTIME_DELAY, EndRoundCallback ) else: # We already scored in overtime, ignore this return # Capture the token and give the capturing team points GERules.GetTokenMgr().CaptureToken( token ) # Make sure our timer goes away self.game_timers[tokenTeam].Stop() GEUtil.RemoveHudProgressBar( holder, self.PROBAR_OVERRIDE ) # Give points if not in warmup if not self.warmupTimer.IsInWarmup(): GERules.GetTeam( tokenTeam ).AddRoundScore( 1 ) holder.AddRoundScore( self.rules_playerCapPoints ) GEUtil.EmitGameplayEvent( "ctf_tokencapture", str( holder.GetUserID() ), str( tokenTeam ), "", "", True ) GEUtil.PlaySoundTo( tokenTeam, "GEGamePlay.Token_Capture_Friend", True ) GEUtil.PlaySoundTo( otherTeam, "GEGamePlay.Token_Capture_Enemy", True ) msg = _( "#GES_GP_CTK_CAPTURE", holder.GetCleanPlayerName(), self.ctk_TokenName( tokenTeam ) ) self.ctk_PostMessage( msg ) GEUtil.PostDeathMessage( msg ) def ctk_OnTokenTimerUpdate( self, timer, update_type ): assert isinstance( timer, Timer ) tokenTeam = Glb.TEAM_MI6 if ( timer.GetName() == "ctk_mi6" ) else Glb.TEAM_JANUS otherTeam = OppositeTeam( tokenTeam ) time = timer.GetCurrentTime() holder = self.game_tokens[tokenTeam].GetOwner() if holder is not None: if update_type == Timer.UPDATE_FINISH: token = self.game_tokens[tokenTeam].GetTokenEnt() if token is not 
None: self.ctk_CaptureToken( token, holder ) elif update_type == Timer.UPDATE_STOP: GEUtil.RemoveHudProgressBar( holder, self.PROBAR_OVERRIDE ) elif update_type == Timer.UPDATE_RUN: GEUtil.UpdateHudProgressBar( holder, self.PROBAR_OVERRIDE, time ) # Check to see if the other team dropped their token mid-capture and we are still on the capture point. if not self.ctk_IsTokenHeld( otherTeam ): if timer.state == Timer.STATE_PAUSE: timer.Stop() else: timer.Finish() def ctk_IsTokenHeld( self, team ): return self.game_tokens[team].GetOwner() != None def ctk_PostMessage( self, msg, to_team=Glb.TEAM_NONE, from_team=Glb.TEAM_NONE ): if from_team == Glb.TEAM_MI6: channel = self.MSG_JANUS_CHANNEL ypos = self.MSG_JANUS_YPOS elif from_team == Glb.TEAM_JANUS: channel = self.MSG_MI6_CHANNEL ypos = self.MSG_MI6_YPOS else: channel = self.MSG_MISC_CHANNEL ypos = self.MSG_MISC_YPOS if to_team == Glb.TEAM_NONE: GEUtil.HudMessage( None, msg, -1, ypos, self.ctk_GetColor( from_team ), 5.0, channel ) else: GEUtil.HudMessage( to_team, msg, -1, ypos, self.ctk_GetColor( from_team ), 5.0, channel ) def ctk_TokenName( self, team ): return "#GES_GP_CTK_OBJ_JANUS" if team == Glb.TEAM_MI6 else "#GES_GP_CTK_OBJ_MI6"
gpl-3.0
-6,110,344,920,448,858,000
44.886581
203
0.621793
false
3.313531
false
false
false
funkring/fdoo
addons-funkring/at_sale/report/sale_order_report.py
1
4550
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from openerp.addons.at_base import extreport class Parser(extreport.basic_parser): def __init__(self, cr, uid, name, context=None): super(Parser, self).__init__(cr, uid, name, context=context) self.localcontext.update({ "time": time, "sale_order_lines": self.sale_order_lines, "currency" : self.currency, "payment_note" : self.payment_note, "payment_term" : self.payment_term, "taxes" : self.taxes }) def payment_note(self, sale_order): return sale_order.payment_term and sale_order.payment_term.note or "" def currency(self, sale_order): return (sale_order.pricelist_id and sale_order.pricelist_id.currency_id and sale_order.pricelist_id.currency_id.symbol) or '' def payment_term(self,sale_order): return self.payment_note(sale_order) def taxes(self,sale_order): t_res = {} t_tax_obj = self.pool.get("account.tax") t_sale_obj = self.pool.get("sale.order") for t_tax_id, t_tax_amount in t_sale_obj._tax_amount(self.cr,self.uid,sale_order.id,self.localcontext).items(): t_tax = t_tax_obj.browse(self.cr,self.uid,t_tax_id,self.localcontext) 
t_amount = t_res.get(t_tax.name,0.0) t_amount += t_tax_amount t_res[t_tax.name] = t_amount return t_res def prepare(self, sale_order): prepared_lines = [] order_lines = [] obj_order_line = self.pool.get('sale.order.line') ids = obj_order_line.search(self.cr, self.uid, [('order_id', '=', sale_order.id)]) for sid in range(0, len(ids)): order = obj_order_line.browse(self.cr, self.uid, ids[sid], self.localcontext) order_lines.append(order) pos = 1 for line in order_lines: # line name notes = [] lines = line.name.split("\n") line_name = lines and lines[0] or "" if len(lines) > 1: notes.extend(lines[1:]) # line notes #if line.note: # notes.extend(line.note.split("\n")) line_note = "\n".join(notes) res = {} res['tax_id'] = ', '.join(map(lambda x: x.name, line.tax_id)) or '' res['name'] = line_name res["note"] = line_note res['product_uom_qty'] = line.product_uos and line.product_uos_qty or line.product_uom_qty or 0.00 res['product_uom'] = line.product_uos and line.product_uos.name or line.product_uom.name res['price_unit'] = line.price_unit_untaxed or 0.00 res['discount'] = line.discount or 0.00 res['price_subtotal'] = line.price_subtotal or 0.00 res['price_subtotal_taxed'] = line.price_subtotal_taxed or 0.00 res['currency'] = sale_order.pricelist_id.currency_id.symbol res['pos'] = str(pos) res['id'] = line.id res['price_subtotal_nodisc'] = line.price_subtotal_nodisc or 0.00 pos += 1 prepared_lines.append(res) res = [{ "lines" : prepared_lines, }] return res def sale_order_lines(self, sale_order): return self.prepare(sale_order)[0]["lines"] # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
-7,761,827,827,090,585,000
39.990991
133
0.547692
false
3.807531
false
false
false
SuperDARNCanada/placeholderOS
brian/brian.py
2
11673
#!/usr/bin/python # Copyright 2017 SuperDARN Canada # # brian.py # 2018-01-30 # Communicate with all processes to administrate the borealis software import sys import os import time from datetime import datetime import threading import argparse import zmq sys.path.append(os.environ["BOREALISPATH"]) if __debug__: sys.path.append(os.environ["BOREALISPATH"] + '/build/debug/utils/protobuf') # TODO need to get this from scons environment, 'release' may be 'debug' else: sys.path.append(os.environ["BOREALISPATH"] + '/build/release/utils/protobuf') import driverpacket_pb2 import sigprocpacket_pb2 import rxsamplesmetadata_pb2 import processeddata_pb2 sys.path.append(os.environ["BOREALISPATH"] + '/utils/experiment_options') import experimentoptions as options sys.path.append(os.environ["BOREALISPATH"] + '/utils/zmq_borealis_helpers') import socket_operations as so TIME_PROFILE = True def router(opts): """The router is responsible for moving traffic between modules by routing traffic using named sockets. Args: opts (ExperimentOptions): Options parsed from config. 
""" context = zmq.Context().instance() router = context.socket(zmq.ROUTER) router.setsockopt(zmq.ROUTER_MANDATORY, 1) router.bind(opts.router_address) sys.stdout.write("Starting router!\n") frames_to_send = [] while True: events = router.poll(timeout=1) if events: dd = router.recv_multipart() sender, receiver, empty, data = dd if __debug__: output = "Router input/// Sender -> {}: Receiver -> {}\n" output = output.format(sender, receiver) sys.stdout.write(output) frames_received = [receiver,sender,empty,data] frames_to_send.append(frames_received) if __debug__: output = "Router output/// Receiver -> {}: Sender -> {}\n" output = output.format(receiver, sender) sys.stdout.write(output) non_sent = [] for frames in frames_to_send: try: router.send_multipart(frames) except zmq.ZMQError as e: if __debug__: output = "Unable to send frame Receiver -> {}: Sender -> {}\n" output = output.format(frames[0], frames[1]) sys.stdout.write(output) non_sent.append(frames) frames_to_send = non_sent def sequence_timing(opts): """Thread function for sequence timing This function simulates the flow of data between brian's sequence timing and other parts of the radar system. This function serves to check whether the sequence timing is working as expected and to rate control the system to make sure the processing can handle data rates. 
:param context: zmq context, if None, then this method will get one :type context: zmq context, optional """ ids = [opts.brian_to_radctrl_identity, opts.brian_to_driver_identity, opts.brian_to_dspbegin_identity, opts.brian_to_dspend_identity] sockets_list = so.create_sockets(ids, opts.router_address) brian_to_radar_control = sockets_list[0] brian_to_driver = sockets_list[1] brian_to_dsp_begin = sockets_list[2] brian_to_dsp_end = sockets_list[3] sequence_poller = zmq.Poller() sequence_poller.register(brian_to_radar_control, zmq.POLLIN) sequence_poller.register(brian_to_dsp_begin, zmq.POLLIN) sequence_poller.register(brian_to_dsp_end, zmq.POLLIN) sequence_poller.register(brian_to_driver, zmq.POLLIN) def printing(msg): SEQUENCE_TIMING = "\033[31m" + "SEQUENCE TIMING: " + "\033[0m" sys.stdout.write(SEQUENCE_TIMING + msg + "\n") context = zmq.Context().instance() start_new_sock = context.socket(zmq.PAIR) start_new_sock.bind("inproc://start_new") def start_new(): """ This function serves to rate control the system. If processing is faster than the sequence time than the speed of the driver is the limiting factor. If processing takes longer than sequence time, than the dsp unit limits the speed of the system. """ start_new = context.socket(zmq.PAIR) start_new.connect("inproc://start_new") want_to_start = False good_to_start = True dsp_finish_counter = 2 # starting a new sequence and keeping the system correctly pipelined is dependent on 3 # conditions. We trigger a 'want_to_start' when the samples have been collected from the # driver and dsp is ready to do its job. This signals that the driver is capable of # collecting new data. 'good_to_start' is triggered once the samples have been copied to # the GPU and the filtering begins. 'extra_good_to_start' is needed to make sure the # system can keep up with the demand if the gpu is working hard. Without this flag its # possible to overload the gpu and crash the system with overallocation of memory. 
This # is set once the filtering is complete. # # The last flag is actually a counter because on the first run it is 2 sequences # behind the current sequence and then after that its only 1 sequence behind. The dsp # is always processing the work while a new sequence is being collected. if TIME_PROFILE: time_now = datetime.utcnow() while True: if want_to_start and good_to_start and dsp_finish_counter: #Acknowledge new sequence can begin to Radar Control by requesting new sequence #metadata if __debug__: printing("Requesting metadata from Radar control") so.send_request(brian_to_radar_control, opts.radctrl_to_brian_identity, "Requesting metadata") want_to_start = good_to_start = False dsp_finish_counter -= 1 message = start_new.recv_string() if message == "want_to_start": if TIME_PROFILE: print('Driver ready: {}'.format(datetime.utcnow() - time_now)) time_now = datetime.utcnow() want_to_start = True if message == "good_to_start": if TIME_PROFILE: print('Copied to GPU: {}'.format(datetime.utcnow() - time_now)) time_now = datetime.utcnow() good_to_start = True if message == "extra_good_to_start": if TIME_PROFILE: print('DSP finished w/ data: {}'.format(datetime.utcnow() - time_now)) time_now = datetime.utcnow() dsp_finish_counter = 1; thread = threading.Thread(target=start_new) thread.daemon = True thread.start() time.sleep(1) last_processing_time = 0 first_time = True late_counter = 0 while True: if first_time: #Request new sequence metadata if __debug__: printing("Requesting metadata from Radar control") so.send_request(brian_to_radar_control, opts.radctrl_to_brian_identity, "Requesting metadata") first_time = False socks = dict(sequence_poller.poll()) if brian_to_driver in socks and socks[brian_to_driver] == zmq.POLLIN: #Receive metadata of completed sequence from driver such as timing reply = so.recv_obj(brian_to_driver, opts.driver_to_brian_identity, printing) meta = rxsamplesmetadata_pb2.RxSamplesMetadata() meta.ParseFromString(reply) if __debug__: reply_output 
= "Driver sent -> time {} ms, sqnum {}" reply_output = reply_output.format(meta.sequence_time*1e3, meta.sequence_num) printing(reply_output) #Requesting acknowledgement of work begins from DSP if __debug__: printing("Requesting work begins from DSP") iden = opts.dspbegin_to_brian_identity + str(meta.sequence_num) so.send_request(brian_to_dsp_begin, iden, "Requesting work begins") start_new_sock.send_string("want_to_start") if brian_to_radar_control in socks and socks[brian_to_radar_control] == zmq.POLLIN: #Get new sequence metadata from radar control reply = so.recv_obj(brian_to_radar_control, opts.radctrl_to_brian_identity, printing) sigp = sigprocpacket_pb2.SigProcPacket() sigp.ParseFromString(reply) if __debug__: reply_output = "Radar control sent -> sequence {} time {} ms" reply_output = reply_output.format(sigp.sequence_num, sigp.sequence_time) printing(reply_output) #Request acknowledgement of sequence from driver if __debug__: printing("Requesting ack from driver") so.send_request(brian_to_driver, opts.driver_to_brian_identity, "Requesting ack") if brian_to_dsp_begin in socks and socks[brian_to_dsp_begin] == zmq.POLLIN: #Get acknowledgement that work began in processing. reply = so.recv_bytes_from_any_iden(brian_to_dsp_begin) sig_p = sigprocpacket_pb2.SigProcPacket() sig_p.ParseFromString(reply) if __debug__: reply_output = "Dsp began -> sqnum {}".format(sig_p.sequence_num) printing(reply_output) #Requesting acknowledgement of work ends from DSP if __debug__: printing("Requesting work end from DSP") iden = opts.dspend_to_brian_identity + str(sig_p.sequence_num) so.send_request(brian_to_dsp_end, iden, "Requesting work ends") #acknowledge we want to start something new. start_new_sock.send_string("good_to_start") if brian_to_dsp_end in socks and socks[brian_to_dsp_end] == zmq.POLLIN: #Receive ack that work finished on previous sequence. 
reply = so.recv_bytes_from_any_iden(brian_to_dsp_end) sig_p = sigprocpacket_pb2.SigProcPacket() sig_p.ParseFromString(reply) if __debug__: reply_output = "Dsp sent -> time {}, sqnum {}" reply_output = reply_output.format(sig_p.kerneltime, sig_p.sequence_num) printing(reply_output) if sig_p.sequence_num != 0: if sig_p.kerneltime > last_processing_time: late_counter += 1 else: late_counter = 0 last_processing_time = sig_p.kerneltime if __debug__: printing("Late counter {}".format(late_counter)) #acknowledge that we are good and able to start something new. start_new_sock.send_string("extra_good_to_start") def main(): parser = argparse.ArgumentParser() help_msg = 'Run only the router. Do not run any of the other threads or functions.' parser.add_argument('--router-only', action='store_true', help=help_msg) args = parser.parse_args() opts = options.ExperimentOptions() threads = [] threads.append(threading.Thread(target=router, args=(opts,))) if not args.router_only: threads.append(threading.Thread(target=sequence_timing, args=(opts,))) for thread in threads: thread.daemon = True thread.start() while True: time.sleep(1) if __name__ == "__main__": main()
gpl-3.0
2,562,039,298,006,661,600
37.147059
153
0.610469
false
3.963667
false
false
false
syjeon/new_edx
lms/djangoapps/verify_student/views.py
2
12481
""" """ import json import logging import decimal from mitxmako.shortcuts import render_to_response from django.conf import settings from django.core.urlresolvers import reverse from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseRedirect from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from django.views.generic.base import View from django.utils.decorators import method_decorator from django.utils.translation import ugettext as _ from django.utils.http import urlencode from django.contrib.auth.decorators import login_required from course_modes.models import CourseMode from student.models import CourseEnrollment from student.views import course_from_id from shoppingcart.models import Order, CertificateItem from shoppingcart.processors.CyberSource import ( get_signed_purchase_params, get_purchase_endpoint ) from verify_student.models import SoftwareSecurePhotoVerification import ssencrypt log = logging.getLogger(__name__) class VerifyView(View): @method_decorator(login_required) def get(self, request, course_id): """ """ # If the user has already been verified within the given time period, # redirect straight to the payment -- no need to verify again. if SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user): return redirect( reverse('verify_student_verified', kwargs={'course_id': course_id})) elif CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == 'verified': return redirect(reverse('dashboard')) else: # If they haven't completed a verification attempt, we have to # restart with a new one. We can't reuse an older one because we # won't be able to show them their encrypted photo_id -- it's easier # bookkeeping-wise just to start over. 
progress_state = "start" verify_mode = CourseMode.mode_for_course(course_id, "verified") # if the course doesn't have a verified mode, we want to kick them # from the flow if not verify_mode: return redirect(reverse('dashboard')) if course_id in request.session.get("donation_for_course", {}): chosen_price = request.session["donation_for_course"][course_id] else: chosen_price = verify_mode.min_price course = course_from_id(course_id) context = { "progress_state": progress_state, "user_full_name": request.user.profile.name, "course_id": course_id, "course_name": course.display_name_with_default, "course_org" : course.display_org_with_default, "course_num" : course.display_number_with_default, "purchase_endpoint": get_purchase_endpoint(), "suggested_prices": [ decimal.Decimal(price) for price in verify_mode.suggested_prices.split(",") ], "currency": verify_mode.currency.upper(), "chosen_price": chosen_price, "min_price": verify_mode.min_price, } return render_to_response('verify_student/photo_verification.html', context) class VerifiedView(View): """ View that gets shown once the user has already gone through the verification flow """ @method_decorator(login_required) def get(self, request, course_id): """ Handle the case where we have a get request """ if CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == 'verified': return redirect(reverse('dashboard')) verify_mode = CourseMode.mode_for_course(course_id, "verified") if course_id in request.session.get("donation_for_course", {}): chosen_price = request.session["donation_for_course"][course_id] else: chosen_price = verify_mode.min_price.format("{:g}") course = course_from_id(course_id) context = { "course_id": course_id, "course_name": course.display_name_with_default, "course_org" : course.display_org_with_default, "course_num" : course.display_number_with_default, "purchase_endpoint": get_purchase_endpoint(), "currency": verify_mode.currency.upper(), "chosen_price": chosen_price, } return 
render_to_response('verify_student/verified.html', context) @login_required def create_order(request): """ Submit PhotoVerification and create a new Order for this verified cert """ if not SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user): attempt = SoftwareSecurePhotoVerification(user=request.user) b64_face_image = request.POST['face_image'].split(",")[1] b64_photo_id_image = request.POST['photo_id_image'].split(",")[1] attempt.upload_face_image(b64_face_image.decode('base64')) attempt.upload_photo_id_image(b64_photo_id_image.decode('base64')) attempt.mark_ready() attempt.save() course_id = request.POST['course_id'] donation_for_course = request.session.get('donation_for_course', {}) current_donation = donation_for_course.get(course_id, decimal.Decimal(0)) contribution = request.POST.get("contribution", donation_for_course.get(course_id, 0)) try: amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN) except decimal.InvalidOperation: return HttpResponseBadRequest(_("Selected price is not valid number.")) if amount != current_donation: donation_for_course[course_id] = amount request.session['donation_for_course'] = donation_for_course verified_mode = CourseMode.modes_for_course_dict(course_id).get('verified', None) # make sure this course has a verified mode if not verified_mode: return HttpResponseBadRequest(_("This course doesn't support verified certificates")) if amount < verified_mode.min_price: return HttpResponseBadRequest(_("No selected price or selected price is below minimum.")) # I know, we should check this is valid. 
All kinds of stuff missing here cart = Order.get_cart_for_user(request.user) cart.clear() CertificateItem.add_to_order(cart, course_id, amount, 'verified') params = get_signed_purchase_params(cart) return HttpResponse(json.dumps(params), content_type="text/json") @require_POST @csrf_exempt # SS does its own message signing, and their API won't have a cookie value def results_callback(request): """ Software Secure will call this callback to tell us whether a user is verified to be who they said they are. """ body = request.body try: body_dict = json.loads(body) except ValueError: log.exception("Invalid JSON received from Software Secure:\n\n{}\n".format(body)) return HttpResponseBadRequest("Invalid JSON. Received:\n\n{}".format(body)) if not isinstance(body_dict, dict): log.error("Reply from Software Secure is not a dict:\n\n{}\n".format(body)) return HttpResponseBadRequest("JSON should be dict. Received:\n\n{}".format(body)) headers = { "Authorization": request.META.get("HTTP_AUTHORIZATION", ""), "Date": request.META.get("HTTP_DATE", "") } sig_valid = ssencrypt.has_valid_signature( "POST", headers, body_dict, settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"], settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"] ) _, access_key_and_sig = headers["Authorization"].split(" ") access_key = access_key_and_sig.split(":")[0] # This is what we should be doing... 
#if not sig_valid: # return HttpResponseBadRequest("Signature is invalid") # This is what we're doing until we can figure out why we disagree on sigs if access_key != settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]: return HttpResponseBadRequest("Access key invalid") receipt_id = body_dict.get("EdX-ID") result = body_dict.get("Result") reason = body_dict.get("Reason", "") error_code = body_dict.get("MessageType", "") try: attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id) except SoftwareSecurePhotoVerification.DoesNotExist: log.error("Software Secure posted back for receipt_id {}, but not found".format(receipt_id)) return HttpResponseBadRequest("edX ID {} not found".format(receipt_id)) if result == "PASS": log.debug("Approving verification for {}".format(receipt_id)) attempt.approve() elif result == "FAIL": log.debug("Denying verification for {}".format(receipt_id)) attempt.deny(json.dumps(reason), error_code=error_code) elif result == "SYSTEM FAIL": log.debug("System failure for {} -- resetting to must_retry".format(receipt_id)) attempt.system_error(json.dumps(reason), error_code=error_code) log.error("Software Secure callback attempt for %s failed: %s", receipt_id, reason) else: log.error("Software Secure returned unknown result {}".format(result)) return HttpResponseBadRequest( "Result {} not understood. 
Known results: PASS, FAIL, SYSTEM FAIL".format(result) ) return HttpResponse("OK!") @login_required def show_requirements(request, course_id): """ Show the requirements necessary for """ if CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == 'verified': return redirect(reverse('dashboard')) course = course_from_id(course_id) context = { "course_id": course_id, "course_name": course.display_name_with_default, "course_org" : course.display_org_with_default, "course_num" : course.display_number_with_default, "is_not_active": not request.user.is_active, } return render_to_response("verify_student/show_requirements.html", context) def show_verification_page(request): pass def enroll(user, course_id, mode_slug): """ Enroll the user in a course for a certain mode. This is the view you send folks to when they click on the enroll button. This does NOT cover changing enrollment modes -- it's intended for new enrollments only, and will just redirect to the dashboard if it detects that an enrollment already exists. """ # If the user is already enrolled, jump to the dashboard. Yeah, we could # do upgrades here, but this method is complicated enough. if CourseEnrollment.is_enrolled(user, course_id): return HttpResponseRedirect(reverse('dashboard')) available_modes = CourseModes.modes_for_course(course_id) # If they haven't chosen a mode... if not mode_slug: # Does this course support multiple modes of Enrollment? If so, redirect # to a page that lets them choose which mode they want. if len(available_modes) > 1: return HttpResponseRedirect( reverse('choose_enroll_mode', kwargs={'course_id': course_id}) ) # Otherwise, we use the only mode that's supported... else: mode_slug = available_modes[0].slug # If the mode is one of the simple, non-payment ones, do the enrollment and # send them to their dashboard. 
if mode_slug in ("honor", "audit"): CourseEnrollment.enroll(user, course_id, mode=mode_slug) return HttpResponseRedirect(reverse('dashboard')) if mode_slug == "verify": if SoftwareSecurePhotoVerification.has_submitted_recent_request(user): # Capture payment info # Create an order # Create a VerifiedCertificate order item return HttpResponse.Redirect(reverse('verified')) # There's always at least one mode available (default is "honor"). If they # haven't specified a mode, we just assume it's if not mode: mode = available_modes[0] elif len(available_modes) == 1: if mode != available_modes[0]: raise Exception() mode = available_modes[0] if mode == "honor": CourseEnrollment.enroll(user, course_id) return HttpResponseRedirect(reverse('dashboard'))
agpl-3.0
2,988,267,064,242,832,000
38.748408
108
0.662848
false
4.141009
false
false
false
rajalokan/nova
nova/tests/unit/compute/test_compute_utils.py
1
45342
# Copyright 2011 OpenStack Foundation # All Rights Reserved. # Copyright 2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests For miscellaneous util methods used with compute.""" import copy import string import uuid import mock from oslo_serialization import jsonutils import six from nova.compute import flavors from nova.compute import manager from nova.compute import power_state from nova.compute import task_states from nova.compute import utils as compute_utils import nova.conf from nova import context from nova import exception from nova.image import glance from nova.network import model from nova import objects from nova.objects import base from nova.objects import block_device as block_device_obj from nova.objects import fields from nova import rpc from nova import test from nova.tests.unit import fake_block_device from nova.tests.unit import fake_instance from nova.tests.unit import fake_network from nova.tests.unit import fake_notifier from nova.tests.unit import fake_server_actions import nova.tests.unit.image.fake from nova.tests.unit.objects import test_flavor from nova.tests import uuidsentinel as uuids CONF = nova.conf.CONF FAKE_IMAGE_REF = uuids.image_ref def create_instance(context, user_id='fake', project_id='fake', params=None): """Create a test instance.""" flavor = flavors.get_flavor_by_name('m1.tiny') net_info = model.NetworkInfo([]) info_cache = objects.InstanceInfoCache(network_info=net_info) inst = objects.Instance(context=context, 
image_ref=uuids.fake_image_ref, reservation_id='r-fakeres', user_id=user_id, project_id=project_id, instance_type_id=flavor.id, flavor=flavor, old_flavor=None, new_flavor=None, system_metadata={}, ami_launch_index=0, root_gb=0, ephemeral_gb=0, info_cache=info_cache) if params: inst.update(params) inst.create() return inst class ComputeValidateDeviceTestCase(test.NoDBTestCase): def setUp(self): super(ComputeValidateDeviceTestCase, self).setUp() self.context = context.RequestContext('fake', 'fake') # check if test name includes "xen" if 'xen' in self.id(): self.flags(compute_driver='xenapi.XenAPIDriver') self.instance = objects.Instance(uuid=uuid.uuid4().hex, root_device_name=None, default_ephemeral_device=None) else: self.instance = objects.Instance(uuid=uuid.uuid4().hex, root_device_name='/dev/vda', default_ephemeral_device='/dev/vdb') flavor = objects.Flavor(**test_flavor.fake_flavor) self.instance.system_metadata = {} self.instance.flavor = flavor self.instance.default_swap_device = None self.data = [] def _validate_device(self, device=None): bdms = base.obj_make_list(self.context, objects.BlockDeviceMappingList(), objects.BlockDeviceMapping, self.data) return compute_utils.get_device_name_for_instance( self.instance, bdms, device) @staticmethod def _fake_bdm(device): return fake_block_device.FakeDbBlockDeviceDict({ 'source_type': 'volume', 'destination_type': 'volume', 'device_name': device, 'no_device': None, 'volume_id': 'fake', 'snapshot_id': None, 'guest_format': None }) def test_wrap(self): self.data = [] for letter in string.ascii_lowercase[2:]: self.data.append(self._fake_bdm('/dev/vd' + letter)) device = self._validate_device() self.assertEqual(device, '/dev/vdaa') def test_wrap_plus_one(self): self.data = [] for letter in string.ascii_lowercase[2:]: self.data.append(self._fake_bdm('/dev/vd' + letter)) self.data.append(self._fake_bdm('/dev/vdaa')) device = self._validate_device() self.assertEqual(device, '/dev/vdab') def test_later(self): self.data = [ 
self._fake_bdm('/dev/vdc'), self._fake_bdm('/dev/vdd'), self._fake_bdm('/dev/vde'), ] device = self._validate_device() self.assertEqual(device, '/dev/vdf') def test_gap(self): self.data = [ self._fake_bdm('/dev/vdc'), self._fake_bdm('/dev/vde'), ] device = self._validate_device() self.assertEqual(device, '/dev/vdd') def test_no_bdms(self): self.data = [] device = self._validate_device() self.assertEqual(device, '/dev/vdc') def test_lxc_names_work(self): self.instance['root_device_name'] = '/dev/a' self.instance['ephemeral_device_name'] = '/dev/b' self.data = [] device = self._validate_device() self.assertEqual(device, '/dev/c') def test_name_conversion(self): self.data = [] device = self._validate_device('/dev/c') self.assertEqual(device, '/dev/vdc') device = self._validate_device('/dev/sdc') self.assertEqual(device, '/dev/vdc') device = self._validate_device('/dev/xvdc') self.assertEqual(device, '/dev/vdc') def test_invalid_device_prefix(self): self.assertRaises(exception.InvalidDevicePath, self._validate_device, '/baddata/vdc') def test_device_in_use(self): exc = self.assertRaises(exception.DevicePathInUse, self._validate_device, '/dev/vda') self.assertIn('/dev/vda', six.text_type(exc)) def test_swap(self): self.instance['default_swap_device'] = "/dev/vdc" device = self._validate_device() self.assertEqual(device, '/dev/vdd') def test_swap_no_ephemeral(self): self.instance.default_ephemeral_device = None self.instance.default_swap_device = "/dev/vdb" device = self._validate_device() self.assertEqual(device, '/dev/vdc') def test_ephemeral_xenapi(self): self.instance.flavor.ephemeral_gb = 10 self.instance.flavor.swap = 0 device = self._validate_device() self.assertEqual(device, '/dev/xvdc') def test_swap_xenapi(self): self.instance.flavor.ephemeral_gb = 0 self.instance.flavor.swap = 10 device = self._validate_device() self.assertEqual(device, '/dev/xvdb') def test_swap_and_ephemeral_xenapi(self): self.instance.flavor.ephemeral_gb = 10 self.instance.flavor.swap = 10 
device = self._validate_device() self.assertEqual(device, '/dev/xvdd') def test_swap_and_one_attachment_xenapi(self): self.instance.flavor.ephemeral_gb = 0 self.instance.flavor.swap = 10 device = self._validate_device() self.assertEqual(device, '/dev/xvdb') self.data.append(self._fake_bdm(device)) device = self._validate_device() self.assertEqual(device, '/dev/xvdd') def test_no_dev_root_device_name_get_next_name(self): self.instance['root_device_name'] = 'vda' device = self._validate_device() self.assertEqual('/dev/vdc', device) class DefaultDeviceNamesForInstanceTestCase(test.NoDBTestCase): def setUp(self): super(DefaultDeviceNamesForInstanceTestCase, self).setUp() self.context = context.RequestContext('fake', 'fake') self.ephemerals = block_device_obj.block_device_make_list( self.context, [fake_block_device.FakeDbBlockDeviceDict( {'id': 1, 'instance_uuid': uuids.block_device_instance, 'device_name': '/dev/vdb', 'source_type': 'blank', 'destination_type': 'local', 'delete_on_termination': True, 'guest_format': None, 'boot_index': -1})]) self.swap = block_device_obj.block_device_make_list( self.context, [fake_block_device.FakeDbBlockDeviceDict( {'id': 2, 'instance_uuid': uuids.block_device_instance, 'device_name': '/dev/vdc', 'source_type': 'blank', 'destination_type': 'local', 'delete_on_termination': True, 'guest_format': 'swap', 'boot_index': -1})]) self.block_device_mapping = block_device_obj.block_device_make_list( self.context, [fake_block_device.FakeDbBlockDeviceDict( {'id': 3, 'instance_uuid': uuids.block_device_instance, 'device_name': '/dev/vda', 'source_type': 'volume', 'destination_type': 'volume', 'volume_id': 'fake-volume-id-1', 'boot_index': 0}), fake_block_device.FakeDbBlockDeviceDict( {'id': 4, 'instance_uuid': uuids.block_device_instance, 'device_name': '/dev/vdd', 'source_type': 'snapshot', 'destination_type': 'volume', 'snapshot_id': 'fake-snapshot-id-1', 'boot_index': -1}), fake_block_device.FakeDbBlockDeviceDict( {'id': 5, 'instance_uuid': 
uuids.block_device_instance, 'device_name': '/dev/vde', 'source_type': 'blank', 'destination_type': 'volume', 'boot_index': -1})]) self.instance = {'uuid': uuids.instance, 'ephemeral_gb': 2} self.is_libvirt = False self.root_device_name = '/dev/vda' self.update_called = False self.patchers = [] self.patchers.append( mock.patch.object(objects.BlockDeviceMapping, 'save')) for patcher in self.patchers: patcher.start() def tearDown(self): super(DefaultDeviceNamesForInstanceTestCase, self).tearDown() for patcher in self.patchers: patcher.stop() def _test_default_device_names(self, *block_device_lists): compute_utils.default_device_names_for_instance(self.instance, self.root_device_name, *block_device_lists) def test_only_block_device_mapping(self): # Test no-op original_bdm = copy.deepcopy(self.block_device_mapping) self._test_default_device_names([], [], self.block_device_mapping) for original, new in zip(original_bdm, self.block_device_mapping): self.assertEqual(original.device_name, new.device_name) # Assert it defaults the missing one as expected self.block_device_mapping[1]['device_name'] = None self.block_device_mapping[2]['device_name'] = None self._test_default_device_names([], [], self.block_device_mapping) self.assertEqual('/dev/vdb', self.block_device_mapping[1]['device_name']) self.assertEqual('/dev/vdc', self.block_device_mapping[2]['device_name']) def test_with_ephemerals(self): # Test ephemeral gets assigned self.ephemerals[0]['device_name'] = None self._test_default_device_names(self.ephemerals, [], self.block_device_mapping) self.assertEqual(self.ephemerals[0]['device_name'], '/dev/vdb') self.block_device_mapping[1]['device_name'] = None self.block_device_mapping[2]['device_name'] = None self._test_default_device_names(self.ephemerals, [], self.block_device_mapping) self.assertEqual('/dev/vdc', self.block_device_mapping[1]['device_name']) self.assertEqual('/dev/vdd', self.block_device_mapping[2]['device_name']) def test_with_swap(self): # Test swap only 
self.swap[0]['device_name'] = None self._test_default_device_names([], self.swap, []) self.assertEqual(self.swap[0]['device_name'], '/dev/vdb') # Test swap and block_device_mapping self.swap[0]['device_name'] = None self.block_device_mapping[1]['device_name'] = None self.block_device_mapping[2]['device_name'] = None self._test_default_device_names([], self.swap, self.block_device_mapping) self.assertEqual(self.swap[0]['device_name'], '/dev/vdb') self.assertEqual('/dev/vdc', self.block_device_mapping[1]['device_name']) self.assertEqual('/dev/vdd', self.block_device_mapping[2]['device_name']) def test_all_together(self): # Test swap missing self.swap[0]['device_name'] = None self._test_default_device_names(self.ephemerals, self.swap, self.block_device_mapping) self.assertEqual(self.swap[0]['device_name'], '/dev/vdc') # Test swap and eph missing self.swap[0]['device_name'] = None self.ephemerals[0]['device_name'] = None self._test_default_device_names(self.ephemerals, self.swap, self.block_device_mapping) self.assertEqual(self.ephemerals[0]['device_name'], '/dev/vdb') self.assertEqual(self.swap[0]['device_name'], '/dev/vdc') # Test all missing self.swap[0]['device_name'] = None self.ephemerals[0]['device_name'] = None self.block_device_mapping[1]['device_name'] = None self.block_device_mapping[2]['device_name'] = None self._test_default_device_names(self.ephemerals, self.swap, self.block_device_mapping) self.assertEqual(self.ephemerals[0]['device_name'], '/dev/vdb') self.assertEqual(self.swap[0]['device_name'], '/dev/vdc') self.assertEqual('/dev/vdd', self.block_device_mapping[1]['device_name']) self.assertEqual('/dev/vde', self.block_device_mapping[2]['device_name']) class UsageInfoTestCase(test.TestCase): def setUp(self): def fake_get_nw_info(cls, ctxt, instance): self.assertTrue(ctxt.is_admin) return fake_network.fake_get_instance_nw_info(self, 1, 1) super(UsageInfoTestCase, self).setUp() self.stub_out('nova.network.api.get_instance_nw_info', fake_get_nw_info) 
fake_notifier.stub_notifier(self) self.addCleanup(fake_notifier.reset) self.flags(compute_driver='fake.FakeDriver', network_manager='nova.network.manager.FlatManager') self.compute = manager.ComputeManager() self.user_id = 'fake' self.project_id = 'fake' self.context = context.RequestContext(self.user_id, self.project_id) def fake_show(meh, context, id, **kwargs): return {'id': 1, 'properties': {'kernel_id': 1, 'ramdisk_id': 1}} self.flags(group='glance', api_servers=['http://localhost:9292']) self.stub_out('nova.tests.unit.image.fake._FakeImageService.show', fake_show) fake_network.set_stub_network_methods(self) fake_server_actions.stub_out_action_events(self) def test_notify_usage_exists(self): # Ensure 'exists' notification generates appropriate usage data. instance = create_instance(self.context) # Set some system metadata sys_metadata = {'image_md_key1': 'val1', 'image_md_key2': 'val2', 'other_data': 'meow'} instance.system_metadata.update(sys_metadata) instance.save() compute_utils.notify_usage_exists( rpc.get_notifier('compute'), self.context, instance) self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1) msg = fake_notifier.NOTIFICATIONS[0] self.assertEqual(msg.priority, 'INFO') self.assertEqual(msg.event_type, 'compute.instance.exists') payload = msg.payload self.assertEqual(payload['tenant_id'], self.project_id) self.assertEqual(payload['user_id'], self.user_id) self.assertEqual(payload['instance_id'], instance['uuid']) self.assertEqual(payload['instance_type'], 'm1.tiny') type_id = flavors.get_flavor_by_name('m1.tiny')['id'] self.assertEqual(str(payload['instance_type_id']), str(type_id)) flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid'] self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id)) for attr in ('display_name', 'created_at', 'launched_at', 'state', 'state_description', 'bandwidth', 'audit_period_beginning', 'audit_period_ending', 'image_meta'): self.assertIn(attr, payload, "Key %s not in payload" % attr) 
self.assertEqual(payload['image_meta'], {'md_key1': 'val1', 'md_key2': 'val2'}) image_ref_url = "%s/images/%s" % (glance.generate_glance_url(), uuids.fake_image_ref) self.assertEqual(payload['image_ref_url'], image_ref_url) self.compute.terminate_instance(self.context, instance, [], []) def test_notify_usage_exists_deleted_instance(self): # Ensure 'exists' notification generates appropriate usage data. instance = create_instance(self.context) # Set some system metadata sys_metadata = {'image_md_key1': 'val1', 'image_md_key2': 'val2', 'other_data': 'meow'} instance.system_metadata.update(sys_metadata) instance.save() self.compute.terminate_instance(self.context, instance, [], []) compute_utils.notify_usage_exists( rpc.get_notifier('compute'), self.context, instance) msg = fake_notifier.NOTIFICATIONS[-1] self.assertEqual(msg.priority, 'INFO') self.assertEqual(msg.event_type, 'compute.instance.exists') payload = msg.payload self.assertEqual(payload['tenant_id'], self.project_id) self.assertEqual(payload['user_id'], self.user_id) self.assertEqual(payload['instance_id'], instance['uuid']) self.assertEqual(payload['instance_type'], 'm1.tiny') type_id = flavors.get_flavor_by_name('m1.tiny')['id'] self.assertEqual(str(payload['instance_type_id']), str(type_id)) flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid'] self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id)) for attr in ('display_name', 'created_at', 'launched_at', 'state', 'state_description', 'bandwidth', 'audit_period_beginning', 'audit_period_ending', 'image_meta'): self.assertIn(attr, payload, "Key %s not in payload" % attr) self.assertEqual(payload['image_meta'], {'md_key1': 'val1', 'md_key2': 'val2'}) image_ref_url = "%s/images/%s" % (glance.generate_glance_url(), uuids.fake_image_ref) self.assertEqual(payload['image_ref_url'], image_ref_url) def test_notify_about_instance_action(self): instance = create_instance(self.context) compute_utils.notify_about_instance_action( self.context, 
instance, host='fake-compute', action='delete', phase='start') self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1) notification = fake_notifier.VERSIONED_NOTIFICATIONS[0] self.assertEqual(notification['priority'], 'INFO') self.assertEqual(notification['event_type'], 'instance.delete.start') self.assertEqual(notification['publisher_id'], 'nova-compute:fake-compute') payload = notification['payload']['nova_object.data'] self.assertEqual(payload['tenant_id'], self.project_id) self.assertEqual(payload['user_id'], self.user_id) self.assertEqual(payload['uuid'], instance['uuid']) flavorid = flavors.get_flavor_by_name('m1.tiny')['flavorid'] flavor = payload['flavor']['nova_object.data'] self.assertEqual(str(flavor['flavorid']), flavorid) for attr in ('display_name', 'created_at', 'launched_at', 'state', 'task_state', 'display_description', 'locked', 'auto_disk_config'): self.assertIn(attr, payload, "Key %s not in payload" % attr) self.assertEqual(payload['image_uuid'], uuids.fake_image_ref) def test_notify_about_volume_swap(self): instance = create_instance(self.context) compute_utils.notify_about_volume_swap( self.context, instance, 'fake-compute', fields.NotificationAction.VOLUME_SWAP, fields.NotificationPhase.START, uuids.old_volume_id, uuids.new_volume_id) self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1) notification = fake_notifier.VERSIONED_NOTIFICATIONS[0] self.assertEqual('INFO', notification['priority']) self.assertEqual('instance.%s.%s' % (fields.NotificationAction.VOLUME_SWAP, fields.NotificationPhase.START), notification['event_type']) self.assertEqual('nova-compute:fake-compute', notification['publisher_id']) payload = notification['payload']['nova_object.data'] self.assertEqual(self.project_id, payload['tenant_id']) self.assertEqual(self.user_id, payload['user_id']) self.assertEqual(instance['uuid'], payload['uuid']) flavorid = flavors.get_flavor_by_name('m1.tiny')['flavorid'] flavor = payload['flavor']['nova_object.data'] 
self.assertEqual(flavorid, str(flavor['flavorid'])) for attr in ('display_name', 'created_at', 'launched_at', 'state', 'task_state'): self.assertIn(attr, payload) self.assertEqual(uuids.fake_image_ref, payload['image_uuid']) self.assertEqual(uuids.old_volume_id, payload['old_volume_id']) self.assertEqual(uuids.new_volume_id, payload['new_volume_id']) def test_notify_about_volume_swap_with_error(self): instance = create_instance(self.context) try: # To get exception trace, raise and catch an exception raise test.TestingException('Volume swap error.') except Exception as ex: compute_utils.notify_about_volume_swap( self.context, instance, 'fake-compute', fields.NotificationAction.VOLUME_SWAP, fields.NotificationPhase.ERROR, uuids.old_volume_id, uuids.new_volume_id, ex) self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1) notification = fake_notifier.VERSIONED_NOTIFICATIONS[0] self.assertEqual('ERROR', notification['priority']) self.assertEqual('instance.%s.%s' % (fields.NotificationAction.VOLUME_SWAP, fields.NotificationPhase.ERROR), notification['event_type']) self.assertEqual('nova-compute:fake-compute', notification['publisher_id']) payload = notification['payload']['nova_object.data'] self.assertEqual(self.project_id, payload['tenant_id']) self.assertEqual(self.user_id, payload['user_id']) self.assertEqual(instance['uuid'], payload['uuid']) flavorid = flavors.get_flavor_by_name('m1.tiny')['flavorid'] flavor = payload['flavor']['nova_object.data'] self.assertEqual(flavorid, str(flavor['flavorid'])) for attr in ('display_name', 'created_at', 'launched_at', 'state', 'task_state'): self.assertIn(attr, payload) self.assertEqual(uuids.fake_image_ref, payload['image_uuid']) self.assertEqual(uuids.old_volume_id, payload['old_volume_id']) self.assertEqual(uuids.new_volume_id, payload['new_volume_id']) # Check ExceptionPayload exception_payload = payload['fault']['nova_object.data'] self.assertEqual('TestingException', exception_payload['exception']) 
self.assertEqual('Volume swap error.', exception_payload['exception_message']) self.assertEqual('test_notify_about_volume_swap_with_error', exception_payload['function_name']) self.assertEqual('nova.tests.unit.compute.test_compute_utils', exception_payload['module_name']) def test_notify_usage_exists_instance_not_found(self): # Ensure 'exists' notification generates appropriate usage data. instance = create_instance(self.context) self.compute.terminate_instance(self.context, instance, [], []) compute_utils.notify_usage_exists( rpc.get_notifier('compute'), self.context, instance) msg = fake_notifier.NOTIFICATIONS[-1] self.assertEqual(msg.priority, 'INFO') self.assertEqual(msg.event_type, 'compute.instance.exists') payload = msg.payload self.assertEqual(payload['tenant_id'], self.project_id) self.assertEqual(payload['user_id'], self.user_id) self.assertEqual(payload['instance_id'], instance['uuid']) self.assertEqual(payload['instance_type'], 'm1.tiny') type_id = flavors.get_flavor_by_name('m1.tiny')['id'] self.assertEqual(str(payload['instance_type_id']), str(type_id)) flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid'] self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id)) for attr in ('display_name', 'created_at', 'launched_at', 'state', 'state_description', 'bandwidth', 'audit_period_beginning', 'audit_period_ending', 'image_meta'): self.assertIn(attr, payload, "Key %s not in payload" % attr) self.assertEqual(payload['image_meta'], {}) image_ref_url = "%s/images/%s" % (glance.generate_glance_url(), uuids.fake_image_ref) self.assertEqual(payload['image_ref_url'], image_ref_url) def test_notify_about_instance_usage(self): instance = create_instance(self.context) # Set some system metadata sys_metadata = {'image_md_key1': 'val1', 'image_md_key2': 'val2', 'other_data': 'meow'} instance.system_metadata.update(sys_metadata) instance.save() extra_usage_info = {'image_name': 'fake_name'} compute_utils.notify_about_instance_usage( 
rpc.get_notifier('compute'), self.context, instance, 'create.start', extra_usage_info=extra_usage_info) self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1) msg = fake_notifier.NOTIFICATIONS[0] self.assertEqual(msg.priority, 'INFO') self.assertEqual(msg.event_type, 'compute.instance.create.start') payload = msg.payload self.assertEqual(payload['tenant_id'], self.project_id) self.assertEqual(payload['user_id'], self.user_id) self.assertEqual(payload['instance_id'], instance['uuid']) self.assertEqual(payload['instance_type'], 'm1.tiny') type_id = flavors.get_flavor_by_name('m1.tiny')['id'] self.assertEqual(str(payload['instance_type_id']), str(type_id)) flavor_id = flavors.get_flavor_by_name('m1.tiny')['flavorid'] self.assertEqual(str(payload['instance_flavor_id']), str(flavor_id)) for attr in ('display_name', 'created_at', 'launched_at', 'state', 'state_description', 'image_meta'): self.assertIn(attr, payload, "Key %s not in payload" % attr) self.assertEqual(payload['image_meta'], {'md_key1': 'val1', 'md_key2': 'val2'}) self.assertEqual(payload['image_name'], 'fake_name') image_ref_url = "%s/images/%s" % (glance.generate_glance_url(), uuids.fake_image_ref) self.assertEqual(payload['image_ref_url'], image_ref_url) self.compute.terminate_instance(self.context, instance, [], []) def test_notify_about_aggregate_update_with_id(self): # Set aggregate payload aggregate_payload = {'aggregate_id': 1} compute_utils.notify_about_aggregate_update(self.context, "create.end", aggregate_payload) self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1) msg = fake_notifier.NOTIFICATIONS[0] self.assertEqual(msg.priority, 'INFO') self.assertEqual(msg.event_type, 'aggregate.create.end') payload = msg.payload self.assertEqual(payload['aggregate_id'], 1) def test_notify_about_aggregate_update_with_name(self): # Set aggregate payload aggregate_payload = {'name': 'fakegroup'} compute_utils.notify_about_aggregate_update(self.context, "create.start", aggregate_payload) 
self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1) msg = fake_notifier.NOTIFICATIONS[0] self.assertEqual(msg.priority, 'INFO') self.assertEqual(msg.event_type, 'aggregate.create.start') payload = msg.payload self.assertEqual(payload['name'], 'fakegroup') def test_notify_about_aggregate_update_without_name_id(self): # Set empty aggregate payload aggregate_payload = {} compute_utils.notify_about_aggregate_update(self.context, "create.start", aggregate_payload) self.assertEqual(len(fake_notifier.NOTIFICATIONS), 0) class ComputeUtilsGetValFromSysMetadata(test.NoDBTestCase): def test_get_value_from_system_metadata(self): instance = fake_instance.fake_instance_obj('fake-context') system_meta = {'int_val': 1, 'int_string': '2', 'not_int': 'Nope'} instance.system_metadata = system_meta result = compute_utils.get_value_from_system_metadata( instance, 'int_val', int, 0) self.assertEqual(1, result) result = compute_utils.get_value_from_system_metadata( instance, 'int_string', int, 0) self.assertEqual(2, result) result = compute_utils.get_value_from_system_metadata( instance, 'not_int', int, 0) self.assertEqual(0, result) class ComputeUtilsGetNWInfo(test.NoDBTestCase): def test_instance_object_none_info_cache(self): inst = fake_instance.fake_instance_obj('fake-context', expected_attrs=['info_cache']) self.assertIsNone(inst.info_cache) result = compute_utils.get_nw_info_for_instance(inst) self.assertEqual(jsonutils.dumps([]), result.json()) class ComputeUtilsGetRebootTypes(test.NoDBTestCase): def setUp(self): super(ComputeUtilsGetRebootTypes, self).setUp() self.context = context.RequestContext('fake', 'fake') def test_get_reboot_type_started_soft(self): reboot_type = compute_utils.get_reboot_type(task_states.REBOOT_STARTED, power_state.RUNNING) self.assertEqual(reboot_type, 'SOFT') def test_get_reboot_type_pending_soft(self): reboot_type = compute_utils.get_reboot_type(task_states.REBOOT_PENDING, power_state.RUNNING) self.assertEqual(reboot_type, 'SOFT') def 
test_get_reboot_type_hard(self): reboot_type = compute_utils.get_reboot_type('foo', power_state.RUNNING) self.assertEqual(reboot_type, 'HARD') def test_get_reboot_not_running_hard(self): reboot_type = compute_utils.get_reboot_type('foo', 'bar') self.assertEqual(reboot_type, 'HARD') class ComputeUtilsTestCase(test.NoDBTestCase): def setUp(self): super(ComputeUtilsTestCase, self).setUp() self.compute = 'compute' self.user_id = 'fake' self.project_id = 'fake' self.context = context.RequestContext(self.user_id, self.project_id) @mock.patch.object(objects.InstanceActionEvent, 'event_start') @mock.patch.object(objects.InstanceActionEvent, 'event_finish_with_failure') def test_wrap_instance_event(self, mock_finish, mock_start): inst = {"uuid": uuids.instance} @compute_utils.wrap_instance_event(prefix='compute') def fake_event(self, context, instance): pass fake_event(self.compute, self.context, instance=inst) self.assertTrue(mock_start.called) self.assertTrue(mock_finish.called) @mock.patch.object(objects.InstanceActionEvent, 'event_start') @mock.patch.object(objects.InstanceActionEvent, 'event_finish_with_failure') def test_wrap_instance_event_return(self, mock_finish, mock_start): inst = {"uuid": uuids.instance} @compute_utils.wrap_instance_event(prefix='compute') def fake_event(self, context, instance): return True retval = fake_event(self.compute, self.context, instance=inst) self.assertTrue(retval) self.assertTrue(mock_start.called) self.assertTrue(mock_finish.called) @mock.patch.object(objects.InstanceActionEvent, 'event_start') @mock.patch.object(objects.InstanceActionEvent, 'event_finish_with_failure') def test_wrap_instance_event_log_exception(self, mock_finish, mock_start): inst = {"uuid": uuids.instance} @compute_utils.wrap_instance_event(prefix='compute') def fake_event(self2, context, instance): raise exception.NovaException() self.assertRaises(exception.NovaException, fake_event, self.compute, self.context, instance=inst) self.assertTrue(mock_start.called) 
self.assertTrue(mock_finish.called) args, kwargs = mock_finish.call_args self.assertIsInstance(kwargs['exc_val'], exception.NovaException) @mock.patch('netifaces.interfaces') def test_get_machine_ips_value_error(self, mock_interfaces): # Tests that the utility method does not explode if netifaces raises # a ValueError. iface = mock.sentinel mock_interfaces.return_value = [iface] with mock.patch('netifaces.ifaddresses', side_effect=ValueError) as mock_ifaddresses: addresses = compute_utils.get_machine_ips() self.assertEqual([], addresses) mock_ifaddresses.assert_called_once_with(iface) @mock.patch('nova.compute.utils.notify_about_instance_usage') @mock.patch('nova.objects.Instance.destroy') def test_notify_about_instance_delete(self, mock_instance_destroy, mock_notify_usage): instance = fake_instance.fake_instance_obj( self.context, expected_attrs=('system_metadata',)) with compute_utils.notify_about_instance_delete( mock.sentinel.notifier, self.context, instance): instance.destroy() expected_notify_calls = [ mock.call(mock.sentinel.notifier, self.context, instance, 'delete.start'), mock.call(mock.sentinel.notifier, self.context, instance, 'delete.end', system_metadata=instance.system_metadata) ] mock_notify_usage.assert_has_calls(expected_notify_calls) class ComputeUtilsQuotaDeltaTestCase(test.TestCase): def setUp(self): super(ComputeUtilsQuotaDeltaTestCase, self).setUp() self.context = context.RequestContext('fake', 'fake') def test_upsize_quota_delta(self): old_flavor = flavors.get_flavor_by_name('m1.tiny') new_flavor = flavors.get_flavor_by_name('m1.medium') expected_deltas = { 'cores': new_flavor['vcpus'] - old_flavor['vcpus'], 'ram': new_flavor['memory_mb'] - old_flavor['memory_mb'] } deltas = compute_utils.upsize_quota_delta(self.context, new_flavor, old_flavor) self.assertEqual(expected_deltas, deltas) def test_downsize_quota_delta(self): inst = create_instance(self.context, params=None) inst.old_flavor = flavors.get_flavor_by_name('m1.medium') 
inst.new_flavor = flavors.get_flavor_by_name('m1.tiny') expected_deltas = { 'cores': (inst.new_flavor['vcpus'] - inst.old_flavor['vcpus']), 'ram': (inst.new_flavor['memory_mb'] - inst.old_flavor['memory_mb']) } deltas = compute_utils.downsize_quota_delta(self.context, inst) self.assertEqual(expected_deltas, deltas) def test_reverse_quota_delta(self): inst = create_instance(self.context, params=None) inst.old_flavor = flavors.get_flavor_by_name('m1.tiny') inst.new_flavor = flavors.get_flavor_by_name('m1.medium') expected_deltas = { 'cores': -1 * (inst.new_flavor['vcpus'] - inst.old_flavor['vcpus']), 'ram': -1 * (inst.new_flavor['memory_mb'] - inst.old_flavor['memory_mb']) } deltas = compute_utils.reverse_upsize_quota_delta(self.context, inst) self.assertEqual(expected_deltas, deltas) @mock.patch.object(objects.Quotas, 'reserve') @mock.patch.object(objects.quotas, 'ids_from_instance') def test_reserve_quota_delta(self, mock_ids_from_instance, mock_reserve): quotas = objects.Quotas(context=context) inst = create_instance(self.context, params=None) inst.old_flavor = flavors.get_flavor_by_name('m1.tiny') inst.new_flavor = flavors.get_flavor_by_name('m1.medium') mock_ids_from_instance.return_value = (inst.project_id, inst.user_id) mock_reserve.return_value = quotas deltas = compute_utils.upsize_quota_delta(self.context, inst.new_flavor, inst.old_flavor) compute_utils.reserve_quota_delta(self.context, deltas, inst) mock_reserve.assert_called_once_with(project_id=inst.project_id, user_id=inst.user_id, **deltas) class IsVolumeBackedInstanceTestCase(test.TestCase): def setUp(self): super(IsVolumeBackedInstanceTestCase, self).setUp() self.user_id = 'fake' self.project_id = 'fake' self.context = context.RequestContext(self.user_id, self.project_id) def test_is_volume_backed_instance_no_bdm_no_image(self): ctxt = self.context instance = create_instance(ctxt, params={'image_ref': ''}) self.assertTrue( compute_utils.is_volume_backed_instance(ctxt, instance, None)) def 
test_is_volume_backed_instance_empty_bdm_with_image(self): ctxt = self.context instance = create_instance(ctxt, params={ 'root_device_name': 'vda', 'image_ref': FAKE_IMAGE_REF }) self.assertFalse( compute_utils.is_volume_backed_instance( ctxt, instance, block_device_obj.block_device_make_list(ctxt, []))) def test_is_volume_backed_instance_bdm_volume_no_image(self): ctxt = self.context instance = create_instance(ctxt, params={ 'root_device_name': 'vda', 'image_ref': '' }) bdms = block_device_obj.block_device_make_list(ctxt, [fake_block_device.FakeDbBlockDeviceDict( {'source_type': 'volume', 'device_name': '/dev/vda', 'volume_id': uuids.volume_id, 'instance_uuid': 'f8000000-0000-0000-0000-000000000000', 'boot_index': 0, 'destination_type': 'volume'})]) self.assertTrue( compute_utils.is_volume_backed_instance(ctxt, instance, bdms)) def test_is_volume_backed_instance_bdm_local_no_image(self): # if the root device is local the instance is not volume backed, even # if no image_ref is set. ctxt = self.context instance = create_instance(ctxt, params={ 'root_device_name': 'vda', 'image_ref': '' }) bdms = block_device_obj.block_device_make_list(ctxt, [fake_block_device.FakeDbBlockDeviceDict( {'source_type': 'volume', 'device_name': '/dev/vda', 'volume_id': uuids.volume_id, 'destination_type': 'local', 'instance_uuid': 'f8000000-0000-0000-0000-000000000000', 'boot_index': 0, 'snapshot_id': None}), fake_block_device.FakeDbBlockDeviceDict( {'source_type': 'volume', 'device_name': '/dev/vdb', 'instance_uuid': 'f8000000-0000-0000-0000-000000000000', 'boot_index': 1, 'destination_type': 'volume', 'volume_id': 'c2ec2156-d75e-11e2-985b-5254009297d6', 'snapshot_id': None})]) self.assertFalse( compute_utils.is_volume_backed_instance(ctxt, instance, bdms)) def test_is_volume_backed_instance_bdm_volume_with_image(self): ctxt = self.context instance = create_instance(ctxt, params={ 'root_device_name': 'vda', 'image_ref': FAKE_IMAGE_REF }) bdms = 
block_device_obj.block_device_make_list(ctxt, [fake_block_device.FakeDbBlockDeviceDict( {'source_type': 'volume', 'device_name': '/dev/vda', 'volume_id': uuids.volume_id, 'boot_index': 0, 'destination_type': 'volume'})]) self.assertTrue( compute_utils.is_volume_backed_instance(ctxt, instance, bdms)) def test_is_volume_backed_instance_bdm_snapshot(self): ctxt = self.context instance = create_instance(ctxt, params={ 'root_device_name': 'vda' }) bdms = block_device_obj.block_device_make_list(ctxt, [fake_block_device.FakeDbBlockDeviceDict( {'source_type': 'volume', 'device_name': '/dev/vda', 'snapshot_id': 'de8836ac-d75e-11e2-8271-5254009297d6', 'instance_uuid': 'f8000000-0000-0000-0000-000000000000', 'destination_type': 'volume', 'boot_index': 0, 'volume_id': None})]) self.assertTrue( compute_utils.is_volume_backed_instance(ctxt, instance, bdms)) @mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid') def test_is_volume_backed_instance_empty_bdm_by_uuid(self, mock_bdms): ctxt = self.context instance = create_instance(ctxt) mock_bdms.return_value = block_device_obj.block_device_make_list( ctxt, []) self.assertFalse( compute_utils.is_volume_backed_instance(ctxt, instance, None)) mock_bdms.assert_called_with(ctxt, instance.uuid)
apache-2.0
1,194,183,343,546,489,300
42.724204
79
0.583631
false
4.023605
true
false
false
Holovin/D_GrabDemo
d_parser/old_configs/v25/d_spider_ele.py
1
9841
from urllib.parse import urljoin from grab.spider import Spider, Task from d_parser.helpers.cookies_init import cookies_init from d_parser.helpers.el_parser import get_max_page from d_parser.helpers.parser_extender import check_body_errors, process_error, common_init from d_parser.helpers.re_set import Ree from helpers.config import Config from helpers.url_generator import UrlGenerator # Warn: Don't remove task argument even if not use it (it's break grab and spider crashed) # Warn: noinspection PyUnusedLocal class DSpider(Spider): initial_urls = Config.get_seq('SITE_URL') def __init__(self, thread_number, try_limit=0): super().__init__(thread_number=thread_number, network_try_limit=try_limit, priority_mode='const') DSpider._check_body_errors = check_body_errors DSpider._process_error = process_error DSpider._common_init = common_init self._common_init(try_limit) Ree.init() Ree.is_page_number(Config.get('SITE_PAGE_PARAM')) self.const_price_on_request = Config.get('APP_PRICE_ON_REQUEST') self.const_stock_zero = Config.get('APP_STOCK_ZERO') self.const_default_place = 'Полежаевская' def create_grab_instance(self, **kwargs): g = super(DSpider, self).create_grab_instance(**kwargs) return cookies_init(self.cookie_jar, g) def task_initial(self, grab, task): self.logger.info('[{}] Initial url: {}'.format(task.name, task.url)) if self._check_body_errors(grab, task): self.logger.fatal('[{}] Err task with url {}, attempt {}'.format(task.name, task.url, task.task_try_count)) return try: cat_list = grab.doc.select('//ul[@class="catalog_nav_1"]//a[contains(@href, "catalog")]') for index, row in enumerate(cat_list): raw_link = row.attr('href') # make absolute urls if needed if raw_link[:1] == '/': raw_link = urljoin(self.domain, raw_link) yield Task('parse_cat', url=raw_link, priority=90, raw=True) except Exception as e: self._process_error(grab, task, e) finally: self.logger.info('[{}] Finish: {}'.format(task.name, task.url)) def task_parse_cat(self, grab, task): 
self.logger.info('[{}] Start: {}'.format(task.name, task.url)) if self._check_body_errors(grab, task): self.logger.fatal('[{}] Err task with url {}, attempt {}'.format(task.name, task.url, task.task_try_count)) return try: cat_list = grab.doc.select('//div[@class="category_list"]//a[contains(@href, "catalog")]') for index, row in enumerate(cat_list): raw_link = row.attr('href') # make absolute urls if needed if raw_link[:1] == '/': raw_link = urljoin(self.domain, raw_link) yield Task('parse_items', url=raw_link, priority=100, raw=True) except Exception as e: self._process_error(grab, task, e) finally: self.logger.info('[{}] Finish: {}'.format(task.name, task.url)) # def task_parse_page(self, grab, task): # self.logger.info('[{}] Start: {}'.format(task.name, task.url)) # # if self._check_body_errors(grab, task): # self.logger.fatal('[{}] Err task with url {}, attempt {}'.format(task.name, task.url, task.task_try_count)) # return # # try: # items = grab.doc.select('//a[contains(@href, "{}")]'.format(Config.get('SITE_PAGE_PARAM'))) # max_page = get_max_page(items, 1) # self.logger.info('[{}] Find max page: {}'.format(task.name, max_page)) # # url_gen = UrlGenerator(task.url, Config.get('SITE_PAGE_PARAM')) # # for p in range(1, max_page + 1): # url = url_gen.get_page(p) # yield Task('parse_items', url=url, priority=110) # # except Exception as e: # self._process_error(grab, task, e) # # finally: # self.logger.info('[{}] Finish: {}'.format(task.name, task.url)) def task_parse_items(self, grab, task): self.logger.info('[{}] Start: {}'.format(task.name, task.url)) if self._check_body_errors(grab, task): if task.task_try_count < self.err_limit: self.logger.error('[{}] Restart task with url {}, attempt {}'.format(task.name, task.url, task.task_try_count)) yield Task('parse_items', url=task.url, priority=105, task_try_count=task.task_try_count + 1, raw=True) else: self.logger.error('[{}] Skip task with url {}, attempt {}'.format(task.name, task.url, task.task_try_count)) 
return try: # parse pagination numbers if not task.get('d_skip_page_check'): items = grab.doc.select('//a[contains(@href, "{}")]'.format(Config.get('SITE_PAGE_PARAM'))) max_page = get_max_page(items, 1) self.logger.info('[{}] Find max page: {}'.format(task.name, max_page)) url_gen = UrlGenerator(task.url, Config.get('SITE_PAGE_PARAM')) # self-execute from 2 page (if needed) for p in range(2, max_page + 1): url = url_gen.get_page(p) yield Task('parse_items', url=url, priority=100, d_skip_page_check=True, raw=True) # parse items items_list = grab.doc.select('//div[@class="cart_table"]/div/div/table/tbody/tr') for index, row in enumerate(items_list): try: # NAME item_name = row.select('./td[1]//div[@class="description"]/div/a').text().strip() # UNIT unit = row.select('./td[2]').text().strip() if unit == '': unit = 'ед.' # PRICE price_raw = row.select('./td[6]//meta[@itemprop="lowprice"]').attr('content') match = Ree.float.match(price_raw) # check & fix if not match: self.logger.warning('[{}] Skip item, because price is {} (line: {})'.format(task.name, price_raw, index)) continue price = match.groupdict()['price'].replace(',', '.') # COUNT count = row.select('./td[5]') count_text = count.text().strip() # case 1: string line if count_text == 'распродано': item_count = self.const_price_on_request item_place = self.const_default_place # OUTPUT self.logger.debug('[{}] Item added, index {} at url {}'.format(task.name, index, task.url)) self.result.append({ 'name': item_name, 'count': item_count, 'unit': unit, 'price': price, 'place': item_place }) # case 2: string line elif count_text == 'под заказ': item_count = self.const_stock_zero item_place = self.const_default_place # OUTPUT self.logger.debug('[{}] Item added, index {} at url {}'.format(task.name, index, task.url)) self.result.append({ 'name': item_name, 'count': item_count, 'unit': unit, 'price': price, 'place': item_place }) # case 3 else: count_rows = count.select('.//div[@class="layer_info"]/table/tbody/tr') for 
count_row in count_rows: item_place = count_row.select('./td[1]').text().strip() item_count = 0 # add stock place_count_stock = count_row.select('./td[1]').text().strip() if Ree.float.match(place_count_stock): item_count += float(place_count_stock) # add expo place_count_expo = count_row.select('./td[2]').text().strip() if Ree.float.match(place_count_expo): item_count += float(place_count_expo) if item_count > 0: # OUTPUT self.logger.debug('[{}] Item added, index {} at url {}'.format(task.name, index, task.url)) self.result.append({ 'name': item_name, # 3.140 -> 3.14; 3.0 -> 3 'count': '{0:g}'.format(item_count), 'unit': unit, 'price': price, 'place': item_place }) except IndexError as e: self.logger.warning('[{}] Skip item: {}, {}'.format(task.name, type(e).__name__, task.url)) except Exception as e: self._process_error(grab, task, e) finally: self.logger.info('[{}] Finish: {}'.format(task.name, task.url))
mit
584,874,508,954,734,000
42.211454
129
0.485574
false
4.100753
true
false
false
olga-weslowskij/olga.weslowskij
mylibs/myList.py
1
2762
# Copyright (C) weslowskij # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. from xml.etree.ElementTree import SubElement,Element from mylibs.myPersistent import PersistentObject class MyList(list, PersistentObject): def __init__(self, *args, **kwargs): #super(MyList,self).__init__(*args, **kwargs) list.__init__(self, *args, **kwargs) self.nid = "None" self.dog_class = "unchanged" self.dog_class = None def toXml2(self, tree, attributename): if tree is not None: song = SubElement(tree, attributename) else: song = Element(str(self.__class__.__name__)) #song = SubElement(tree, attributename) song.attrib["nid"]= repr(self.nid) titles = self.__dict__.iterkeys() for i, o in enumerate(self): #pdb.set_trace() #window = SubElement(song, "pos") #window.attrib["pos"]=repr(i) window = song #o = self.__getattribute__(title) title = str(i) # exceptions """ if title in self.persistentexception: print "skipping" , title continue """ """ if title in self.persistentexceptiondontexpand: newtitle = ET.SubElement(song, str(title)) newtitle.text = repr(o) continue """ if hasattr(o, "toXml2"): o.toXml2(window,"pos_"+repr(i)) else: newtitle = SubElement(window, o.__class__.__name__) newtitle.text = repr(o) return song def fromXml2(self,tree): for pos in tree: dog = None if self.dog_class is None: dog=PersistentObject() else: dog = 
self.dog_class() dog.fromXml2(pos) #print dog.__dict__ self.append(dog) def append(self, p_object): list.append(self,p_object) def items(self): return list(enumerate(self))
gpl-2.0
8,055,053,476,284,627,000
27.770833
81
0.573135
false
4.197568
false
false
false
hclivess/Stallion
nuitka/Cryptodome/SelfTest/Cipher/test_OFB.py
2
9330
# =================================================================== # # Copyright (c) 2015, Legrandin <helderijs@gmail.com> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# =================================================================== import unittest from Cryptodome.SelfTest.st_common import list_test_cases from Cryptodome.Util.py3compat import tobytes, b, unhexlify from Cryptodome.Cipher import AES, DES3, DES from Cryptodome.Hash import SHAKE128 def get_tag_random(tag, length): return SHAKE128.new(data=tobytes(tag)).read(length) from Cryptodome.SelfTest.Cipher.test_CBC import BlockChainingTests class OfbTests(BlockChainingTests): aes_mode = AES.MODE_OFB des3_mode = DES3.MODE_OFB # Redefine test_unaligned_data_128/64 def test_unaligned_data_128(self): plaintexts = [ b("7777777") ] * 100 cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=8) self.assertEqual(b("").join(ciphertexts), cipher.encrypt(b("").join(plaintexts))) cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] cipher = AES.new(self.key_128, AES.MODE_CFB, self.iv_128, segment_size=128) self.assertEqual(b("").join(ciphertexts), cipher.encrypt(b("").join(plaintexts))) def test_unaligned_data_64(self): plaintexts = [ b("7777777") ] * 100 cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=8) self.assertEqual(b("").join(ciphertexts), cipher.encrypt(b("").join(plaintexts))) cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) ciphertexts = [ cipher.encrypt(x) for x in plaintexts ] cipher = DES3.new(self.key_192, DES3.MODE_CFB, self.iv_64, segment_size=64) self.assertEqual(b("").join(ciphertexts), cipher.encrypt(b("").join(plaintexts))) from Cryptodome.SelfTest.Cipher.test_CBC import NistBlockChainingVectors class NistOfbVectors(NistBlockChainingVectors): aes_mode = 
AES.MODE_OFB des_mode = DES.MODE_OFB des3_mode = DES3.MODE_OFB # Create one test method per file nist_aes_kat_mmt_files = ( # KAT "OFBGFSbox128.rsp", "OFBGFSbox192.rsp", "OFBGFSbox256.rsp", "OFBKeySbox128.rsp", "OFBKeySbox192.rsp", "OFBKeySbox256.rsp", "OFBVarKey128.rsp", "OFBVarKey192.rsp", "OFBVarKey256.rsp", "OFBVarTxt128.rsp", "OFBVarTxt192.rsp", "OFBVarTxt256.rsp", # MMT "OFBMMT128.rsp", "OFBMMT192.rsp", "OFBMMT256.rsp", ) nist_aes_mct_files = ( "OFBMCT128.rsp", "OFBMCT192.rsp", "OFBMCT256.rsp", ) for file_name in nist_aes_kat_mmt_files: def new_func(self, file_name=file_name): self._do_kat_aes_test(file_name) setattr(NistOfbVectors, "test_AES_" + file_name, new_func) for file_name in nist_aes_mct_files: def new_func(self, file_name=file_name): self._do_mct_aes_test(file_name) setattr(NistOfbVectors, "test_AES_" + file_name, new_func) del file_name, new_func nist_tdes_files = ( "TOFBMMT2.rsp", # 2TDES "TOFBMMT3.rsp", # 3TDES "TOFBinvperm.rsp", # Single DES "TOFBpermop.rsp", "TOFBsubtab.rsp", "TOFBvarkey.rsp", "TOFBvartext.rsp", ) for file_name in nist_tdes_files: def new_func(self, file_name=file_name): self._do_tdes_test(file_name) setattr(NistOfbVectors, "test_TDES_" + file_name, new_func) # END OF NIST OFB TEST VECTORS class SP800TestVectors(unittest.TestCase): """Class exercising the OFB test vectors found in Section F.4 of NIST SP 800-3A""" def test_aes_128(self): plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ '30c81c46a35ce411e5fbc1191a0a52ef' +\ 'f69f2445df4f9b17ad2b417be66c3710' ciphertext = '3b3fd92eb72dad20333449f8e83cfb4a' +\ '7789508d16918f03f53c52dac54ed825' +\ '9740051e9c5fecf64344f7a82260edcc' +\ '304c6528f659c77866a510d9c1d6ae5e' key = '2b7e151628aed2a6abf7158809cf4f3c' iv = '000102030405060708090a0b0c0d0e0f' key = unhexlify(key) iv = unhexlify(iv) plaintext = unhexlify(plaintext) ciphertext = unhexlify(ciphertext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.encrypt(plaintext), 
ciphertext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.decrypt(ciphertext), plaintext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) def test_aes_192(self): plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ '30c81c46a35ce411e5fbc1191a0a52ef' +\ 'f69f2445df4f9b17ad2b417be66c3710' ciphertext = 'cdc80d6fddf18cab34c25909c99a4174' +\ 'fcc28b8d4c63837c09e81700c1100401' +\ '8d9a9aeac0f6596f559c6d4daf59a5f2' +\ '6d9f200857ca6c3e9cac524bd9acc92a' key = '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b' iv = '000102030405060708090a0b0c0d0e0f' key = unhexlify(key) iv = unhexlify(iv) plaintext = unhexlify(plaintext) ciphertext = unhexlify(ciphertext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.encrypt(plaintext), ciphertext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.decrypt(ciphertext), plaintext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) def test_aes_256(self): plaintext = '6bc1bee22e409f96e93d7e117393172a' +\ 'ae2d8a571e03ac9c9eb76fac45af8e51' +\ '30c81c46a35ce411e5fbc1191a0a52ef' +\ 'f69f2445df4f9b17ad2b417be66c3710' ciphertext = 'dc7e84bfda79164b7ecd8486985d3860' +\ '4febdc6740d20b3ac88f6ad82a4fb08d' +\ '71ab47a086e86eedf39d1c5bba97c408' +\ '0126141d67f37be8538f5a8be740e484' key = '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4' iv = '000102030405060708090a0b0c0d0e0f' key = unhexlify(key) iv = unhexlify(iv) plaintext = unhexlify(plaintext) ciphertext = unhexlify(ciphertext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.encrypt(plaintext), ciphertext) cipher = AES.new(key, AES.MODE_OFB, iv) 
self.assertEqual(cipher.decrypt(ciphertext), plaintext) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.encrypt(plaintext[:-8]), ciphertext[:-8]) cipher = AES.new(key, AES.MODE_OFB, iv) self.assertEqual(cipher.decrypt(ciphertext[:-8]), plaintext[:-8]) def get_tests(config={}): tests = [] tests += list_test_cases(OfbTests) if config.get('slow_tests'): tests += list_test_cases(NistOfbVectors) tests += list_test_cases(SP800TestVectors) return tests if __name__ == '__main__': suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite')
gpl-3.0
-7,998,060,312,913,902,000
38.533898
90
0.645016
false
3.106893
true
false
false
bmazin/SDR
DataReadout/ReadoutControls/lib/arcons_basic_gui.py
1
47190
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'arcons_basic_gui.ui' # # Created: Tue Dec 11 17:40:34 2012 # by: PyQt4 UI code generator 4.8.3 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: _fromUtf8 = lambda s: s class Ui_arcons(object): def setupUi(self, arcons): arcons.setObjectName(_fromUtf8("arcons")) arcons.setEnabled(True) arcons.resize(860, 960) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(arcons.sizePolicy().hasHeightForWidth()) arcons.setSizePolicy(sizePolicy) arcons.setMouseTracking(True) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8("lib/Archon.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off) arcons.setWindowIcon(icon) arcons.setWindowFilePath(_fromUtf8("")) arcons.setAnimated(True) self.centralwidget = QtGui.QWidget(arcons) self.centralwidget.setObjectName(_fromUtf8("centralwidget")) self.frame = QtGui.QFrame(self.centralwidget) self.frame.setGeometry(QtCore.QRect(0, 0, 391, 151)) self.frame.setFrameShape(QtGui.QFrame.Box) self.frame.setFrameShadow(QtGui.QFrame.Sunken) self.frame.setObjectName(_fromUtf8("frame")) self.label = QtGui.QLabel(self.frame) self.label.setGeometry(QtCore.QRect(110, 10, 251, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8("Arial")) font.setPointSize(18) font.setWeight(75) font.setBold(True) self.label.setFont(font) self.label.setObjectName(_fromUtf8("label")) self.array_temp_lcd = QtGui.QLCDNumber(self.frame) self.array_temp_lcd.setGeometry(QtCore.QRect(10, 40, 81, 41)) self.array_temp_lcd.setObjectName(_fromUtf8("array_temp_lcd")) self.pulse_tube_temp_lcd = QtGui.QLCDNumber(self.frame) self.pulse_tube_temp_lcd.setGeometry(QtCore.QRect(10, 90, 81, 41)) self.pulse_tube_temp_lcd.setObjectName(_fromUtf8("pulse_tube_temp_lcd")) self.label_3 = 
QtGui.QLabel(self.frame) self.label_3.setGeometry(QtCore.QRect(100, 50, 101, 21)) font = QtGui.QFont() font.setPointSize(14) self.label_3.setFont(font) self.label_3.setObjectName(_fromUtf8("label_3")) self.label_4 = QtGui.QLabel(self.frame) self.label_4.setGeometry(QtCore.QRect(100, 90, 121, 41)) font = QtGui.QFont() font.setPointSize(14) self.label_4.setFont(font) self.label_4.setObjectName(_fromUtf8("label_4")) self.open_shutter_radioButton = QtGui.QRadioButton(self.frame) self.open_shutter_radioButton.setEnabled(False) self.open_shutter_radioButton.setGeometry(QtCore.QRect(240, 90, 141, 41)) font = QtGui.QFont() font.setPointSize(14) self.open_shutter_radioButton.setFont(font) self.open_shutter_radioButton.setFocusPolicy(QtCore.Qt.NoFocus) self.open_shutter_radioButton.setAutoExclusive(False) self.open_shutter_radioButton.setObjectName(_fromUtf8("open_shutter_radioButton")) self.cycle_fridge_radioButton = QtGui.QRadioButton(self.frame) self.cycle_fridge_radioButton.setEnabled(False) self.cycle_fridge_radioButton.setGeometry(QtCore.QRect(240, 40, 141, 41)) font = QtGui.QFont() font.setPointSize(14) self.cycle_fridge_radioButton.setFont(font) self.cycle_fridge_radioButton.setFocusPolicy(QtCore.Qt.NoFocus) self.cycle_fridge_radioButton.setAutoExclusive(False) self.cycle_fridge_radioButton.setObjectName(_fromUtf8("cycle_fridge_radioButton")) self.lineEdit = QtGui.QLineEdit(self.frame) self.lineEdit.setGeometry(QtCore.QRect(80, 180, 113, 20)) self.lineEdit.setObjectName(_fromUtf8("lineEdit")) self.frame_2 = QtGui.QFrame(self.centralwidget) self.frame_2.setGeometry(QtCore.QRect(390, 0, 471, 181)) self.frame_2.setFrameShape(QtGui.QFrame.Box) self.frame_2.setFrameShadow(QtGui.QFrame.Sunken) self.frame_2.setObjectName(_fromUtf8("frame_2")) self.label_2 = QtGui.QLabel(self.frame_2) self.label_2.setGeometry(QtCore.QRect(150, 0, 261, 41)) font = QtGui.QFont() font.setFamily(_fromUtf8("Arial")) font.setPointSize(18) font.setWeight(75) font.setBold(True) 
self.label_2.setFont(font) self.label_2.setObjectName(_fromUtf8("label_2")) self.compass_graphicsView = QtGui.QGraphicsView(self.frame_2) self.compass_graphicsView.setGeometry(QtCore.QRect(70, 90, 81, 81)) self.compass_graphicsView.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) self.compass_graphicsView.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) self.compass_graphicsView.setInteractive(False) self.compass_graphicsView.setObjectName(_fromUtf8("compass_graphicsView")) self.label_5 = QtGui.QLabel(self.frame_2) self.label_5.setGeometry(QtCore.QRect(10, 40, 31, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_5.setFont(font) self.label_5.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_5.setObjectName(_fromUtf8("label_5")) self.label_6 = QtGui.QLabel(self.frame_2) self.label_6.setGeometry(QtCore.QRect(10, 60, 31, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_6.setFont(font) self.label_6.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_6.setObjectName(_fromUtf8("label_6")) self.label_7 = QtGui.QLabel(self.frame_2) self.label_7.setGeometry(QtCore.QRect(170, 120, 111, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_7.setFont(font) self.label_7.setLayoutDirection(QtCore.Qt.RightToLeft) self.label_7.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_7.setObjectName(_fromUtf8("label_7")) self.label_8 = QtGui.QLabel(self.frame_2) self.label_8.setGeometry(QtCore.QRect(180, 40, 101, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_8.setFont(font) self.label_8.setLayoutDirection(QtCore.Qt.RightToLeft) self.label_8.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_8.setObjectName(_fromUtf8("label_8")) self.label_9 = QtGui.QLabel(self.frame_2) self.label_9.setGeometry(QtCore.QRect(180, 60, 101, 21)) font = QtGui.QFont() font.setPointSize(10) 
self.label_9.setFont(font) self.label_9.setLayoutDirection(QtCore.Qt.RightToLeft) self.label_9.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_9.setObjectName(_fromUtf8("label_9")) self.label_10 = QtGui.QLabel(self.frame_2) self.label_10.setGeometry(QtCore.QRect(150, 140, 131, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_10.setFont(font) self.label_10.setLayoutDirection(QtCore.Qt.RightToLeft) self.label_10.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_10.setObjectName(_fromUtf8("label_10")) self.label_11 = QtGui.QLabel(self.frame_2) self.label_11.setGeometry(QtCore.QRect(150, 80, 131, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_11.setFont(font) self.label_11.setLayoutDirection(QtCore.Qt.RightToLeft) self.label_11.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_11.setObjectName(_fromUtf8("label_11")) self.label_12 = QtGui.QLabel(self.frame_2) self.label_12.setGeometry(QtCore.QRect(170, 100, 111, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_12.setFont(font) self.label_12.setLayoutDirection(QtCore.Qt.RightToLeft) self.label_12.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_12.setObjectName(_fromUtf8("label_12")) self.status_label = QtGui.QLabel(self.frame_2) self.status_label.setGeometry(QtCore.QRect(10, 10, 131, 21)) font = QtGui.QFont() font.setPointSize(12) self.status_label.setFont(font) self.status_label.setFrameShape(QtGui.QFrame.Box) self.status_label.setObjectName(_fromUtf8("status_label")) self.local_time_label = QtGui.QLabel(self.frame_2) self.local_time_label.setGeometry(QtCore.QRect(290, 140, 131, 21)) font = QtGui.QFont() font.setPointSize(12) self.local_time_label.setFont(font) self.local_time_label.setFrameShape(QtGui.QFrame.Box) self.local_time_label.setText(_fromUtf8("")) 
self.local_time_label.setObjectName(_fromUtf8("local_time_label")) self.utc_label = QtGui.QLabel(self.frame_2) self.utc_label.setGeometry(QtCore.QRect(290, 120, 131, 21)) font = QtGui.QFont() font.setPointSize(12) self.utc_label.setFont(font) self.utc_label.setFrameShape(QtGui.QFrame.Box) self.utc_label.setText(_fromUtf8("")) self.utc_label.setObjectName(_fromUtf8("utc_label")) self.lst_label = QtGui.QLabel(self.frame_2) self.lst_label.setGeometry(QtCore.QRect(290, 100, 131, 21)) font = QtGui.QFont() font.setPointSize(12) self.lst_label.setFont(font) self.lst_label.setFrameShape(QtGui.QFrame.Box) self.lst_label.setText(_fromUtf8("")) self.lst_label.setObjectName(_fromUtf8("lst_label")) self.airmass_label = QtGui.QLabel(self.frame_2) self.airmass_label.setGeometry(QtCore.QRect(290, 80, 131, 21)) font = QtGui.QFont() font.setPointSize(12) self.airmass_label.setFont(font) self.airmass_label.setFrameShape(QtGui.QFrame.Box) self.airmass_label.setText(_fromUtf8("")) self.airmass_label.setObjectName(_fromUtf8("airmass_label")) self.az_label = QtGui.QLabel(self.frame_2) self.az_label.setGeometry(QtCore.QRect(290, 60, 131, 21)) font = QtGui.QFont() font.setPointSize(12) self.az_label.setFont(font) self.az_label.setFrameShape(QtGui.QFrame.Box) self.az_label.setText(_fromUtf8("")) self.az_label.setObjectName(_fromUtf8("az_label")) self.alt_label = QtGui.QLabel(self.frame_2) self.alt_label.setGeometry(QtCore.QRect(290, 40, 131, 21)) font = QtGui.QFont() font.setPointSize(12) self.alt_label.setFont(font) self.alt_label.setFrameShape(QtGui.QFrame.Box) self.alt_label.setText(_fromUtf8("")) self.alt_label.setObjectName(_fromUtf8("alt_label")) self.ra_label = QtGui.QLabel(self.frame_2) self.ra_label.setGeometry(QtCore.QRect(50, 40, 121, 21)) font = QtGui.QFont() font.setPointSize(12) self.ra_label.setFont(font) self.ra_label.setFrameShape(QtGui.QFrame.Box) self.ra_label.setText(_fromUtf8("")) self.ra_label.setObjectName(_fromUtf8("ra_label")) self.dec_label = 
QtGui.QLabel(self.frame_2) self.dec_label.setGeometry(QtCore.QRect(50, 60, 121, 21)) font = QtGui.QFont() font.setPointSize(12) self.dec_label.setFont(font) self.dec_label.setFrameShape(QtGui.QFrame.Box) self.dec_label.setText(_fromUtf8("")) self.dec_label.setObjectName(_fromUtf8("dec_label")) self.frame_3 = QtGui.QFrame(self.centralwidget) self.frame_3.setGeometry(QtCore.QRect(0, 150, 391, 521)) self.frame_3.setFrameShape(QtGui.QFrame.Box) self.frame_3.setFrameShadow(QtGui.QFrame.Sunken) self.frame_3.setObjectName(_fromUtf8("frame_3")) self.save_raw_checkBox = QtGui.QCheckBox(self.frame_3) self.save_raw_checkBox.setEnabled(False) self.save_raw_checkBox.setGeometry(QtCore.QRect(300, 70, 91, 21)) font = QtGui.QFont() font.setPointSize(12) self.save_raw_checkBox.setFont(font) self.save_raw_checkBox.setObjectName(_fromUtf8("save_raw_checkBox")) self.data_directory_lineEdit = QtGui.QLineEdit(self.frame_3) self.data_directory_lineEdit.setGeometry(QtCore.QRect(110, 40, 201, 21)) self.data_directory_lineEdit.setObjectName(_fromUtf8("data_directory_lineEdit")) self.label_14 = QtGui.QLabel(self.frame_3) self.label_14.setGeometry(QtCore.QRect(10, 40, 91, 21)) font = QtGui.QFont() font.setPointSize(12) self.label_14.setFont(font) self.label_14.setObjectName(_fromUtf8("label_14")) self.label_16 = QtGui.QLabel(self.frame_3) self.label_16.setGeometry(QtCore.QRect(20, 210, 91, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_16.setFont(font) self.label_16.setObjectName(_fromUtf8("label_16")) self.label_13 = QtGui.QLabel(self.frame_3) self.label_13.setGeometry(QtCore.QRect(120, 0, 241, 41)) font = QtGui.QFont() font.setFamily(_fromUtf8("Arial")) font.setPointSize(18) font.setWeight(75) font.setBold(True) self.label_13.setFont(font) self.label_13.setObjectName(_fromUtf8("label_13")) self.label_15 = QtGui.QLabel(self.frame_3) self.label_15.setGeometry(QtCore.QRect(20, 170, 111, 31)) font = QtGui.QFont() font.setPointSize(10) self.label_15.setFont(font) 
self.label_15.setObjectName(_fromUtf8("label_15")) self.calibrate_data_checkBox = QtGui.QCheckBox(self.frame_3) self.calibrate_data_checkBox.setEnabled(False) self.calibrate_data_checkBox.setGeometry(QtCore.QRect(280, 100, 121, 21)) font = QtGui.QFont() font.setPointSize(12) self.calibrate_data_checkBox.setFont(font) self.calibrate_data_checkBox.setObjectName(_fromUtf8("calibrate_data_checkBox")) self.stop_observation_pushButton = QtGui.QPushButton(self.frame_3) self.stop_observation_pushButton.setGeometry(QtCore.QRect(200, 200, 181, 41)) font = QtGui.QFont() font.setPointSize(10) self.stop_observation_pushButton.setFont(font) self.stop_observation_pushButton.setObjectName(_fromUtf8("stop_observation_pushButton")) self.start_observation_pushButton = QtGui.QPushButton(self.frame_3) self.start_observation_pushButton.setGeometry(QtCore.QRect(200, 140, 181, 51)) font = QtGui.QFont() font.setPointSize(14) self.start_observation_pushButton.setFont(font) self.start_observation_pushButton.setObjectName(_fromUtf8("start_observation_pushButton")) self.close_pushButton = QtGui.QPushButton(self.frame_3) self.close_pushButton.setGeometry(QtCore.QRect(50, 460, 121, 41)) self.close_pushButton.setObjectName(_fromUtf8("close_pushButton")) self.search_pushButton = QtGui.QPushButton(self.frame_3) self.search_pushButton.setGeometry(QtCore.QRect(320, 40, 61, 21)) self.search_pushButton.setObjectName(_fromUtf8("search_pushButton")) self.obs_time_spinBox = QtGui.QSpinBox(self.frame_3) self.obs_time_spinBox.setGeometry(QtCore.QRect(120, 170, 71, 31)) font = QtGui.QFont() font.setPointSize(12) self.obs_time_spinBox.setFont(font) self.obs_time_spinBox.setMaximum(99999) self.obs_time_spinBox.setProperty(_fromUtf8("value"), 30) self.obs_time_spinBox.setObjectName(_fromUtf8("obs_time_spinBox")) self.remaining_time_lcdNumber = QtGui.QLCDNumber(self.frame_3) self.remaining_time_lcdNumber.setGeometry(QtCore.QRect(120, 210, 61, 21)) 
self.remaining_time_lcdNumber.setObjectName(_fromUtf8("remaining_time_lcdNumber")) self.frequency_tuneup_pushButton = QtGui.QPushButton(self.frame_3) self.frequency_tuneup_pushButton.setEnabled(False) self.frequency_tuneup_pushButton.setGeometry(QtCore.QRect(50, 400, 121, 51)) self.frequency_tuneup_pushButton.setObjectName(_fromUtf8("frequency_tuneup_pushButton")) self.file_name_lineEdit = QtGui.QLineEdit(self.frame_3) self.file_name_lineEdit.setEnabled(False) self.file_name_lineEdit.setGeometry(QtCore.QRect(82, 70, 211, 22)) self.file_name_lineEdit.setText(_fromUtf8("")) self.file_name_lineEdit.setObjectName(_fromUtf8("file_name_lineEdit")) self.target_lineEdit = QtGui.QLineEdit(self.frame_3) self.target_lineEdit.setGeometry(QtCore.QRect(100, 100, 171, 22)) self.target_lineEdit.setObjectName(_fromUtf8("target_lineEdit")) self.label_20 = QtGui.QLabel(self.frame_3) self.label_20.setGeometry(QtCore.QRect(10, 70, 91, 20)) font = QtGui.QFont() font.setPointSize(12) self.label_20.setFont(font) self.label_20.setObjectName(_fromUtf8("label_20")) self.label_21 = QtGui.QLabel(self.frame_3) self.label_21.setGeometry(QtCore.QRect(10, 100, 91, 17)) font = QtGui.QFont() font.setPointSize(12) self.label_21.setFont(font) self.label_21.setObjectName(_fromUtf8("label_21")) self.frame_6 = QtGui.QFrame(self.frame_3) self.frame_6.setGeometry(QtCore.QRect(210, 390, 141, 111)) self.frame_6.setFrameShape(QtGui.QFrame.StyledPanel) self.frame_6.setFrameShadow(QtGui.QFrame.Raised) self.frame_6.setObjectName(_fromUtf8("frame_6")) self.subtract_sky_radioButton = QtGui.QRadioButton(self.frame_6) self.subtract_sky_radioButton.setGeometry(QtCore.QRect(20, 0, 111, 31)) font = QtGui.QFont() font.setPointSize(12) self.subtract_sky_radioButton.setFont(font) self.subtract_sky_radioButton.setChecked(False) self.subtract_sky_radioButton.setAutoExclusive(False) self.subtract_sky_radioButton.setObjectName(_fromUtf8("subtract_sky_radioButton")) self.flat_field_radioButton = 
QtGui.QRadioButton(self.frame_6) self.flat_field_radioButton.setEnabled(True) self.flat_field_radioButton.setGeometry(QtCore.QRect(20, 20, 111, 31)) font = QtGui.QFont() font.setPointSize(12) self.flat_field_radioButton.setFont(font) self.flat_field_radioButton.setCheckable(True) self.flat_field_radioButton.setChecked(False) self.flat_field_radioButton.setAutoExclusive(False) self.flat_field_radioButton.setObjectName(_fromUtf8("flat_field_radioButton")) self.int_time_spinBox = QtGui.QSpinBox(self.frame_6) self.int_time_spinBox.setGeometry(QtCore.QRect(10, 50, 51, 22)) self.int_time_spinBox.setMaximum(9999) self.int_time_spinBox.setProperty(_fromUtf8("value"), 1) self.int_time_spinBox.setObjectName(_fromUtf8("int_time_spinBox")) self.label_19 = QtGui.QLabel(self.frame_6) self.label_19.setGeometry(QtCore.QRect(60, 50, 81, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_19.setFont(font) self.label_19.setObjectName(_fromUtf8("label_19")) self.options_radioButton = QtGui.QRadioButton(self.frame_6) self.options_radioButton.setGeometry(QtCore.QRect(10, 80, 131, 21)) font = QtGui.QFont() font.setPointSize(9) self.options_radioButton.setFont(font) self.options_radioButton.setObjectName(_fromUtf8("options_radioButton")) self.textEdit = QtGui.QTextEdit(self.frame_3) self.textEdit.setGeometry(QtCore.QRect(10, 260, 371, 101)) self.textEdit.setObjectName(_fromUtf8("textEdit")) self.label_27 = QtGui.QLabel(self.frame_3) self.label_27.setGeometry(QtCore.QRect(10, 240, 151, 17)) self.label_27.setObjectName(_fromUtf8("label_27")) self.update_description = QtGui.QPushButton(self.frame_3) self.update_description.setGeometry(QtCore.QRect(10, 360, 141, 31)) self.update_description.setObjectName(_fromUtf8("update_description")) self.continuous = QtGui.QCheckBox(self.frame_3) self.continuous.setGeometry(QtCore.QRect(20, 130, 181, 41)) self.continuous.setObjectName(_fromUtf8("continuous")) self.frame_4 = QtGui.QFrame(self.centralwidget) self.frame_4.setGeometry(QtCore.QRect(390, 
180, 471, 491)) self.frame_4.setFrameShape(QtGui.QFrame.Box) self.frame_4.setFrameShadow(QtGui.QFrame.Sunken) self.frame_4.setObjectName(_fromUtf8("frame_4")) self.tv_image = QtGui.QGraphicsView(self.frame_4) self.tv_image.setGeometry(QtCore.QRect(10, 10, 444, 464)) self.tv_image.setMouseTracking(True) self.tv_image.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) self.tv_image.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) self.tv_image.setAlignment(QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop) self.tv_image.setObjectName(_fromUtf8("tv_image")) self.frame_5 = QtGui.QFrame(self.centralwidget) self.frame_5.setGeometry(QtCore.QRect(0, 670, 861, 231)) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.frame_5.sizePolicy().hasHeightForWidth()) self.frame_5.setSizePolicy(sizePolicy) self.frame_5.setFrameShape(QtGui.QFrame.Box) self.frame_5.setFrameShadow(QtGui.QFrame.Sunken) self.frame_5.setObjectName(_fromUtf8("frame_5")) self.spectra_plot = MPL_Widget(self.frame_5) self.spectra_plot.setGeometry(QtCore.QRect(250, 0, 491, 241)) self.spectra_plot.setObjectName(_fromUtf8("spectra_plot")) self.pixel_number_label = QtGui.QLabel(self.frame_5) self.pixel_number_label.setGeometry(QtCore.QRect(40, 50, 131, 31)) self.pixel_number_label.setFrameShape(QtGui.QFrame.NoFrame) self.pixel_number_label.setAlignment(QtCore.Qt.AlignCenter) self.pixel_number_label.setObjectName(_fromUtf8("pixel_number_label")) self.pixelpath = QtGui.QLabel(self.frame_5) self.pixelpath.setGeometry(QtCore.QRect(40, 90, 141, 31)) font = QtGui.QFont() font.setPointSize(14) self.pixelpath.setFont(font) self.pixelpath.setFrameShape(QtGui.QFrame.Box) self.pixelpath.setText(_fromUtf8("")) self.pixelpath.setObjectName(_fromUtf8("pixelpath")) self.row = QtGui.QLabel(self.frame_5) self.row.setGeometry(QtCore.QRect(40, 150, 61, 31)) 
self.row.setFrameShape(QtGui.QFrame.Box) self.row.setText(_fromUtf8("")) self.row.setObjectName(_fromUtf8("row")) self.label_32 = QtGui.QLabel(self.frame_5) self.label_32.setGeometry(QtCore.QRect(40, 130, 61, 16)) self.label_32.setObjectName(_fromUtf8("label_32")) self.col = QtGui.QLabel(self.frame_5) self.col.setGeometry(QtCore.QRect(130, 150, 61, 31)) self.col.setFrameShape(QtGui.QFrame.Box) self.col.setText(_fromUtf8("")) self.col.setObjectName(_fromUtf8("col")) self.label_34 = QtGui.QLabel(self.frame_5) self.label_34.setGeometry(QtCore.QRect(130, 130, 61, 16)) self.label_34.setObjectName(_fromUtf8("label_34")) self.groupBox = QtGui.QGroupBox(self.centralwidget) self.groupBox.setGeometry(QtCore.QRect(870, 10, 171, 191)) self.groupBox.setObjectName(_fromUtf8("groupBox")) self.drag_select_radioButton = QtGui.QRadioButton(self.groupBox) self.drag_select_radioButton.setGeometry(QtCore.QRect(10, 30, 141, 31)) self.drag_select_radioButton.setAutoExclusive(False) self.drag_select_radioButton.setObjectName(_fromUtf8("drag_select_radioButton")) self.mode_buttonGroup = QtGui.QButtonGroup(arcons) self.mode_buttonGroup.setObjectName(_fromUtf8("mode_buttonGroup")) self.mode_buttonGroup.addButton(self.drag_select_radioButton) self.rect_select_radioButton = QtGui.QRadioButton(self.groupBox) self.rect_select_radioButton.setGeometry(QtCore.QRect(10, 70, 161, 21)) self.rect_select_radioButton.setChecked(True) self.rect_select_radioButton.setAutoExclusive(False) self.rect_select_radioButton.setObjectName(_fromUtf8("rect_select_radioButton")) self.mode_buttonGroup.addButton(self.rect_select_radioButton) self.rect_x_spinBox = QtGui.QSpinBox(self.groupBox) self.rect_x_spinBox.setGeometry(QtCore.QRect(30, 90, 57, 31)) self.rect_x_spinBox.setMinimum(1) self.rect_x_spinBox.setMaximum(32) self.rect_x_spinBox.setProperty(_fromUtf8("value"), 1) self.rect_x_spinBox.setObjectName(_fromUtf8("rect_x_spinBox")) self.rect_y_spinBox = QtGui.QSpinBox(self.groupBox) 
self.rect_y_spinBox.setGeometry(QtCore.QRect(110, 90, 57, 31)) self.rect_y_spinBox.setMinimum(1) self.rect_y_spinBox.setMaximum(32) self.rect_y_spinBox.setProperty(_fromUtf8("value"), 1) self.rect_y_spinBox.setObjectName(_fromUtf8("rect_y_spinBox")) self.label_23 = QtGui.QLabel(self.groupBox) self.label_23.setGeometry(QtCore.QRect(20, 90, 16, 31)) self.label_23.setObjectName(_fromUtf8("label_23")) self.label_24 = QtGui.QLabel(self.groupBox) self.label_24.setGeometry(QtCore.QRect(100, 90, 16, 31)) self.label_24.setObjectName(_fromUtf8("label_24")) self.circ_select_radioButton = QtGui.QRadioButton(self.groupBox) self.circ_select_radioButton.setGeometry(QtCore.QRect(10, 130, 151, 21)) self.circ_select_radioButton.setAutoExclusive(False) self.circ_select_radioButton.setObjectName(_fromUtf8("circ_select_radioButton")) self.mode_buttonGroup.addButton(self.circ_select_radioButton) self.circ_r_spinBox = QtGui.QSpinBox(self.groupBox) self.circ_r_spinBox.setGeometry(QtCore.QRect(40, 150, 57, 31)) self.circ_r_spinBox.setMinimum(0) self.circ_r_spinBox.setMaximum(16) self.circ_r_spinBox.setProperty(_fromUtf8("value"), 0) self.circ_r_spinBox.setObjectName(_fromUtf8("circ_r_spinBox")) self.label_25 = QtGui.QLabel(self.groupBox) self.label_25.setGeometry(QtCore.QRect(30, 150, 16, 31)) self.label_25.setObjectName(_fromUtf8("label_25")) self.choose_beamimage = QtGui.QPushButton(self.centralwidget) self.choose_beamimage.setGeometry(QtCore.QRect(1050, 770, 171, 41)) self.choose_beamimage.setObjectName(_fromUtf8("choose_beamimage")) self.choose_bindir = QtGui.QPushButton(self.centralwidget) self.choose_bindir.setGeometry(QtCore.QRect(1050, 740, 171, 41)) self.choose_bindir.setObjectName(_fromUtf8("choose_bindir")) self.brightpix = QtGui.QSpinBox(self.centralwidget) self.brightpix.setGeometry(QtCore.QRect(880, 250, 57, 25)) self.brightpix.setMaximum(2024) self.brightpix.setProperty(_fromUtf8("value"), 50) self.brightpix.setObjectName(_fromUtf8("brightpix")) self.label_22 = 
QtGui.QLabel(self.centralwidget) self.label_22.setGeometry(QtCore.QRect(940, 250, 121, 21)) self.label_22.setObjectName(_fromUtf8("label_22")) self.takesky = QtGui.QPushButton(self.centralwidget) self.takesky.setGeometry(QtCore.QRect(1050, 700, 171, 51)) self.takesky.setObjectName(_fromUtf8("takesky")) self.frame_7 = QtGui.QFrame(self.centralwidget) self.frame_7.setGeometry(QtCore.QRect(860, 390, 181, 511)) self.frame_7.setFrameShape(QtGui.QFrame.Box) self.frame_7.setFrameShadow(QtGui.QFrame.Sunken) self.frame_7.setObjectName(_fromUtf8("frame_7")) self.label_36 = QtGui.QLabel(self.frame_7) self.label_36.setGeometry(QtCore.QRect(30, 10, 141, 41)) font = QtGui.QFont() font.setFamily(_fromUtf8("Arial")) font.setPointSize(18) font.setWeight(75) font.setBold(True) self.label_36.setFont(font) self.label_36.setObjectName(_fromUtf8("label_36")) self.cal_time = QtGui.QSpinBox(self.frame_7) self.cal_time.setGeometry(QtCore.QRect(20, 180, 57, 25)) self.cal_time.setMaximum(10000) self.cal_time.setObjectName(_fromUtf8("cal_time")) self.cal_angle = QtGui.QDoubleSpinBox(self.frame_7) self.cal_angle.setGeometry(QtCore.QRect(90, 180, 62, 25)) self.cal_angle.setMaximum(360.0) self.cal_angle.setObjectName(_fromUtf8("cal_angle")) self.goto_angle = QtGui.QDoubleSpinBox(self.frame_7) self.goto_angle.setGeometry(QtCore.QRect(90, 240, 62, 25)) self.goto_angle.setObjectName(_fromUtf8("goto_angle")) self.label_37 = QtGui.QLabel(self.frame_7) self.label_37.setGeometry(QtCore.QRect(90, 220, 62, 17)) self.label_37.setObjectName(_fromUtf8("label_37")) self.label_38 = QtGui.QLabel(self.frame_7) self.label_38.setGeometry(QtCore.QRect(20, 160, 51, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_38.setFont(font) self.label_38.setObjectName(_fromUtf8("label_38")) self.label_39 = QtGui.QLabel(self.frame_7) self.label_39.setGeometry(QtCore.QRect(90, 160, 61, 21)) font = QtGui.QFont() font.setPointSize(10) self.label_39.setFont(font) self.label_39.setObjectName(_fromUtf8("label_39")) 
self.do_cal_button = QtGui.QPushButton(self.frame_7) self.do_cal_button.setGeometry(QtCore.QRect(10, 120, 151, 41)) self.do_cal_button.setObjectName(_fromUtf8("do_cal_button")) self.go_home_button = QtGui.QPushButton(self.frame_7) self.go_home_button.setGeometry(QtCore.QRect(10, 61, 151, 41)) self.go_home_button.setObjectName(_fromUtf8("go_home_button")) self.goto_button = QtGui.QPushButton(self.frame_7) self.goto_button.setGeometry(QtCore.QRect(10, 220, 71, 51)) self.goto_button.setObjectName(_fromUtf8("goto_button")) self.label_40 = QtGui.QLabel(self.frame_7) self.label_40.setGeometry(QtCore.QRect(20, 310, 151, 41)) font = QtGui.QFont() font.setFamily(_fromUtf8("Arial")) font.setPointSize(18) font.setWeight(75) font.setBold(True) self.label_40.setFont(font) self.label_40.setObjectName(_fromUtf8("label_40")) self.laser_toggle = QtGui.QCheckBox(self.frame_7) self.laser_toggle.setGeometry(QtCore.QRect(20, 270, 111, 41)) self.laser_toggle.setObjectName(_fromUtf8("laser_toggle")) self.laser_label = QtGui.QLabel(self.frame_7) self.laser_label.setGeometry(QtCore.QRect(120, 270, 51, 41)) self.laser_label.setObjectName(_fromUtf8("laser_label")) self.filter1 = QtGui.QRadioButton(self.frame_7) self.filter1.setGeometry(QtCore.QRect(20, 350, 102, 21)) self.filter1.setAutoExclusive(False) self.filter1.setObjectName(_fromUtf8("filter1")) self.filter_buttonGroup = QtGui.QButtonGroup(arcons) self.filter_buttonGroup.setObjectName(_fromUtf8("filter_buttonGroup")) self.filter_buttonGroup.addButton(self.filter1) self.filter2 = QtGui.QRadioButton(self.frame_7) self.filter2.setGeometry(QtCore.QRect(20, 370, 102, 21)) self.filter2.setAutoExclusive(False) self.filter2.setObjectName(_fromUtf8("filter2")) self.filter_buttonGroup.addButton(self.filter2) self.filter3 = QtGui.QRadioButton(self.frame_7) self.filter3.setGeometry(QtCore.QRect(20, 390, 102, 21)) self.filter3.setAutoExclusive(False) self.filter3.setObjectName(_fromUtf8("filter3")) self.filter_buttonGroup.addButton(self.filter3) 
self.filter4 = QtGui.QRadioButton(self.frame_7) self.filter4.setGeometry(QtCore.QRect(20, 410, 102, 21)) self.filter4.setAutoExclusive(False) self.filter4.setObjectName(_fromUtf8("filter4")) self.filter_buttonGroup.addButton(self.filter4) self.filter5 = QtGui.QRadioButton(self.frame_7) self.filter5.setGeometry(QtCore.QRect(20, 430, 102, 21)) self.filter5.setAutoExclusive(False) self.filter5.setObjectName(_fromUtf8("filter5")) self.filter_buttonGroup.addButton(self.filter5) self.filter6 = QtGui.QRadioButton(self.frame_7) self.filter6.setGeometry(QtCore.QRect(20, 450, 102, 21)) self.filter6.setChecked(True) self.filter6.setAutoExclusive(False) self.filter6.setObjectName(_fromUtf8("filter6")) self.filter_buttonGroup.addButton(self.filter6) self.label_26 = QtGui.QLabel(self.centralwidget) self.label_26.setGeometry(QtCore.QRect(1060, 810, 101, 17)) self.label_26.setObjectName(_fromUtf8("label_26")) self.label_17 = QtGui.QLabel(self.centralwidget) self.label_17.setGeometry(QtCore.QRect(1060, 830, 46, 13)) self.label_17.setObjectName(_fromUtf8("label_17")) self.label_18 = QtGui.QLabel(self.centralwidget) self.label_18.setGeometry(QtCore.QRect(1060, 863, 46, 20)) self.label_18.setObjectName(_fromUtf8("label_18")) self.RA_lineEdit = QtGui.QLineEdit(self.centralwidget) self.RA_lineEdit.setEnabled(False) self.RA_lineEdit.setGeometry(QtCore.QRect(1060, 840, 113, 20)) self.RA_lineEdit.setObjectName(_fromUtf8("RA_lineEdit")) self.Dec_lineEdit = QtGui.QLineEdit(self.centralwidget) self.Dec_lineEdit.setEnabled(False) self.Dec_lineEdit.setGeometry(QtCore.QRect(1060, 880, 113, 20)) self.Dec_lineEdit.setObjectName(_fromUtf8("Dec_lineEdit")) self.label_41 = QtGui.QLabel(self.centralwidget) self.label_41.setGeometry(QtCore.QRect(900, 210, 141, 41)) font = QtGui.QFont() font.setFamily(_fromUtf8("Arial")) font.setPointSize(18) font.setWeight(75) font.setBold(True) self.label_41.setFont(font) self.label_41.setObjectName(_fromUtf8("label_41")) self.vmin = QtGui.QSpinBox(self.centralwidget) 
self.vmin.setGeometry(QtCore.QRect(886, 350, 61, 25)) self.vmin.setMaximum(1000000) self.vmin.setProperty(_fromUtf8("value"), 100) self.vmin.setObjectName(_fromUtf8("vmin")) self.vmax = QtGui.QSpinBox(self.centralwidget) self.vmax.setGeometry(QtCore.QRect(960, 350, 57, 25)) self.vmax.setMaximum(1000000) self.vmax.setProperty(_fromUtf8("value"), 1500) self.vmax.setObjectName(_fromUtf8("vmax")) self.label_28 = QtGui.QLabel(self.centralwidget) self.label_28.setGeometry(QtCore.QRect(940, 280, 62, 17)) self.label_28.setObjectName(_fromUtf8("label_28")) self.contrast_mode = QtGui.QCheckBox(self.centralwidget) self.contrast_mode.setGeometry(QtCore.QRect(880, 300, 161, 31)) self.contrast_mode.setObjectName(_fromUtf8("contrast_mode")) self.label_29 = QtGui.QLabel(self.centralwidget) self.label_29.setGeometry(QtCore.QRect(890, 330, 62, 17)) font = QtGui.QFont() font.setPointSize(12) self.label_29.setFont(font) self.label_29.setObjectName(_fromUtf8("label_29")) self.label_30 = QtGui.QLabel(self.centralwidget) self.label_30.setGeometry(QtCore.QRect(960, 330, 62, 17)) font = QtGui.QFont() font.setPointSize(11) self.label_30.setFont(font) self.label_30.setObjectName(_fromUtf8("label_30")) arcons.setCentralWidget(self.centralwidget) self.statusbar = QtGui.QStatusBar(arcons) self.statusbar.setObjectName(_fromUtf8("statusbar")) arcons.setStatusBar(self.statusbar) self.menubar = QtGui.QMenuBar(arcons) self.menubar.setGeometry(QtCore.QRect(0, 0, 860, 22)) self.menubar.setObjectName(_fromUtf8("menubar")) arcons.setMenuBar(self.menubar) self.retranslateUi(arcons) QtCore.QObject.connect(self.close_pushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), arcons.close) QtCore.QMetaObject.connectSlotsByName(arcons) def retranslateUi(self, arcons): arcons.setWindowTitle(QtGui.QApplication.translate("arcons", "ARCONS", None, QtGui.QApplication.UnicodeUTF8)) self.label.setText(QtGui.QApplication.translate("arcons", "ARCONS Status", None, QtGui.QApplication.UnicodeUTF8)) 
self.label_3.setText(QtGui.QApplication.translate("arcons", "Array Temp", None, QtGui.QApplication.UnicodeUTF8)) self.label_4.setText(QtGui.QApplication.translate("arcons", "Pulse Tube Temp", None, QtGui.QApplication.UnicodeUTF8)) self.open_shutter_radioButton.setText(QtGui.QApplication.translate("arcons", "Open Shutter", None, QtGui.QApplication.UnicodeUTF8)) self.cycle_fridge_radioButton.setText(QtGui.QApplication.translate("arcons", "Cycle Fridge", None, QtGui.QApplication.UnicodeUTF8)) self.label_2.setText(QtGui.QApplication.translate("arcons", "Telescope Status", None, QtGui.QApplication.UnicodeUTF8)) self.label_5.setText(QtGui.QApplication.translate("arcons", "RA", None, QtGui.QApplication.UnicodeUTF8)) self.label_6.setText(QtGui.QApplication.translate("arcons", "Dec", None, QtGui.QApplication.UnicodeUTF8)) self.label_7.setText(QtGui.QApplication.translate("arcons", "UTC", None, QtGui.QApplication.UnicodeUTF8)) self.label_8.setText(QtGui.QApplication.translate("arcons", "Altitude", None, QtGui.QApplication.UnicodeUTF8)) self.label_9.setText(QtGui.QApplication.translate("arcons", "Azimuth", None, QtGui.QApplication.UnicodeUTF8)) self.label_10.setText(QtGui.QApplication.translate("arcons", "Local Time", None, QtGui.QApplication.UnicodeUTF8)) self.label_11.setText(QtGui.QApplication.translate("arcons", "Airmass", None, QtGui.QApplication.UnicodeUTF8)) self.label_12.setText(QtGui.QApplication.translate("arcons", "LST", None, QtGui.QApplication.UnicodeUTF8)) self.status_label.setText(QtGui.QApplication.translate("arcons", "Status", None, QtGui.QApplication.UnicodeUTF8)) self.save_raw_checkBox.setText(QtGui.QApplication.translate("arcons", " Save Raw", None, QtGui.QApplication.UnicodeUTF8)) self.label_14.setText(QtGui.QApplication.translate("arcons", "Data Directory:", None, QtGui.QApplication.UnicodeUTF8)) self.label_16.setText(QtGui.QApplication.translate("arcons", "Remaining time:", None, QtGui.QApplication.UnicodeUTF8)) 
self.label_13.setText(QtGui.QApplication.translate("arcons", "SDR Control", None, QtGui.QApplication.UnicodeUTF8)) self.label_15.setText(QtGui.QApplication.translate("arcons", "Exposure Time:", None, QtGui.QApplication.UnicodeUTF8)) self.calibrate_data_checkBox.setText(QtGui.QApplication.translate("arcons", "Calibrate Data", None, QtGui.QApplication.UnicodeUTF8)) self.stop_observation_pushButton.setText(QtGui.QApplication.translate("arcons", "Stop Observation", None, QtGui.QApplication.UnicodeUTF8)) self.start_observation_pushButton.setText(QtGui.QApplication.translate("arcons", "Start Observation", None, QtGui.QApplication.UnicodeUTF8)) self.close_pushButton.setText(QtGui.QApplication.translate("arcons", "Close", None, QtGui.QApplication.UnicodeUTF8)) self.search_pushButton.setText(QtGui.QApplication.translate("arcons", "Browse", None, QtGui.QApplication.UnicodeUTF8)) self.frequency_tuneup_pushButton.setText(QtGui.QApplication.translate("arcons", "Frequency \n" "Tune-up", None, QtGui.QApplication.UnicodeUTF8)) self.target_lineEdit.setText(QtGui.QApplication.translate("arcons", "Target", None, QtGui.QApplication.UnicodeUTF8)) self.label_20.setText(QtGui.QApplication.translate("arcons", "File name:", None, QtGui.QApplication.UnicodeUTF8)) self.label_21.setText(QtGui.QApplication.translate("arcons", "Target name:", None, QtGui.QApplication.UnicodeUTF8)) self.subtract_sky_radioButton.setText(QtGui.QApplication.translate("arcons", "Subtract Sky", None, QtGui.QApplication.UnicodeUTF8)) self.flat_field_radioButton.setText(QtGui.QApplication.translate("arcons", "Flat Field", None, QtGui.QApplication.UnicodeUTF8)) self.label_19.setText(QtGui.QApplication.translate("arcons", " Integration (s)", None, QtGui.QApplication.UnicodeUTF8)) self.options_radioButton.setText(QtGui.QApplication.translate("arcons", "Expand Controls-->", None, QtGui.QApplication.UnicodeUTF8)) self.label_27.setText(QtGui.QApplication.translate("arcons", "Additional Header Info:", None, 
QtGui.QApplication.UnicodeUTF8)) self.update_description.setText(QtGui.QApplication.translate("arcons", "Update in Header", None, QtGui.QApplication.UnicodeUTF8)) self.continuous.setText(QtGui.QApplication.translate("arcons", " Continuous Observing", None, QtGui.QApplication.UnicodeUTF8)) self.pixel_number_label.setText(QtGui.QApplication.translate("arcons", "Displaying Plot for\n" " Pixel Number:", None, QtGui.QApplication.UnicodeUTF8)) self.label_32.setText(QtGui.QApplication.translate("arcons", "Row:", None, QtGui.QApplication.UnicodeUTF8)) self.label_34.setText(QtGui.QApplication.translate("arcons", "Col:", None, QtGui.QApplication.UnicodeUTF8)) self.groupBox.setTitle(QtGui.QApplication.translate("arcons", "Pixel Selection Mode", None, QtGui.QApplication.UnicodeUTF8)) self.drag_select_radioButton.setText(QtGui.QApplication.translate("arcons", "Click && Drag", None, QtGui.QApplication.UnicodeUTF8)) self.rect_select_radioButton.setText(QtGui.QApplication.translate("arcons", "Single Click Rectangle", None, QtGui.QApplication.UnicodeUTF8)) self.label_23.setText(QtGui.QApplication.translate("arcons", "x", None, QtGui.QApplication.UnicodeUTF8)) self.label_24.setText(QtGui.QApplication.translate("arcons", "y", None, QtGui.QApplication.UnicodeUTF8)) self.circ_select_radioButton.setText(QtGui.QApplication.translate("arcons", "Single Click Circle", None, QtGui.QApplication.UnicodeUTF8)) self.label_25.setText(QtGui.QApplication.translate("arcons", "r", None, QtGui.QApplication.UnicodeUTF8)) self.choose_beamimage.setText(QtGui.QApplication.translate("arcons", "Choose Beamimage", None, QtGui.QApplication.UnicodeUTF8)) self.choose_bindir.setText(QtGui.QApplication.translate("arcons", "Choose Bin Dir.", None, QtGui.QApplication.UnicodeUTF8)) self.label_22.setText(QtGui.QApplication.translate("arcons", "Saturated Pix", None, QtGui.QApplication.UnicodeUTF8)) self.takesky.setText(QtGui.QApplication.translate("arcons", "Take Sky Exposure", None, QtGui.QApplication.UnicodeUTF8)) 
self.label_36.setText(QtGui.QApplication.translate("arcons", "Calibration", None, QtGui.QApplication.UnicodeUTF8)) self.label_37.setText(QtGui.QApplication.translate("arcons", "Angle", None, QtGui.QApplication.UnicodeUTF8)) self.label_38.setText(QtGui.QApplication.translate("arcons", "Cal Time:", None, QtGui.QApplication.UnicodeUTF8)) self.label_39.setText(QtGui.QApplication.translate("arcons", "Cal Angle:", None, QtGui.QApplication.UnicodeUTF8)) self.do_cal_button.setText(QtGui.QApplication.translate("arcons", "Do Cal", None, QtGui.QApplication.UnicodeUTF8)) self.go_home_button.setText(QtGui.QApplication.translate("arcons", "Go Home", None, QtGui.QApplication.UnicodeUTF8)) self.goto_button.setText(QtGui.QApplication.translate("arcons", "GoTo", None, QtGui.QApplication.UnicodeUTF8)) self.label_40.setText(QtGui.QApplication.translate("arcons", "Filter Wheel", None, QtGui.QApplication.UnicodeUTF8)) self.laser_toggle.setText(QtGui.QApplication.translate("arcons", "Laser Box", None, QtGui.QApplication.UnicodeUTF8)) self.laser_label.setText(QtGui.QApplication.translate("arcons", "OFF", None, QtGui.QApplication.UnicodeUTF8)) self.filter1.setText(QtGui.QApplication.translate("arcons", "Filter 1", None, QtGui.QApplication.UnicodeUTF8)) self.filter2.setText(QtGui.QApplication.translate("arcons", "Filter 2", None, QtGui.QApplication.UnicodeUTF8)) self.filter3.setText(QtGui.QApplication.translate("arcons", "Filter 3", None, QtGui.QApplication.UnicodeUTF8)) self.filter4.setText(QtGui.QApplication.translate("arcons", "Filter 4", None, QtGui.QApplication.UnicodeUTF8)) self.filter5.setText(QtGui.QApplication.translate("arcons", "Filter 5", None, QtGui.QApplication.UnicodeUTF8)) self.filter6.setText(QtGui.QApplication.translate("arcons", "Filter 6", None, QtGui.QApplication.UnicodeUTF8)) self.label_26.setText(QtGui.QApplication.translate("arcons", "Testing Junk", None, QtGui.QApplication.UnicodeUTF8)) self.label_17.setText(QtGui.QApplication.translate("arcons", "RA", None, 
QtGui.QApplication.UnicodeUTF8)) self.label_18.setText(QtGui.QApplication.translate("arcons", "Dec", None, QtGui.QApplication.UnicodeUTF8)) self.RA_lineEdit.setText(QtGui.QApplication.translate("arcons", "0.0", None, QtGui.QApplication.UnicodeUTF8)) self.Dec_lineEdit.setText(QtGui.QApplication.translate("arcons", "0.0", None, QtGui.QApplication.UnicodeUTF8)) self.label_41.setText(QtGui.QApplication.translate("arcons", "Contrast", None, QtGui.QApplication.UnicodeUTF8)) self.label_28.setText(QtGui.QApplication.translate("arcons", "or", None, QtGui.QApplication.UnicodeUTF8)) self.contrast_mode.setText(QtGui.QApplication.translate("arcons", "Manually", None, QtGui.QApplication.UnicodeUTF8)) self.label_29.setText(QtGui.QApplication.translate("arcons", "Min", None, QtGui.QApplication.UnicodeUTF8)) self.label_30.setText(QtGui.QApplication.translate("arcons", "Max", None, QtGui.QApplication.UnicodeUTF8)) from mpl_pyqt4_widget import MPL_Widget
gpl-2.0
-8,649,052,045,101,224,000
60.173913
148
0.679148
false
3.570942
false
false
false
marcotinacci/interbank-lending-systemic-risk
Plot.py
1
2224
# -*- coding: utf-8 -*- """ Created on Thu May 21 14:37:15 2015 @author: Marco Tinacci """ import networkx as nx import matplotlib.pyplot as plt import numpy as np import Contagion def plotGraph(g,alpha,node_scale=1, seed=None, pos=None): # layout if pos == None: pos = nx.circular_layout(g) # pos = nx.random_layout(g) # draw nodes nx.draw_networkx_nodes(g,pos, nodelist = filter(lambda x:g.node[x]['BANKRUPT'] == 0,g.nodes()), # active -> green node_size = [node_scale*g.node[k]['ASSET'] for k in g.nodes()], node_color = 'g')#[node_scale*g.node[k]['ASSET'] for k in g.nodes()],cmap = plt.cm.Blues) nx.draw_networkx_nodes(g,pos, nodelist = filter(lambda x:g.node[x]['BANKRUPT'] == 2,g.nodes()), # failure -> yellow node_size = 10, node_color = 'y', node_shape = 's') nx.draw_networkx_nodes(g,pos, nodelist = filter(lambda x:g.node[x]['BANKRUPT'] == 1,g.nodes()), # default -> red node_size = 10, node_color = 'r', node_shape = 's') nx.draw_networkx_nodes(g,pos, nodelist = filter(lambda x:g.node[x]['BANKRUPT'] == 3,g.nodes()), # init -> blue node_size = 10, node_color = 'b', node_shape = 's') # draw edges if g.edges(): edges,weights = zip(*nx.get_edge_attributes(g,'weight').items()) nx.draw_networkx_edges(g, pos, edge_color = map(lambda x:x+20,weights), width=1, edge_cmap = plt.cm.Blues, arrows=False) # plot graph nx.write_gml(g,'output_graphs/n'+str(len(g))+'a'+str(alpha)+'s'+str(seed)+'.gml') plt.savefig('output_graphs/n'+str(len(g))+'a'+str(alpha)+'s'+str(seed)+'.png') plt.show() return pos def scatterDegreeSize(g): # fig = plt.figure() # ax2 = fig.add_subplot(111) # ax2.scatter(map(lambda x:g.degree(x), g.nodes()), # map(lambda y:y['ASSET'], g.node.values())) plt.scatter(map(lambda x:g.degree(x), g.nodes()), map(lambda y:y['ASSET'], g.node.values())) plt.xlabel('degree') plt.ylabel('asset') plt.show()
mit
7,015,643,916,021,559,000
33.75
97
0.543615
false
3.067586
false
false
false
sgolitsynskiy/sergey.cs.uni.edu
www/courses/cs1510/fall2017/sessions/092117.py
1
1456
# ask for int, report runnig total / version 1 num = 0 total = 0 while num != -1: total = total + num print("total so far = " + str(total)) num = int(input("next int: ")) # ask for int, report runnig total / version 2 total = 0 while True: num = int(input("next int: ")) if num == -1: break total += num print("total so far = " + str(total)) # check if number is prime num = int(input("int: ")) total = 0 for x in range(2, num): if num % x == 0: print(str(num) + " is NOT prime") break # we don't need to continue checking else: print(str(num) + " is PRIME") # check multiple numbers while True: num = int(input("int: ")) if num == -1: break if num < 3: print("int must be greater than 2") continue is_prime = True for i in range(2, num): if num % i == 0: is_prime = False break if is_prime: print(str(num) + " is PRIME") else: print(str(num) + " is NOT prime") # print out primes up to 100 for i in range(3, 101): is_prime = True for j in range(2, i-1): if i % j == 0: is_prime = False break if is_prime: print(str(i) + " is PRIME") else: print(str(i) + " is NOT prime") # print multilication table for i in range(1, 11): for j in range(1, 11): print("%3d" % (i * j), end=' ') print() print()
mit
-2,457,461,500,574,343,000
20.731343
50
0.517857
false
3.235556
false
false
false
callowayproject/django-articleappkit
articleappkit/admin.py
1
1229
from django.contrib import admin from django.db import models from django import forms ARTICLE_BASE_FIELDSET = ( None, { 'fields': ('title', 'slug', 'subtitle', 'summary', 'content', 'update_date', ) } ) ARTICLE_CONTENT_FIELDSET = ( None, { 'fields': ('title', 'subtitle', 'summary', 'content', ) } ) ARTCILE_METADATA_FIELDSET = ( 'Metadata', { 'fields': ('slug', 'create_date', 'update_date', 'modified_date', ), 'classes': ('collapse', ) } ) SINGLE_AUTHOR_FIELDSET = ( 'Author', { 'fields': ('author', ), } ) MULTI_AUTHOR_FIELDSET = ( 'Authors', { 'fields': ('authors', ), } ) NONSTAFF_AUTHOR_FIELDSET = ( 'Author', { 'fields': ('non_staff_author', ), } ) KEY_IMAGE_FIELDSET = ( 'Key Image', { 'fields': ('key_image', 'key_image_credit', ) } ) PUBLISHING_FIELDSET = ( 'Publishing', { 'fields': ('status', ('pub_date', 'pub_time'), ) } ) class ArticleBaseAdmin(admin.ModelAdmin): search_fields = ('title', 'subtitle', ) prepopulated_fields = {'slug': ('title', )} formfield_overrides = { models.CharField: {'widget': forms.TextInput(attrs={'size': '117'})}, }
apache-2.0
-8,154,206,064,095,571,000
20.189655
86
0.548413
false
3.242744
false
false
false
marcysweber/hamadryas-social-sim
agent.py
1
3855
""" AGENT: individual attributes """ import random class FemaleState: juvenile, cycling, pregnant, nursing0, nursing1 = range(5) class MaleState: juvsol, sol, fol, lea = range(4) class HamadryasRhp: rhp = { "1": {6: 30, 6.5: 48, 7: 61, 7.5: 65, 8.0: 68, 8.5: 71, 9.0: 73, 9.5: 75, 10.0: 76, 10.5: 77, 11: 78, 11.5: 79, 12: 79.5, 12.5: 80, 13: 80, 13.5: 80, 14: 79.5, 14.5: 79, 15: 78, 15.5: 77, 16: 76, 16.5: 75, 17: 73, 17.5: 71, 18: 68, 18.5: 65, 19: 61, 19.5: 48, 20: 30, 20.5: 0}, "2": {6: 15, 6.5: 24, 7: 30.5, 7.5: 32.5, 8.0: 34, 8.5: 35.5, 9.0: 36.5, 9.5: 37.5, 10.0: 38, 10.5: 38.5, 11: 39, 11.5: 39.5, 12: 39.75, 12.5: 40, 13: 40, 13.5: 40, 14: 39.75, 14.5: 39.5, 15: 39, 15.5: 38.5, 16: 38, 16.5: 37.5, 17: 36.5, 17.5: 35.5, 18: 34, 18.5: 32.5, 19: 30.5, 19.5: 24, 20: 15, 20.5: 0}, "3": {6: 2, 6.5: 4, 7: 6, 7.5: 9, 8.0: 12, 8.5: 16, 9.0: 20, 9.5: 25, 10.0: 30, 10.5: 40, 11: 50, 11.5: 60, 12: 75, 12.5: 90, 13: 96, 13.5: 100, 14: 96, 14.5: 90, 15: 75, 15.5: 60, 16: 50, 16.5: 40, 17: 30, 17.5: 22, 18: 16, 18.5: 11, 19: 8, 19.5: 4, 20: 2, 20.5: 0}, "4": {6: 1, 6.5: 2, 7: 3, 7.5: 4.5, 8.0: 6, 8.5: 8, 9.0: 10, 9.5: 12.5, 10.0: 15, 10.5: 20, 11: 25, 11.5: 30, 12: 37.5, 12.5: 45, 13: 48, 13.5: 50, 14: 48, 14.5: 45, 15: 37.5, 15.5: 30, 16: 25, 16.5: 20, 17: 15, 17.5: 11, 18: 8, 18.5: 5.5, 19: 4, 19.5: 2, 20: 1, 20.5: 0} } class AgentClass(object): def __init__(self, sex, mother, sire): # defines an agent.py of any species self.index = 0 self.age = 0.0 self.sex = sex self.femaleState = None self.last_birth = None self.sire_of_fetus = None self.parents = [mother, sire] self.offspring = [] self.dispersed = False # set to True if born during sim self.born = False class HamadryasAgent(AgentClass): # defines the attributes that a hamadryas baboon must have def __init__(self, sex, mother, sire, bandID): self.taxon = "hamadryas" self.clanID = None self.bandID = bandID self.OMUID = None self.maleState = None self.females = [] self.malefols = [] self.femaleState = None self.maleState 
= None super(HamadryasAgent, self).__init__(sex, mother, sire) def get_rhp(self): score = HamadryasRhp.rhp[self.rhp][self.age] return score class MakeAgents: @staticmethod def makenewhamadryas(bandID, sex, mother, sire, population, sim, age=0.0): newagent = HamadryasAgent(sex, mother, sire, bandID) newagent.age = age if newagent.sex == 'm': newagent.rhp = MakeAgents.assignrhpcurve(newagent) else: newagent.femaleState = FemaleState.juvenile newagent.index = MakeAgents.get_unique_index(population) # parents get credit if sire and sire in population.dict.keys(): population.dict[sire].offspring.append(newagent.index) population.dict[sire].last_birth = population.halfyear if mother and mother in population.dict.keys(): population.dict[mother].offspring.append(newagent.index) return newagent @staticmethod def assignrhpcurve(agent): score = None if agent.taxon == "hamadryas": score = random.choice(["1", "2", "3", "4"]) elif agent.taxon == "savannah": score = random.choice(["1", "2", "3", "4", "5"]) return score @staticmethod def get_unique_index(population): newindex = population.topeverindex + 1 population.topeverindex = newindex return newindex
mit
-7,708,074,396,862,821,000
33.419643
101
0.530739
false
2.664133
false
false
false
deepmind/open_spiel
open_spiel/python/tests/tensor_game_utils_test.py
1
2737
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests the C++ matrix game utility methods exposed to Python.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import absltest import pyspiel class TensorGamesUtilsTest(absltest.TestCase): def test_extensive_to_tensor_game_type(self): game = pyspiel.extensive_to_tensor_game( pyspiel.load_game( "turn_based_simultaneous_game(game=blotto(players=3,coins=5))")) game_type = game.get_type() self.assertEqual(game_type.dynamics, pyspiel.GameType.Dynamics.SIMULTANEOUS) self.assertEqual(game_type.chance_mode, pyspiel.GameType.ChanceMode.DETERMINISTIC) self.assertEqual(game_type.information, pyspiel.GameType.Information.ONE_SHOT) self.assertEqual(game_type.utility, pyspiel.GameType.Utility.ZERO_SUM) def test_extensive_to_tensor_game_payoff_tensor(self): turn_based_game = pyspiel.load_game_as_turn_based( "blotto(players=3,coins=5)") tensor_game1 = pyspiel.extensive_to_tensor_game(turn_based_game) tensor_game2 = pyspiel.load_tensor_game("blotto(players=3,coins=5)") self.assertEqual(tensor_game1.shape(), tensor_game2.shape()) s0 = turn_based_game.new_initial_state() self.assertEqual(tensor_game1.shape()[0], s0.num_distinct_actions()) for a0 in range(s0.num_distinct_actions()): s1 = s0.child(a0) self.assertEqual(tensor_game1.shape()[1], s1.num_distinct_actions()) for a1 in 
range(s1.num_distinct_actions()): s2 = s1.child(a1) self.assertEqual(tensor_game1.shape()[2], s2.num_distinct_actions()) for a2 in range(s2.num_distinct_actions()): s3 = s2.child(a2) self.assertTrue(s3.is_terminal()) for player in range(3): self.assertEqual( s3.returns()[player], tensor_game1.player_utility(player, (a0, a1, a2))) self.assertEqual( s3.returns()[player], tensor_game2.player_utility(player, (a0, a1, a2))) if __name__ == "__main__": absltest.main()
apache-2.0
5,639,222,066,604,762,000
39.850746
80
0.680672
false
3.362408
true
false
false
jurcicek/extended-hidden-vector-state-parser
svc/ui/mlf.py
1
3238
# SVC library - usefull Python routines and classes # Copyright (C) 2006-2008 Jan Svec, honza.svec@gmail.com # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os import codecs from svc.egg import PythonEgg from svc.utils import issequence from svc.ui.treedist import OrderedTree, ROOT_CONCEPT class _ConceptLine(list): def __init__(self, separator): super(_ConceptLine, self).__init__() self._separator = separator def addSeparator(self): self.append(self._separator) def removeSeparator(self): while self and self[-1] == self._separator: del self[-1] def flushLine(self): self.removeSeparator() ret = ''.join(self) del self[:] return OrderedTree.fromString(ret, label=ROOT_CONCEPT) class MLF(PythonEgg, dict): MLF_HEADER = '#!MLF!#' @classmethod def mapFromMLF(cls, contents): return contents def mapToMLF(self, value): return value @classmethod def readFromFile(cls, fn): fr = codecs.open(fn, 'r', 'utf-8') try: return cls.fromLines(fr) finally: fr.close() @classmethod def fromLines(cls, lines): forest = cls() it = iter(lines) header = it.next().strip() if header != cls.MLF_HEADER: raise ValueError("Not a MLF file, bad header") filename_line = True for line in it: if filename_line: filename_line = False filename = line.strip()[1:-1] filename = os.path.splitext(filename)[0] content_lines = [] continue elif line[0] == '.': filename_line = True forest[filename] = cls.mapFromMLF(content_lines) del 
content_lines continue else: content_lines.append(line) return forest def writeToFile(self, fn): fw = codecs.open(fn, 'w', 'utf-8') try: for line in self.toLines(): fw.write(line) finally: fw.close() def toLines(self): yield self.MLF_HEADER + '\n' for key in sorted(self): yield '"%s"\n' % key value = self[key] for line in self.mapToMLF(value): yield line yield '.\n' class ConceptMLF(MLF): @classmethod def mapFromMLF(cls, contents): str = ' '.join(s.strip() for s in contents) return OrderedTree.fromString(str, label=ROOT_CONCEPT) def mapToMLF(self, value): return str(value)
gpl-2.0
5,940,858,042,514,683,000
28.436364
71
0.590179
false
3.992602
false
false
false
rocky/python2-trepan
trepan/processor/command/info_subcmd/signals.py
2
2200
# -*- coding: utf-8 -*- # Copyright (C) 2009, 2015 Rocky Bernstein <rocky@gnu.org> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # Our local modules from trepan.processor.command import base_subcmd as Mbase_subcmd from trepan.lib import complete as Mcomplete import columnize class InfoSignals(Mbase_subcmd.DebuggerSubcommand): '''**info signals** [*signal-name*] **info signals** \* Show information about how debugger treats signals to the program. Here are the boolean actions we can take: * Stop: enter the debugger when the signal is sent to the debugged program * Print: print that the signal was received * Stack: show a call stack * Pass: pass the signal onto the program If *signal-name* is not given, we the above show information for all signals. If '*' is given we just give a list of signals. 
''' min_abbrev = 3 # info sig need_stack = False short_help = 'What debugger does when program gets various signals' def complete(self, prefix): completions = sorted(['*'] + self.debugger.sigmgr.siglist) return Mcomplete.complete_token(completions, prefix) def run(self, args): if len(args) > 0 and args[0] == '*' : self.msg(self.columnize_commands(self.debugger.sigmgr.siglist)) else: self.debugger.sigmgr.info_signal(['signal'] + args) return pass if __name__ == '__main__': from trepan.processor.command import mock, info as Minfo d, cp = mock.dbg_setup() i = Minfo.InfoCommand(cp) sub = InfoSignals(i) # sub.run([]) # sub.run(['*']) pass
gpl-3.0
-854,031,701,820,142,800
32.333333
75
0.689091
false
3.735144
false
false
false
cmac4603/Home-Utilities-App
archive/HomeUtilitiesApp v0.1.2.py
1
1090
from kivy.app import App from kivy.uix.button import Button from kivy.uix.gridlayout import GridLayout class HomeButtons(GridLayout): def __init__(self, **kwargs): super(HomeButtons, self).__init__(**kwargs) self.cols = 2 self.rows = 2 btn1 = Button(text='BElGen Live Graph') btn2 = Button(text='Lights') btn3 = Button(text='Unused') btn4 = Button(text='Unused') self.add_widget(btn1) btn1.bind(on_press = callbackgraph) self.add_widget(btn2) btn2.bind(on_press = callbacklights) self.add_widget(btn3) btn3.bind(on_press = callbackunused) self.add_widget(btn4) btn4.bind(on_press = callbackunused) def callbackgraph(instance): print('The button <%s> is being pressed' %instance.text) def callbacklights(instance): print('The button <%s> is being pressed' %instance.text) def callbackunused(instance): print('The button is <%s>' %instance.text) class HomeUtilitiesApp(App): def build(self): return HomeButtons() HomeUtilitiesApp().run()
gpl-2.0
-4,633,940,423,851,666,000
29.277778
60
0.647706
false
3.550489
false
false
false
kato-masahiro/RaspberryPiMouse-on-episode-task
particle_filter_on_episode.py
1
17821
#!/usr/bin/env python #coding:utf-8 """ パーティクルフィルタでエピソード的タスクを学習させる 全体的な流れ: while(報酬==0): センサ値をN回取得して平均を取る latest_sen = 平均 パーティクルを尤度関数を用いて再配置する(報酬は前回得たものを用いる) パーティクルの投票で行動を決定する 前回の報酬、観測、行動をepisode_setに追加 while(行動終了の条件を満たさない): 一瞬だけ行動する N回センサ値を取得して平均を取る 報酬を得る 報酬をlaetst_episodeに追加 """ import rospy import os import random import math import sys from raspimouse_ros.msg import LightSensorValues from gazebo_msgs.msg import ModelStates from geometry_msgs.msg import Twist args = sys.argv try: reward_arm = args[1] except IndexError: print "実行時に引数として'right'または'left'を指定してください" sys.exit() # 変更可能なパラメータ p = 1000 # パーティクルの数 lmd = 17 #retrospective_resettingの時、いくつのイベントを残すか N = 10 # 何回分のセンサ値の平均を取って利用するか fw_threshold = 5000 # 前進をやめるかどうかの判定に使われる閾値(rf+rs+ls+lf) alpha_threshold = 0.3 # retrospective_resettingを行うかどうかの閾値。0.0だと行わない。1.0だと常に行う。 greedy_particles = 0.8 # パーティクルが尤度関数に基づいてリサンプリングされる確率 not_fit_reduce = 0.4 # つじつまが合わないエピソードの尤度に掛けて削減する。0.0から1.0 sensitivity = 600 # センサ値の差に応じて尤度を減少させるための値.\ # その他のグローバル変数 x = 0.0; y = 0.0 # ロボットの座標 rf = 0; rs = 0; ls = 0; lf = 0 # センサ値 sensors_val = [0,0,0,0] # 平均を取るためにrf,rs,ls,lfの和を入れるための変数 counter = 0 # sensors_callbackを何回実行したか T = 1 # 最新の時間ステップ(いままで経験したエピソードの数+1) T0 = 1 action = "" # 行動."f","r","l","s"の3種類(前進、右旋回、左旋回,待機) moving_flag = False # ロボットが行動中かどうかのフラグ got_average_flag = False # センサ値が平均値をとっているかどうかのフラグ end_flag = False # 非ゼロ報酬を得たらこのフラグが立って、すべての処理を終わらせる。 particle = range(p) # パーティクルの位置、重みが入るリスト。パーティクルの重みの合計は1 for i in particle: particle[i] = [0, 1.0/p] latest_episode = [0.0 ,0, 0, 0, 0,""] # 最新のエピソード。報酬値、センサ値、行動。 episode_set = [] # 過去のエピソードの集合。報酬値、センサ値、行動 alpha = 0.0 ########################################################### # particle,episode_setについてファイルから読み込む # ########################################################### if os.path.exists("./particle.txt"): f = open("particle.txt","r") particle = f.read() particle = eval(particle) f.close() print "ファイル:particle.txtを読み込みました" if os.path.exists("./episode_set.txt"): f = 
open("episode_set.txt","r") episode_set = f.read() episode_set = eval(episode_set) f.close T = len(episode_set) + 1 T0 = len(episode_set) + 1 print "ファイル:episode_set.txtを読み込みました" else: f = open("result.txt","a") f.write("---\n") f.close() def sensors_ave(): """センサの平均値を求める""" global rf;global rs;global ls;global lf global sensors_val global got_average_flag sensors_val[0] += rf sensors_val[1] += rs sensors_val[2] += ls sensors_val[3] += lf if counter%N == 0: got_average_flag = True for i in range(4): sensors_val[i] /= N else: got_average_flag = False def reward_check(x,y): """ ロボットの位置に基づき、正解・不正解・行動の続行等を決定する """ global end_flag global latest_episode if reward_arm == "right": if(x - 0.36) ** 2 + (y + 0.15) ** 2 <= 0.005: print "###_reward_check_:ロボットは正解に到達" f = open("result.txt","a") f.write(reward_arm) f.write(":O\n") f.close() latest_episode[0] = 1.0 end_flag = True elif(x - 0.36) ** 2 + (y - 0.15) ** 2 <= 0.005: print "###_reward_check_:ロボットは不正解に到達" f = open("result.txt","a") f.write(reward_arm) f.write(":X\n") f.close() latest_episode[0] = -1.0 end_flag = True else: latest_episode[0] = 0.0 end_flag = False elif reward_arm == "left": if(x - 0.36) ** 2 + (y - 0.15) ** 2 <= 0.005: print "###_reward_check_:ロボットは正解に到達" f = open("result.txt","a") f.write(reward_arm) f.write(":O\n") f.close() latest_episode[0] = 1.0 end_flag = True elif(x - 0.36) ** 2 + (y + 0.15) ** 2 <= 0.005: print "###_reward_check_:ロボットは不正解に到達" f = open("result.txt","a") f.write(reward_arm) f.write(":X\n") f.close() latest_episode[0] = -1.0 end_flag = True else: latest_episode[0] = 0.0 end_flag = False def sensor_update(particle): """ パーティクルの尤度を更新し、αも求める 引数:particle 戻り値:particle,alpha 処理: すべてのパーティクルの位置を一つづつスライドさせる 各パーティクルの重みを求める αを求める αを用いてパーティクルの重みを正規化する """ alpha = 0.0 if T != 1: for i in range(p): if episode_set[ particle[i][0] ][0] == latest_episode[0] and particle[i][1] != "X": l1 = math.fabs(latest_episode[1] - episode_set[ particle[i][0] ][1]) l2 = math.fabs(latest_episode[2] - episode_set[ 
particle[i][0] ][2]) l3 = math.fabs(latest_episode[3] - episode_set[ particle[i][0] ][3]) l4 = math.fabs(latest_episode[4] - episode_set[ particle[i][0] ][4]) particle[i][1] = 0.5 ** ((l1+l2+l3+l4) / sensitivity) else: l1 = math.fabs(latest_episode[1] - episode_set[ particle[i][0] ][1]) l2 = math.fabs(latest_episode[2] - episode_set[ particle[i][0] ][2]) l3 = math.fabs(latest_episode[3] - episode_set[ particle[i][0] ][3]) l4 = math.fabs(latest_episode[4] - episode_set[ particle[i][0] ][4]) particle[i][1] = (0.5 ** ((l1+l2+l3+l4) / sensitivity)) * not_fit_reduce elif T == 1: for i in range(p): particle[i][1] = 1.0/p #alphaも求める for i in range(p): alpha += particle[i][1] #alphaで正規化 if math.fabs(alpha) >= 0.0001: for i in range(p): particle[i][1] /= alpha else: for i in range(p): particle[i][1] = 1.0/p alpha /= p print "alpha:",alpha return particle,alpha def retrospective_resetting(): """ retrospective_resettingを行う 処理: パーティクルを、直近のlmd個のイベント中に均等に配置する それぞれ尤度を求め、リサンプリングする リセッティングの結果尤度が減少した場合には取り消す。 """ global episode_set global particle #print "###_リセッティングをします_###" #print "###_resetting_###:エピソードの数:",len(episode_set),"より、",len(episode_set)-lmd,"から",len(episode_set)-1,"までの中から選ぶ" for i in range(p): particle[i][0] = random.randint(len(episode_set)-lmd,len(episode_set)-1) if particle[i][0] < 0: particle[i][0] = random.randint(0,len(episode_set)-1) particle[i][1] = 1.0/p def motion_update(particle): """ 尤度に基づいてパーティクルをリサンプリングする関数 リサンプリング後のパーティクルの分布も表示する 引数:particle 戻り値:particle """ if T != 1: #重みに基づいてリサンプリング likelihood = [0.0 for i in range(len(episode_set))] for i in range(len(likelihood)):#パーティクルの尤度からエピソードの尤度(likelihood)を求める for ii in range (p): if particle[ii][0] == i: likelihood[i]+= particle[ii][1] #likelihoodの分布に基づき8割のパーティクルを配置する for i in range(int(p * greedy_particles)): seed = random.randint(1,100) for ii in range(len(likelihood)): seed -= likelihood[ii] * 100 if seed <= 0: particle[i][0] = ii break #likelihoodとは無関係に残りのパーティクルを配置する for i in range(int(p * 
greedy_particles),p): seed = random.randint(0,len(episode_set)-1) particle[i][0] = seed #パーティクルがどこにいくつあるか表示する particle_numbers = [0 for i in range(len(episode_set))] for i in range(p): particle_numbers[particle[i][0]] += 1 """ print "===パーティクルの分布===" cnt = 0 for i in range(len(particle_numbers)): print particle_numbers[i],"\t", cnt += 1 if cnt % 4 == 0: print " " print "T" """ elif T == 0: for i in range(p): particle[i][0] = 0 return particle def decision_making(particle,latest_episode): """ 投票によって行動を決定する 引数:particle,latest_episode 戻り値:action """ if T == 1:#まだどんなエピソードも経験していない 前進させる return "f" else: #各パーティクルが投票で決める vote = range(p)#各パーティクルが自分の所属しているエピソードに対して持つ評価 for i in range(p): vote[i] = 0.0 for i in range (p): distance = 0 #パーティクルがいるエピソードとその直後の非ゼロ報酬が得られたエピソードとの距離 non_zero_reward = 0.0 for l in range(len(episode_set) - particle[i][0] - 1): distance += 1 if episode_set[ particle[i][0] + distance ][0] != 0.0: non_zero_reward = episode_set[particle[i][0] + distance][0] break if non_zero_reward != 0: vote[i] = non_zero_reward / distance else: vote[i] = 0.0 print "センサ値:",latest_episode[1:5] #voteに基づく行動決定。voteの合計がゼロやマイナスになる可能性がある点に注意 got = [0.0 ,0.0 ,0.0 ,0.0] #得票数が入るリスト f,r,l,sの順番 for i in range(p): if episode_set[particle[i][0]][5] == "f": got[0] += vote[i] elif episode_set[particle[i][0]][5] == "r": got[1] += vote[i] elif episode_set[particle[i][0]][5] == "l": got[2] += vote[i] #print "###_decision_making_###:得票数 =",got #前に壁がなければ投票にかかわらず前進させる if sum(latest_episode[1:5]) < fw_threshold: return "f" elif got[1] == got[2]: return random.choice("rl") elif got[1] > got[2]: return "r" elif got[1] < got[2]: return "l" else: print("###_decision_making_###:error") def stop(action): """ 閾値によってmoving_flagをオンオフする """ global moving_flag if action == "f": if sum(sensors_val) >= fw_threshold: moving_flag = False else: moving_flag = True else: if sum(sensors_val) < fw_threshold: moving_flag = False else: moving_flag = True def slide(particle): """ すべてのパーティクルの位置を一つ+1する 引数:particle 
戻り値:particle """ for i in range(p): particle[i][0] += 1 if episode_set[ particle[i][0] -1 ][5] != latest_episode[5]: particle[i][1] = "X" #あとで(sensor_updateのとき)ゼロになる return particle def sensors_callback(message): """ センサ値をsubscribeするコールバック関数 main """ vel = Twist() vel.linear.x = 0.0 vel.angular.z = 0.0 global rf;global rs;global ls;global lf global sensors_val global counter global moving_flag global T global action global latest_episode global episode_set global particle counter += 1 # センサデータを読み込む rf = message.right_forward rs = message.right_side ls = message.left_side lf = message.left_forward sensors_ave() # N回分のセンサ値の平均を取る if got_average_flag == True and moving_flag == False and end_flag == False: print "=========================###_sensors_callback_###============================" for i in range(4): latest_episode[i+1] = sensors_val[i] sensors_val[i] = 0 reward_check(x,y) if end_flag == True: #センサ値、行動(stay)を書き込んで色々保存して終了 print T/4," 回目のトライアルが終了しました" print "===================================" if (T - T0) != 3: print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" print "### エピソードの時間ステップが範囲外だったので取り消します ###" print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" 
f = open("result.txt","a") f.write("↑:MISS!!\n") f.close sys.exit() particle,alpha = sensor_update(particle) if alpha < alpha_threshold and T != 1: p_alpha = alpha p_particle = particle retrospective_resetting() particle,alpha = sensor_update(particle) if p_alpha > alpha: #print "リセットしないほうがマシだった" particle = p_particle motion_update(particle) action = "s" latest_episode[5] = "s" #print "###_sensors_callback_###:latest_episode=",latest_episode episode_set.append(list(latest_episode)) particle = slide(particle) #episode_set,particle をファイルに書き込んで終了 f = open("episode_set.txt","w") f.write(str(episode_set)) f.close() f = open("particle.txt","w") f.write(str(particle)) f.close() sys.exit() if T % 4 != 2: #ここでズルしている particle,alpha = sensor_update(particle) if alpha < alpha_threshold and T != 1: p_alpha = alpha p_particle = particle retrospective_resetting() particle,alpha = sensor_update(particle) if p_alpha > alpha: #print "リセットしないほうがましだった" particle = p_particle motion_update(particle) #尤度に基づきパーティクルの分布を更新する else: print "2*!" 
action = decision_making(particle,latest_episode) #パーティクルの投票に基づき行動を決定する latest_episode[5] = action #最新のepisode_setにactionを追加 #print "###_sensors_callback_###:latest_episode=",latest_episode episode_set.append(list(latest_episode))#一連のエピソードをエピソード集合に追加 if T > 1: particle = slide(particle) T += 1 moving_flag = True elif got_average_flag == True and moving_flag == True: if action == "f": vel.linear.x = 0.2 elif action == "r": vel.angular.z = -2.0 elif action == "l": vel.angular.z = 2.0 stop(action) pub.publish(vel) def position_callback(message): """ロボットの現在位置をsubscribeする関数""" global x;global y x = message.pose[-1].position.x y = message.pose[-1].position.y rospy.init_node("particle_filter_on_episode") pub = rospy.Publisher("/raspimouse/diff_drive_controller/cmd_vel",Twist,queue_size = 10) sub1 = rospy.Subscriber("/raspimouse/lightsensors",LightSensorValues,sensors_callback) sub2 = rospy.Subscriber("/gazebo/model_states",ModelStates,position_callback) rospy.spin()
mit
-5,122,817,194,274,757,000
31.307692
118
0.517619
false
2.363344
true
false
false
jiafengwu0301/App_BackEnd
api/views.py
1
1474
from rest_framework import generics, permissions, views, response,status from .models import Account from .serializers import AccountCreateSerializer, AccountSerializer, AuthenticateSerializer, \ UpdateAccountSerializer, AccountRetrieveSerializer # Create your views here. class AccountCreateView(generics.CreateAPIView): queryset = Account.objects.all() serializer_class = AccountCreateSerializer permission_classes = [permissions.AllowAny] class AccountListView(generics.ListAPIView): queryset = Account.objects.all() serializer_class = AccountSerializer permission_classes = [permissions.IsAuthenticated] class AccountRetrieveView(generics.RetrieveAPIView): queryset = Account.objects.all() serializer_class = AccountRetrieveSerializer class UpdateAccountView(generics.UpdateAPIView): queryset = Account.objects.all() serializer_class = UpdateAccountSerializer # permission_classes = [permissions.IsAuthenticated] class AccountAuthenticationView(views.APIView): queryset = Account.objects.all() serializer_class = AuthenticateSerializer def post(self, request): data = request.data serializer = AuthenticateSerializer(data=data) if serializer.is_valid(raise_exception=True): new_date = serializer.data return response.Response(new_date,status=status.HTTP_200_OK) return response.Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
apache-2.0
9,186,457,815,436,140,000
33.302326
94
0.765265
false
4.563467
false
false
false
RNAer/qiita
qiita_pet/handlers/preprocessing_handlers.py
2
1546
from tornado.web import authenticated from .base_handlers import BaseHandler from qiita_ware.dispatchable import preprocessor from qiita_db.data import RawData from qiita_db.parameters import (PreprocessedIlluminaParams, Preprocessed454Params) from qiita_db.metadata_template import PrepTemplate from qiita_ware.context import submit class PreprocessHandler(BaseHandler): @authenticated def post(self): study_id = int(self.get_argument('study_id')) prep_template_id = int(self.get_argument('prep_template_id')) raw_data = RawData(PrepTemplate(prep_template_id).raw_data) param_id = int(self.get_argument('preprocessing_parameters_id')) # Get the preprocessing parameters if raw_data.filetype == 'FASTQ': param_constructor = PreprocessedIlluminaParams elif raw_data.filetype in ('FASTA', 'SFF'): param_constructor = Preprocessed454Params else: raise ValueError('Unknown filetype') job_id = submit(self.current_user.id, preprocessor, study_id, prep_template_id, param_id, param_constructor) self.render('compute_wait.html', job_id=job_id, title='Preprocessing', completion_redirect='/study/description/%d?top_tab=' 'raw_data_tab&sub_tab=%s&prep_tab=%s' % (study_id, raw_data.id, prep_template_id))
bsd-3-clause
280,697,794,406,818,800
41.944444
77
0.611902
false
4.235616
false
false
false
sternshus/arelle2.7
svr-2.7/arelle/ViewFileRenderedGrid.py
1
58703
u''' Created on Sep 13, 2011 @author: Mark V Systems Limited (c) Copyright 2011 Mark V Systems Limited, All rights reserved. ''' import os from arelle import ViewFile from lxml import etree from arelle.RenderingResolver import resolveAxesStructure, RENDER_UNITS_PER_CHAR from arelle.ViewFile import HTML, XML from arelle.ModelObject import ModelObject from arelle.ModelFormulaObject import Aspect, aspectModels, aspectRuleAspects, aspectModelAspect, aspectStr from arelle.FormulaEvaluator import aspectMatches from arelle.FunctionXs import xsString from arelle.ModelInstanceObject import ModelDimensionValue from arelle.ModelValue import QName from arelle.ModelXbrl import DEFAULT from arelle.ModelRenderingObject import (ModelClosedDefinitionNode, ModelEuAxisCoord, ModelFilterDefinitionNode, OPEN_ASPECT_ENTRY_SURROGATE) from arelle.PrototypeInstanceObject import FactPrototype # change tableModel for namespace needed for consistency suite u''' from arelle.XbrlConst import (tableModelMMDD as tableModelNamespace, tableModelMMDDQName as tableModelQName) ''' from arelle import XbrlConst from arelle.XmlUtil import innerTextList, child, elementFragmentIdentifier, addQnameValue from collections import defaultdict emptySet = set() emptyList = [] def viewRenderedGrid(modelXbrl, outfile, lang=None, viewTblELR=None, sourceView=None, diffToFile=False, cssExtras=u""): modelXbrl.modelManager.showStatus(_(u"saving rendering")) view = ViewRenderedGrid(modelXbrl, outfile, lang, cssExtras) if sourceView is not None: viewTblELR = sourceView.tblELR view.ignoreDimValidity.set(sourceView.ignoreDimValidity.get()) view.xAxisChildrenFirst.set(sourceView.xAxisChildrenFirst.get()) view.yAxisChildrenFirst.set(sourceView.yAxisChildrenFirst.get()) view.view(viewTblELR) if diffToFile: from arelle.ValidateInfoset import validateRenderingInfoset validateRenderingInfoset(modelXbrl, outfile, view.xmlDoc) view.close(noWrite=True) else: view.close() modelXbrl.modelManager.showStatus(_(u"rendering saved to 
{0}").format(outfile), clearAfter=5000) class ViewRenderedGrid(ViewFile.View): def __init__(self, modelXbrl, outfile, lang, cssExtras): # find table model namespace based on table namespace self.tableModelNamespace = XbrlConst.tableModel for xsdNs in modelXbrl.namespaceDocs.keys(): if xsdNs in (XbrlConst.tableMMDD, XbrlConst.table, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011): self.tableModelNamespace = xsdNs + u"/model" break super(ViewRenderedGrid, self).__init__(modelXbrl, outfile, u'tableModel xmlns="{0}"'.format(self.tableModelNamespace), lang, style=u"rendering", cssExtras=cssExtras) class nonTkBooleanVar(): def __init__(self, value=True): self.value = value def set(self, value): self.value = value def get(self): return self.value # context menu boolean vars (non-tkinter boolean self.ignoreDimValidity = nonTkBooleanVar(value=True) self.xAxisChildrenFirst = nonTkBooleanVar(value=True) self.yAxisChildrenFirst = nonTkBooleanVar(value=False) def tableModelQName(self, localName): return u'{' + self.tableModelNamespace + u'}' + localName def viewReloadDueToMenuAction(self, *args): self.view() def view(self, viewTblELR=None): if viewTblELR is not None: tblELRs = (viewTblELR,) else: tblELRs = self.modelXbrl.relationshipSet(u"Table-rendering").linkRoleUris if self.type == XML: self.tblElt.append(etree.Comment(u"Entry point file: {0}".format(self.modelXbrl.modelDocument.basename))) for tblELR in tblELRs: self.zOrdinateChoices = {} for discriminator in xrange(1, 65535): # each table z production tblAxisRelSet, xTopStructuralNode, yTopStructuralNode, zTopStructuralNode = resolveAxesStructure(self, tblELR) self.hasTableFilters = bool(self.modelTable.filterRelationships) self.zStrNodesWithChoices = [] if tblAxisRelSet and self.tblElt is not None: tableLabel = (self.modelTable.genLabel(lang=self.lang, strip=True) or # use table label, if any self.roledefinition) if self.type == HTML: # table on each Z # each Z is a separate table in the outer table 
zTableRow = etree.SubElement(self.tblElt, u"{http://www.w3.org/1999/xhtml}tr") zRowCell = etree.SubElement(zTableRow, u"{http://www.w3.org/1999/xhtml}td") zCellTable = etree.SubElement(zRowCell, u"{http://www.w3.org/1999/xhtml}table", attrib={u"border":u"1", u"cellspacing":u"0", u"cellpadding":u"4", u"style":u"font-size:8pt;"}) self.rowElts = [etree.SubElement(zCellTable, u"{http://www.w3.org/1999/xhtml}tr") for r in xrange(self.dataFirstRow + self.dataRows - 1)] etree.SubElement(self.rowElts[0], u"{http://www.w3.org/1999/xhtml}th", attrib={u"class":u"tableHdr", u"style":u"max-width:100em;", u"colspan": unicode(self.dataFirstCol - 1), u"rowspan": unicode(self.dataFirstRow - 1)} ).text = tableLabel elif self.type == XML: self.structuralNodeModelElements = [] if discriminator == 1: # headers structure only build once for table tableSetElt = etree.SubElement(self.tblElt, self.tableModelQName(u"tableSet")) tableSetElt.append(etree.Comment(u"TableSet linkbase file: {0}, line {1}".format(self.modelTable.modelDocument.basename, self.modelTable.sourceline))) tableSetElt.append(etree.Comment(u"TableSet namespace: {0}".format(self.modelTable.namespaceURI))) tableSetElt.append(etree.Comment(u"TableSet linkrole: {0}".format(tblELR))) etree.SubElement(tableSetElt, self.tableModelQName(u"label") ).text = tableLabel zAspectStructuralNodes = defaultdict(set) tableElt = etree.SubElement(tableSetElt, self.tableModelQName(u"table")) self.groupElts = {} self.headerElts = {} self.headerCells = defaultdict(list) # order #: (breakdownNode, xml element) for axis in (u"z", u"y", u"x"): breakdownNodes = self.breakdownNodes.get(axis) if breakdownNodes: hdrsElt = etree.SubElement(tableElt, self.tableModelQName(u"headers"), attrib={u"axis": axis}) for brkdownNode in self.breakdownNodes.get(axis): groupElt = etree.SubElement(hdrsElt, self.tableModelQName(u"group")) groupElt.append(etree.Comment(u"Breakdown node file: {0}, line {1}".format(brkdownNode.modelDocument.basename, 
brkdownNode.sourceline))) label = brkdownNode.genLabel(lang=self.lang, strip=True) if label: etree.SubElement(groupElt, self.tableModelQName(u"label")).text=label self.groupElts[brkdownNode] = groupElt # HF TODO omit header if zero cardinality on breakdown self.headerElts[brkdownNode] = etree.SubElement(groupElt, self.tableModelQName(u"header")) else: tableElt.append(etree.Comment(u"No breakdown group for \"{0}\" axis".format(axis))) self.zAxis(1, zTopStructuralNode, zAspectStructuralNodes, True) self.cellsParentElt = tableElt if self.breakdownNodes.get(u"z"): self.cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName(u"cells"), attrib={u"axis": u"z"}) if self.breakdownNodes.get(u"y"): self.cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName(u"cells"), attrib={u"axis": u"y"}) u''' move into body cells, for entry row-by-row self.cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName("cells"), attrib={"axis": "x"}) ''' # rows/cols only on firstTime for infoset XML, but on each time for xhtml zAspectStructuralNodes = defaultdict(set) self.zAxis(1, zTopStructuralNode, zAspectStructuralNodes, False) xStructuralNodes = [] if self.type == HTML or (xTopStructuralNode and xTopStructuralNode.childStructuralNodes): self.xAxis(self.dataFirstCol, self.colHdrTopRow, self.colHdrTopRow + self.colHdrRows - 1, xTopStructuralNode, xStructuralNodes, self.xAxisChildrenFirst.get(), True, True) if self.type == HTML: # table/tr goes by row self.yAxisByRow(1, self.dataFirstRow, yTopStructuralNode, self.yAxisChildrenFirst.get(), True, True) elif self.type == XML: # infoset goes by col of row header if yTopStructuralNode and yTopStructuralNode.childStructuralNodes: # no row header element if no rows self.yAxisByCol(1, self.dataFirstRow, yTopStructuralNode, self.yAxisChildrenFirst.get(), True, True) # add header cells to header elements for position, breakdownCellElts in sorted(self.headerCells.items()): for breakdownNode, 
headerCell in breakdownCellElts: self.headerElts[breakdownNode].append(headerCell) for structuralNode,modelElt in self.structuralNodeModelElements: # must do after elements are all arragned modelElt.addprevious(etree.Comment(u"{0}: label {1}, file {2}, line {3}" .format(structuralNode.definitionNode.localName, structuralNode.definitionNode.xlinkLabel, structuralNode.definitionNode.modelDocument.basename, structuralNode.definitionNode.sourceline))) if structuralNode.definitionNode.get(u'value'): modelElt.addprevious(etree.Comment(u" @value {0}".format(structuralNode.definitionNode.get(u'value')))) for aspect in sorted(structuralNode.aspectsCovered(), key=lambda a: aspectStr(a)): if structuralNode.hasAspect(aspect) and aspect not in (Aspect.DIMENSIONS, Aspect.OMIT_DIMENSIONS): aspectValue = structuralNode.aspectValue(aspect) if aspectValue is None: aspectValue = u"(bound dynamically)" modelElt.addprevious(etree.Comment(u" aspect {0}: {1}".format(aspectStr(aspect), xsString(None,None,aspectValue)))) for varName, varValue in structuralNode.variables.items(): modelElt.addprevious(etree.Comment(u" variable ${0}: {1}".format(varName, varValue))) for headerElt in self.headerElts.values(): # remove empty header elements if not any(e is not None for e in headerElt.iterchildren()): headerElt.getparent().remove(headerElt) self.bodyCells(self.dataFirstRow, yTopStructuralNode, xStructuralNodes, zAspectStructuralNodes, self.yAxisChildrenFirst.get()) # find next choice structural node moreDiscriminators = False for zStrNodeWithChoices in self.zStrNodesWithChoices: currentIndex = zStrNodeWithChoices.choiceNodeIndex + 1 if currentIndex < len(zStrNodeWithChoices.choiceStructuralNodes): zStrNodeWithChoices.choiceNodeIndex = currentIndex self.zOrdinateChoices[zStrNodeWithChoices.definitionNode] = currentIndex moreDiscriminators = True break else: zStrNodeWithChoices.choiceNodeIndex = 0 self.zOrdinateChoices[zStrNodeWithChoices.definitionNode] = 0 # continue incrementing next outermore 
z choices index if not moreDiscriminators: break def zAxis(self, row, zStructuralNode, zAspectStructuralNodes, discriminatorsTable): if zStructuralNode is not None: label = zStructuralNode.header(lang=self.lang) choiceLabel = None effectiveStructuralNode = zStructuralNode if zStructuralNode.choiceStructuralNodes: # same as combo box selection in GUI mode if not discriminatorsTable: self.zStrNodesWithChoices.insert(0, zStructuralNode) # iteration from last is first try: effectiveStructuralNode = zStructuralNode.choiceStructuralNodes[zStructuralNode.choiceNodeIndex] choiceLabel = effectiveStructuralNode.header(lang=self.lang) if not label and choiceLabel: label = choiceLabel # no header for choice choiceLabel = None except KeyError: pass if choiceLabel: if self.dataCols > 3: zLabelSpan = 2 else: zLabelSpan = 1 zChoiceLabelSpan = self.dataCols - zLabelSpan else: zLabelSpan = self.dataCols if self.type == HTML: etree.SubElement(self.rowElts[row-1], u"{http://www.w3.org/1999/xhtml}th", attrib={u"class":u"zAxisHdr", u"style":u"max-width:200pt;text-align:left;border-bottom:.5pt solid windowtext", u"colspan": unicode(zLabelSpan)} # "2"} ).text = label if choiceLabel: etree.SubElement(self.rowElts[row-1], u"{http://www.w3.org/1999/xhtml}th", attrib={u"class":u"zAxisHdr", u"style":u"max-width:200pt;text-align:left;border-bottom:.5pt solid windowtext", u"colspan": unicode(zChoiceLabelSpan)} # "2"} ).text = choiceLabel elif self.type == XML: # headers element built for first pass on z axis if discriminatorsTable: brkdownNode = zStructuralNode.breakdownNode if zStructuralNode.choiceStructuralNodes: # same as combo box selection in GUI mode # hdrElt.set("label", label) if discriminatorsTable: def zSpan(zNode, startNode=False): if startNode: thisSpan = 0 elif zStructuralNode.choiceStructuralNodes: thisSpan = len(zStructuralNode.choiceStructuralNodes) else: thisSpan = 1 return sum(zSpan(z) for z in zNode.childStructuralNodes) + thisSpan span = zSpan(zStructuralNode, True) for i, 
choiceStructuralNode in enumerate(zStructuralNode.choiceStructuralNodes): choiceLabel = choiceStructuralNode.header(lang=self.lang) cellElt = etree.Element(self.tableModelQName(u"cell"), attrib={u"span": unicode(span)} if span > 1 else None) self.headerCells[i].append((brkdownNode, cellElt)) # self.structuralNodeModelElements.append((zStructuralNode, cellElt)) elt = etree.SubElement(cellElt, self.tableModelQName(u"label")) if choiceLabel: elt.text = choiceLabel #else: # choiceLabel from above # etree.SubElement(hdrElt, self.tableModelQName("label") # ).text = choiceLabel else: # no combo choices, single label cellElt = etree.Element(self.tableModelQName(u"cell")) self.headerCells[0].append((brkdownNode, cellElt)) # self.structuralNodeModelElements.append((zStructuralNode, cellElt)) elt = etree.SubElement(cellElt, self.tableModelQName(u"label")) if label: elt.text = label for aspect in aspectModels[self.aspectModel]: if effectiveStructuralNode.hasAspect(aspect, inherit=True): #implies inheriting from other z axes if aspect == Aspect.DIMENSIONS: for dim in (effectiveStructuralNode.aspectValue(Aspect.DIMENSIONS, inherit=True) or emptyList): zAspectStructuralNodes[dim].add(effectiveStructuralNode) else: zAspectStructuralNodes[aspect].add(effectiveStructuralNode) for zStructuralNode in zStructuralNode.childStructuralNodes: self.zAxis(row + 1, zStructuralNode, zAspectStructuralNodes, discriminatorsTable) def xAxis(self, leftCol, topRow, rowBelow, xParentStructuralNode, xStructuralNodes, childrenFirst, renderNow, atTop): if xParentStructuralNode is not None: parentRow = rowBelow noDescendants = True rightCol = leftCol widthToSpanParent = 0 sideBorder = not xStructuralNodes for xStructuralNode in xParentStructuralNode.childStructuralNodes: noDescendants = False rightCol, row, width, leafNode = self.xAxis(leftCol, topRow + 1, rowBelow, xStructuralNode, xStructuralNodes, # nested items before totals childrenFirst, childrenFirst, False) if row - 1 < parentRow: parentRow = row 
- 1 #if not leafNode: # rightCol -= 1 nonAbstract = not xStructuralNode.isAbstract if nonAbstract: width += 100 # width for this label widthToSpanParent += width if childrenFirst: thisCol = rightCol else: thisCol = leftCol #print ( "thisCol {0} leftCol {1} rightCol {2} topRow{3} renderNow {4} label {5}".format(thisCol, leftCol, rightCol, topRow, renderNow, label)) if renderNow: label = xStructuralNode.header(lang=self.lang, returnGenLabel=isinstance(xStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord))) columnspan = rightCol - leftCol if columnspan > 0 and nonAbstract: columnspan += 1 elt = None if self.type == HTML: if rightCol == self.dataFirstCol + self.dataCols - 1: edgeBorder = u"border-right:.5pt solid windowtext;" else: edgeBorder = u"" attrib = {u"class":u"xAxisHdr", u"style":u"text-align:center;max-width:{0}pt;{1}".format(width,edgeBorder)} if columnspan > 1: attrib[u"colspan"] = unicode(columnspan) if leafNode and row > topRow: attrib[u"rowspan"] = unicode(row - topRow + 1) elt = etree.Element(u"{http://www.w3.org/1999/xhtml}th", attrib=attrib) self.rowElts[topRow-1].insert(leftCol,elt) elif (self.type == XML and # is leaf or no sub-breakdown cardinality (xStructuralNode.childStructuralNodes is None or columnspan > 0)): # ignore no-breakdown situation brkdownNode = xStructuralNode.breakdownNode cellElt = etree.Element(self.tableModelQName(u"cell"), attrib={u"span": unicode(columnspan)} if columnspan > 1 else None) self.headerCells[thisCol].append((brkdownNode, cellElt)) # self.structuralNodeModelElements.append((xStructuralNode, cellElt)) elt = etree.SubElement(cellElt, self.tableModelQName(u"label")) if nonAbstract or (leafNode and row > topRow): for rollUpCol in xrange(topRow - self.colHdrTopRow + 1, self.colHdrRows - 1): rollUpElt = etree.Element(self.tableModelQName(u"cell"), attrib={u"rollup":u"true"}) self.headerCells[thisCol].append((brkdownNode, cellElt)) for i, role in enumerate(self.colHdrNonStdRoles): roleLabel = 
xStructuralNode.header(role=role, lang=self.lang, recurseParent=False) # infoset does not move parent label to decscndant if roleLabel is not None: cellElt.append(etree.Comment(u"Label role: {0}, lang {1}" .format(os.path.basename(role), self.lang))) labelElt = etree.SubElement(cellElt, self.tableModelQName(u"label"), #attrib={"role": role, # "lang": self.lang} ) labelElt.text = roleLabel for aspect in sorted(xStructuralNode.aspectsCovered(), key=lambda a: aspectStr(a)): if xStructuralNode.hasAspect(aspect) and aspect not in (Aspect.DIMENSIONS, Aspect.OMIT_DIMENSIONS): aspectValue = xStructuralNode.aspectValue(aspect) if aspectValue is None: aspectValue = u"(bound dynamically)" if isinstance(aspectValue, ModelObject): # typed dimension value aspectValue = innerTextList(aspectValue) aspElt = etree.SubElement(cellElt, self.tableModelQName(u"constraint")) etree.SubElement(aspElt, self.tableModelQName(u"aspect") ).text = aspectStr(aspect) etree.SubElement(aspElt, self.tableModelQName(u"value") ).text = xsString(None,None,addQnameValue(self.xmlDoc, aspectValue)) if elt is not None: elt.text = label if bool(label) and label != OPEN_ASPECT_ENTRY_SURROGATE else u"\u00A0" #produces &nbsp; if nonAbstract: if columnspan > 1 and rowBelow > topRow: # add spanned left leg portion one row down if self.type == HTML: attrib= {u"class":u"xAxisSpanLeg", u"rowspan": unicode(rowBelow - row)} if edgeBorder: attrib[u"style"] = edgeBorder elt = etree.Element(u"{http://www.w3.org/1999/xhtml}th", attrib=attrib) elt.text = u"\u00A0" if childrenFirst: self.rowElts[topRow].append(elt) else: self.rowElts[topRow].insert(leftCol,elt) if self.type == HTML: for i, role in enumerate(self.colHdrNonStdRoles): elt = etree.Element(u"{http://www.w3.org/1999/xhtml}th", attrib={u"class":u"xAxisHdr", u"style":u"text-align:center;max-width:100pt;{0}".format(edgeBorder)}) self.rowElts[self.dataFirstRow - 1 - len(self.colHdrNonStdRoles) + i].insert(thisCol,elt) elt.text = xStructuralNode.header(role=role, 
lang=self.lang) or u"\u00A0" u''' if self.colHdrDocRow: doc = xStructuralNode.header(role="http://www.xbrl.org/2008/role/documentation", lang=self.lang) if self.type == HTML: elt = etree.Element("{http://www.w3.org/1999/xhtml}th", attrib={"class":"xAxisHdr", "style":"text-align:center;max-width:100pt;{0}".format(edgeBorder)}) self.rowElts[self.dataFirstRow - 2 - self.rowHdrCodeCol].insert(thisCol,elt) elif self.type == XML: elt = etree.Element(self.tableModelQName("label")) self.colHdrElts[self.colHdrRows - 1].insert(thisCol,elt) elt.text = doc or "\u00A0" if self.colHdrCodeRow: code = xStructuralNode.header(role="http://www.eurofiling.info/role/2010/coordinate-code") if self.type == HTML: elt = etree.Element("{http://www.w3.org/1999/xhtml}th", attrib={"class":"xAxisHdr", "style":"text-align:center;max-width:100pt;{0}".format(edgeBorder)}) self.rowElts[self.dataFirstRow - 2].insert(thisCol,elt) elif self.type == XML: elt = etree.Element(self.tableModelQName("label")) self.colHdrElts[self.colHdrRows - 1 + self.colHdrDocRow].insert(thisCol,elt) elt.text = code or "\u00A0" ''' xStructuralNodes.append(xStructuralNode) if nonAbstract: rightCol += 1 if renderNow and not childrenFirst: self.xAxis(leftCol + (1 if nonAbstract else 0), topRow + 1, rowBelow, xStructuralNode, xStructuralNodes, childrenFirst, True, False) # render on this pass leftCol = rightCol return (rightCol, parentRow, widthToSpanParent, noDescendants) def yAxisByRow(self, leftCol, row, yParentStructuralNode, childrenFirst, renderNow, atLeft): if yParentStructuralNode is not None: nestedBottomRow = row for yStructuralNode in yParentStructuralNode.childStructuralNodes: nestRow, nextRow = self.yAxisByRow(leftCol + 1, row, yStructuralNode, # nested items before totals childrenFirst, childrenFirst, False) isAbstract = (yStructuralNode.isAbstract or (yStructuralNode.childStructuralNodes and not isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)))) isNonAbstract = not 
isAbstract isLabeled = yStructuralNode.isLabeled topRow = row #print ( "row {0} topRow {1} nxtRow {2} col {3} renderNow {4} label {5}".format(row, topRow, nextRow, leftCol, renderNow, label)) if renderNow and isLabeled: label = yStructuralNode.header(lang=self.lang, returnGenLabel=isinstance(yStructuralNode.definitionNode, ModelClosedDefinitionNode), recurseParent=not isinstance(yStructuralNode.definitionNode, ModelFilterDefinitionNode)) columnspan = self.rowHdrCols - leftCol + 1 if isNonAbstract or nextRow == row else 1 if childrenFirst and isNonAbstract and nextRow > row: elt = etree.Element(u"{http://www.w3.org/1999/xhtml}th", attrib={u"class":u"yAxisSpanArm", u"style":u"text-align:center;min-width:2em;", u"rowspan": unicode(nextRow - topRow)} ) insertPosition = self.rowElts[nextRow-1].__len__() self.rowElts[row - 1].insert(insertPosition, elt) elt.text = u"\u00A0" hdrRow = nextRow # put nested stuff on bottom row row = nextRow # nested header still goes on this row else: hdrRow = row # provide top or bottom borders edgeBorder = u"" if childrenFirst: if hdrRow == self.dataFirstRow: edgeBorder = u"border-top:.5pt solid windowtext;" else: if hdrRow == len(self.rowElts): edgeBorder = u"border-bottom:.5pt solid windowtext;" depth = yStructuralNode.depth attrib = {u"style":u"text-align:{0};max-width:{1}em;{2}".format( u"left" if isNonAbstract or nestRow == hdrRow else u"center", # this is a wrap length max width in characters self.rowHdrColWidth[depth] if isAbstract else self.rowHdrWrapLength - sum(self.rowHdrColWidth[0:depth]), edgeBorder), u"colspan": unicode(columnspan)} if label == OPEN_ASPECT_ENTRY_SURROGATE: # entry of dimension attrib[u"style"] += u";background:#fff" # override for white background if isAbstract: attrib[u"rowspan"] = unicode(nestRow - hdrRow) attrib[u"class"] = u"yAxisHdrAbstractChildrenFirst" if childrenFirst else u"yAxisHdrAbstract" elif nestRow > hdrRow: attrib[u"class"] = u"yAxisHdrWithLeg" elif childrenFirst: attrib[u"class"] = 
u"yAxisHdrWithChildrenFirst" else: attrib[u"class"] = u"yAxisHdr" elt = etree.Element(u"{http://www.w3.org/1999/xhtml}th", attrib=attrib ) elt.text = label if bool(label) and label != OPEN_ASPECT_ENTRY_SURROGATE else u"\u00A0" if isNonAbstract: self.rowElts[hdrRow-1].append(elt) if not childrenFirst and nestRow > hdrRow: # add spanned left leg portion one row down etree.SubElement(self.rowElts[hdrRow], u"{http://www.w3.org/1999/xhtml}th", attrib={u"class":u"yAxisSpanLeg", u"style":u"text-align:center;max-width:{0}pt;{1}".format(RENDER_UNITS_PER_CHAR, edgeBorder), u"rowspan": unicode(nestRow - hdrRow)} ).text = u"\u00A0" hdrClass = u"yAxisHdr" if not childrenFirst else u"yAxisHdrWithChildrenFirst" for i, role in enumerate(self.rowHdrNonStdRoles): hdr = yStructuralNode.header(role=role, lang=self.lang) etree.SubElement(self.rowElts[hdrRow - 1], u"{http://www.w3.org/1999/xhtml}th", attrib={u"class":hdrClass, u"style":u"text-align:left;max-width:100pt;{0}".format(edgeBorder)} ).text = hdr or u"\u00A0" u''' if self.rowHdrDocCol: docCol = self.dataFirstCol - 1 - self.rowHdrCodeCol doc = yStructuralNode.header(role="http://www.xbrl.org/2008/role/documentation") etree.SubElement(self.rowElts[hdrRow - 1], "{http://www.w3.org/1999/xhtml}th", attrib={"class":hdrClass, "style":"text-align:left;max-width:100pt;{0}".format(edgeBorder)} ).text = doc or "\u00A0" if self.rowHdrCodeCol: codeCol = self.dataFirstCol - 1 code = yStructuralNode.header(role="http://www.eurofiling.info/role/2010/coordinate-code") etree.SubElement(self.rowElts[hdrRow - 1], "{http://www.w3.org/1999/xhtml}th", attrib={"class":hdrClass, "style":"text-align:center;max-width:40pt;{0}".format(edgeBorder)} ).text = code or "\u00A0" # gridBorder(self.gridRowHdr, leftCol, self.dataFirstRow - 1, BOTTOMBORDER) ''' else: self.rowElts[hdrRow-1].insert(leftCol - 1, elt) if isNonAbstract: row += 1 elif childrenFirst: row = nextRow if nestRow > nestedBottomRow: nestedBottomRow = nestRow + (isNonAbstract and not 
childrenFirst) if row > nestedBottomRow: nestedBottomRow = row #if renderNow and not childrenFirst: # dummy, row = self.yAxis(leftCol + 1, row, yAxisHdrObj, childrenFirst, True, False) # render on this pass if not childrenFirst: dummy, row = self.yAxisByRow(leftCol + 1, row, yStructuralNode, childrenFirst, renderNow, False) # render on this pass return (nestedBottomRow, row) def yAxisByCol(self, leftCol, row, yParentStructuralNode, childrenFirst, renderNow, atTop): if yParentStructuralNode is not None: nestedBottomRow = row for yStructuralNode in yParentStructuralNode.childStructuralNodes: nestRow, nextRow = self.yAxisByCol(leftCol + 1, row, yStructuralNode, # nested items before totals childrenFirst, childrenFirst, False) isAbstract = (yStructuralNode.isAbstract or (yStructuralNode.childStructuralNodes and not isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)))) isNonAbstract = not isAbstract isLabeled = yStructuralNode.isLabeled topRow = row if childrenFirst and isNonAbstract: row = nextRow #print ( "thisCol {0} leftCol {1} rightCol {2} topRow{3} renderNow {4} label {5}".format(thisCol, leftCol, rightCol, topRow, renderNow, label)) if renderNow and isLabeled: label = yStructuralNode.header(lang=self.lang, returnGenLabel=isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)), recurseParent=not isinstance(yStructuralNode.definitionNode, ModelFilterDefinitionNode)) brkdownNode = yStructuralNode.breakdownNode rowspan= nestRow - row + 1 cellElt = etree.Element(self.tableModelQName(u"cell"), attrib={u"span": unicode(rowspan)} if rowspan > 1 else None) elt = etree.SubElement(cellElt, self.tableModelQName(u"label")) elt.text = label if label != OPEN_ASPECT_ENTRY_SURROGATE else u"" self.headerCells[leftCol].append((brkdownNode, cellElt)) # self.structuralNodeModelElements.append((yStructuralNode, cellElt)) for rollUpCol in xrange(leftCol, self.rowHdrCols - 1): rollUpElt = 
etree.Element(self.tableModelQName(u"cell"), attrib={u"rollup":u"true"}) self.headerCells[leftCol].append((brkdownNode, rollUpElt)) #if isNonAbstract: i = -1 # for case where no enumeration takes place for i, role in enumerate(self.rowHdrNonStdRoles): roleLabel = yStructuralNode.header(role=role, lang=self.lang, recurseParent=False) if roleLabel is not None: cellElt.append(etree.Comment(u"Label role: {0}, lang {1}" .format(os.path.basename(role), self.lang))) labelElt = etree.SubElement(cellElt, self.tableModelQName(u"label"), #attrib={"role":role, # "lang":self.lang} ).text = roleLabel self.headerCells[leftCol].append((brkdownNode, cellElt)) for aspect in sorted(yStructuralNode.aspectsCovered(), key=lambda a: aspectStr(a)): if yStructuralNode.hasAspect(aspect) and aspect not in (Aspect.DIMENSIONS, Aspect.OMIT_DIMENSIONS): aspectValue = yStructuralNode.aspectValue(aspect) if aspectValue is None: aspectValue = u"(bound dynamically)" if isinstance(aspectValue, ModelObject): # typed dimension value aspectValue = innerTextList(aspectValue) if isinstance(aspectValue, unicode) and aspectValue.startswith(OPEN_ASPECT_ENTRY_SURROGATE): continue # not an aspect, position for a new entry elt = etree.SubElement(cellElt, self.tableModelQName(u"constraint")) etree.SubElement(elt, self.tableModelQName(u"aspect") ).text = aspectStr(aspect) etree.SubElement(elt, self.tableModelQName(u"value") ).text = xsString(None,None,addQnameValue(self.xmlDoc, aspectValue)) u''' if self.rowHdrDocCol: labelElt = etree.SubElement(cellElt, self.tableModelQName("label"), attrib={"span": str(rowspan)} if rowspan > 1 else None) elt.text = yStructuralNode.header(role="http://www.xbrl.org/2008/role/documentation", lang=self.lang) self.rowHdrElts[self.rowHdrCols - 1].append(elt) if self.rowHdrCodeCol: elt = etree.Element(self.tableModelQName("label"), attrib={"span": str(rowspan)} if rowspan > 1 else None) elt.text = yStructuralNode.header(role="http://www.eurofiling.info/role/2010/coordinate-code", 
lang=self.lang) self.rowHdrElts[self.rowHdrCols - 1 + self.rowHdrDocCol].append(elt) ''' if isNonAbstract: row += 1 elif childrenFirst: row = nextRow if nestRow > nestedBottomRow: nestedBottomRow = nestRow + (isNonAbstract and not childrenFirst) if row > nestedBottomRow: nestedBottomRow = row #if renderNow and not childrenFirst: # dummy, row = self.yAxis(leftCol + 1, row, yStructuralNode, childrenFirst, True, False) # render on this pass if not childrenFirst: dummy, row = self.yAxisByCol(leftCol + 1, row, yStructuralNode, childrenFirst, renderNow, False) # render on this pass return (nestedBottomRow, row) def bodyCells(self, row, yParentStructuralNode, xStructuralNodes, zAspectStructuralNodes, yChildrenFirst): if yParentStructuralNode is not None: dimDefaults = self.modelXbrl.qnameDimensionDefaults for yStructuralNode in yParentStructuralNode.childStructuralNodes: if yChildrenFirst: row = self.bodyCells(row, yStructuralNode, xStructuralNodes, zAspectStructuralNodes, yChildrenFirst) if not (yStructuralNode.isAbstract or (yStructuralNode.childStructuralNodes and not isinstance(yStructuralNode.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)))) and yStructuralNode.isLabeled: if self.type == XML: if self.breakdownNodes.get(u"x"): cellsParentElt = etree.SubElement(self.cellsParentElt, self.tableModelQName(u"cells"), attrib={u"axis": u"x"}) else: cellsParentElt = self.cellsParentElt isEntryPrototype = yStructuralNode.isEntryPrototype(default=False) # row to enter open aspects yAspectStructuralNodes = defaultdict(set) for aspect in aspectModels[self.aspectModel]: if yStructuralNode.hasAspect(aspect): if aspect == Aspect.DIMENSIONS: for dim in (yStructuralNode.aspectValue(Aspect.DIMENSIONS) or emptyList): yAspectStructuralNodes[dim].add(yStructuralNode) else: yAspectStructuralNodes[aspect].add(yStructuralNode) yTagSelectors = yStructuralNode.tagSelectors # data for columns of rows ignoreDimValidity = self.ignoreDimValidity.get() for i, xStructuralNode in 
enumerate(xStructuralNodes): xAspectStructuralNodes = defaultdict(set) for aspect in aspectModels[self.aspectModel]: if xStructuralNode.hasAspect(aspect): if aspect == Aspect.DIMENSIONS: for dim in (xStructuralNode.aspectValue(Aspect.DIMENSIONS) or emptyList): xAspectStructuralNodes[dim].add(xStructuralNode) else: xAspectStructuralNodes[aspect].add(xStructuralNode) cellTagSelectors = yTagSelectors | xStructuralNode.tagSelectors cellAspectValues = {} matchableAspects = set() for aspect in _DICT_SET(xAspectStructuralNodes.keys()) | _DICT_SET(yAspectStructuralNodes.keys()) | _DICT_SET(zAspectStructuralNodes.keys()): aspectValue = xStructuralNode.inheritedAspectValue(yStructuralNode, self, aspect, cellTagSelectors, xAspectStructuralNodes, yAspectStructuralNodes, zAspectStructuralNodes) # value is None for a dimension whose value is to be not reported in this slice if (isinstance(aspect, _INT) or # not a dimension dimDefaults.get(aspect) != aspectValue or # explicit dim defaulted will equal the value aspectValue is not None): # typed dim absent will be none cellAspectValues[aspect] = aspectValue matchableAspects.add(aspectModelAspect.get(aspect,aspect)) #filterable aspect from rule aspect cellDefaultedDims = _DICT_SET(dimDefaults) - _DICT_SET(cellAspectValues.keys()) priItemQname = cellAspectValues.get(Aspect.CONCEPT) concept = self.modelXbrl.qnameConcepts.get(priItemQname) conceptNotAbstract = concept is None or not concept.isAbstract from arelle.ValidateXbrlDimensions import isFactDimensionallyValid fact = None value = None objectId = None justify = None fp = FactPrototype(self, cellAspectValues) if conceptNotAbstract: # reduce set of matchable facts to those with pri item qname and have dimension aspects facts = self.modelXbrl.factsByQname[priItemQname] if priItemQname else self.modelXbrl.factsInInstance if self.hasTableFilters: facts = self.modelTable.filterFacts(self.rendrCntx, facts) for aspect in matchableAspects: # trim down facts with explicit dimensions match 
or just present if isinstance(aspect, QName): aspectValue = cellAspectValues.get(aspect, None) if isinstance(aspectValue, ModelDimensionValue): if aspectValue.isExplicit: dimMemQname = aspectValue.memberQname # match facts with this explicit value else: dimMemQname = None # match facts that report this dimension elif isinstance(aspectValue, QName): dimMemQname = aspectValue # match facts that have this explicit value elif aspectValue is None: # match typed dims that don't report this value dimMemQname = DEFAULT else: dimMemQname = None # match facts that report this dimension facts = facts & self.modelXbrl.factsByDimMemQname(aspect, dimMemQname) for fact in facts: if (all(aspectMatches(self.rendrCntx, fact, fp, aspect) for aspect in matchableAspects) and all(fact.context.dimMemberQname(dim,includeDefaults=True) in (dimDefaults[dim], None) for dim in cellDefaultedDims)): if yStructuralNode.hasValueExpression(xStructuralNode): value = yStructuralNode.evalValueExpression(fact, xStructuralNode) else: value = fact.effectiveValue justify = u"right" if fact.isNumeric else u"left" break if justify is None: justify = u"right" if fp.isNumeric else u"left" if conceptNotAbstract: if self.type == XML: cellsParentElt.append(etree.Comment(u"Cell concept {0}: segDims {1}, scenDims {2}" .format(fp.qname, u', '.join(u"({}={})".format(dimVal.dimensionQname, dimVal.memberQname) for dimVal in sorted(fp.context.segDimVals.values(), key=lambda d: d.dimensionQname)), u', '.join(u"({}={})".format(dimVal.dimensionQname, dimVal.memberQname) for dimVal in sorted(fp.context.scenDimVals.values(), key=lambda d: d.dimensionQname)), ))) if value is not None or ignoreDimValidity or isFactDimensionallyValid(self, fp) or isEntryPrototype: if self.type == HTML: etree.SubElement(self.rowElts[row - 1], u"{http://www.w3.org/1999/xhtml}td", attrib={u"class":u"cell", u"style":u"text-align:{0};width:8em".format(justify)} ).text = value or u"\u00A0" elif self.type == XML: if value is not None and fact is not 
None: cellsParentElt.append(etree.Comment(u"{0}: context {1}, value {2}, file {3}, line {4}" .format(fact.qname, fact.contextID, value[:32], # no more than 32 characters fact.modelDocument.basename, fact.sourceline))) elif fact is not None: cellsParentElt.append(etree.Comment(u"Fact was not matched {0}: context {1}, value {2}, file {3}, line {4}, aspects not matched: {5}, dimensions expected to have been defaulted: {6}" .format(fact.qname, fact.contextID, fact.effectiveValue[:32], fact.modelDocument.basename, fact.sourceline, u', '.join(unicode(aspect) for aspect in matchableAspects if not aspectMatches(self.rendrCntx, fact, fp, aspect)), u', '.join(unicode(dim) for dim in cellDefaultedDims if fact.context.dimMemberQname(dim,includeDefaults=True) not in (dimDefaults[dim], None)) ))) cellElt = etree.SubElement(cellsParentElt, self.tableModelQName(u"cell")) if value is not None and fact is not None: etree.SubElement(cellElt, self.tableModelQName(u"fact") ).text = u'{}#{}'.format(fact.modelDocument.basename, elementFragmentIdentifier(fact)) else: if self.type == HTML: etree.SubElement(self.rowElts[row - 1], u"{http://www.w3.org/1999/xhtml}td", attrib={u"class":u"blockedCell", u"style":u"text-align:{0};width:8em".format(justify)} ).text = u"\u00A0\u00A0" elif self.type == XML: etree.SubElement(cellsParentElt, self.tableModelQName(u"cell"), attrib={u"blocked":u"true"}) else: # concept is abstract if self.type == HTML: etree.SubElement(self.rowElts[row - 1], u"{http://www.w3.org/1999/xhtml}td", attrib={u"class":u"abstractCell", u"style":u"text-align:{0};width:8em".format(justify)} ).text = u"\u00A0\u00A0" elif self.type == XML: etree.SubElement(cellsParentElt, self.tableModelQName(u"cell"), attrib={u"abstract":u"true"}) fp.clear() # dereference row += 1 if not yChildrenFirst: row = self.bodyCells(row, yStructuralNode, xStructuralNodes, zAspectStructuralNodes, yChildrenFirst) return row
apache-2.0
-3,421,861,263,182,897,000
71.842767
222
0.479652
false
4.911152
false
false
false
yunbademo/yunba-live-video
stat/messenger.py
1
3264
#!/usr/bin/env python import time import sys import logging from socketIO_client import SocketIO APPKEY = '56a0a88c4407a3cd028ac2fe' TOPIC = 'test' ALIAS = 'test' logger = logging.getLogger('messenger') class Messenger: def __init__(self, appkey, alias, customid): self.__logger = logging.getLogger('messenger.Messenger') self.__logger.info('init') self.appkey = appkey self.customid = customid self.alias = alias self.socketIO = SocketIO('182.92.1.46', 3000) self.socketIO.on('socketconnectack', self.on_socket_connect_ack) self.socketIO.on('connack', self.on_connack) self.socketIO.on('puback', self.on_puback) self.socketIO.on('suback', self.on_suback) self.socketIO.on('message', self.on_message) self.socketIO.on('set_alias_ack', self.on_set_alias) self.socketIO.on('get_topic_list_ack', self.on_get_topic_list_ack) self.socketIO.on('get_alias_list_ack', self.on_get_alias_list_ack) # self.socketIO.on('puback', self.on_publish2_ack) self.socketIO.on('recvack', self.on_publish2_recvack) self.socketIO.on('get_state_ack', self.on_get_state_ack) self.socketIO.on('alias', self.on_alias) def __del__(self): self.__logger.info('del') def loop(self): self.socketIO.wait(seconds=0.002) def on_socket_connect_ack(self, args): self.__logger.debug('on_socket_connect_ack: %s', args) self.socketIO.emit('connect', {'appkey': self.appkey, 'customid': self.customid}) def on_connack(self, args): self.__logger.debug('on_connack: %s', args) self.socketIO.emit('set_alias', {'alias': self.alias}) def on_puback(self, args): self.__logger.debug('on_puback: %s', args) def on_suback(self, args): self.__logger.debug('on_suback: %s', args) def on_message(self, args): self.__logger.debug('on_message: %s', args) def on_set_alias(self, args): self.__logger.debug('on_set_alias: %s', args) def on_get_alias(self, args): self.__logger.debug('on_get_alias: %s', args) def on_alias(self, args): self.__logger.debug('on_alias: %s', args) def on_get_topic_list_ack(self, args): 
self.__logger.debug('on_get_topic_list_ack: %s', args) def on_get_alias_list_ack(self, args): self.__logger.debug('on_get_alias_list_ack: %s', args) def on_publish2_ack(self, args): self.__logger.debug('on_publish2_ack: %s', args) def on_publish2_recvack(self, args): self.__logger.debug('on_publish2_recvack: %s', args) def on_get_state_ack(self, args): self.__logger.debug('on_get_state_ack: %s', args) def publish(self, msg, topic, qos): self.__logger.debug('publish: %s', msg) self.socketIO.emit('publish', {'topic': topic, 'msg': msg, 'qos': qos}) def publish_to_alias(self, alias, msg): self.__logger.debug('publish_to_alias: %s %s', alias, msg) self.socketIO.emit('publish_to_alias', {'alias': alias, 'msg': msg}) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) m = Messenger(APPKEY, ALIAS, ALIAS); while True: m.loop() time.sleep(0.02)
mit
2,107,286,996,990,084,600
31
89
0.613971
false
3.050467
false
false
false
tomerfiliba/plumbum
tests/test_nohup.py
1
2234
# -*- coding: utf-8 -*- import os import sys import time import psutil import pytest from plumbum import NOHUP, local try: from plumbum.cmd import bash, echo except ImportError: bash = None echo = None from plumbum._testtools import skip_on_windows from plumbum.path.utils import delete @skip_on_windows class TestNohupLocal: def read_file(self, filename): assert filename in os.listdir(".") with open(filename) as f: return f.read() @pytest.mark.usefixtures("testdir") def test_slow(self): delete("nohup.out") sp = bash["slow_process.bash"] sp & NOHUP time.sleep(0.5) assert self.read_file("slow_process.out") == "Starting test\n1\n" assert self.read_file("nohup.out") == "1\n" time.sleep(1) assert self.read_file("slow_process.out") == "Starting test\n1\n2\n" assert self.read_file("nohup.out") == "1\n2\n" time.sleep(2) delete("nohup.out", "slow_process.out") def test_append(self): delete("nohup.out") output = echo["This is output"] output & NOHUP time.sleep(0.2) assert self.read_file("nohup.out") == "This is output\n" output & NOHUP time.sleep(0.2) assert self.read_file("nohup.out") == "This is output\n" * 2 delete("nohup.out") def test_redir(self): delete("nohup_new.out") output = echo["This is output"] output & NOHUP(stdout="nohup_new.out") time.sleep(0.2) assert self.read_file("nohup_new.out") == "This is output\n" delete("nohup_new.out") (output > "nohup_new.out") & NOHUP time.sleep(0.2) assert self.read_file("nohup_new.out") == "This is output\n" delete("nohup_new.out") output & NOHUP time.sleep(0.2) assert self.read_file("nohup.out") == "This is output\n" delete("nohup.out") def test_closed_filehandles(self): proc = psutil.Process() file_handles_prior = proc.num_fds() sleep_proc = local["sleep"]["1"] & NOHUP sleep_proc.wait() file_handles_after = proc.num_fds() assert file_handles_prior >= file_handles_after
mit
61,400,480,994,359,310
28.012987
76
0.588183
false
3.177809
true
false
false
stephen2run/EcoDataLearn
src/math/mathtool.py
1
1043
# Copyright 2017 The EcoDataLearn. All Rights Reserved. # Author Stephen (Yu) Shao # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Builds the mathtool function Implements the mathtool """ from approximation import * from regression import * # Approximation approx_obj = MathApprox() approx_obj.handle() x = approx_obj.get_x() y = approx_obj.get_y() # Regression deg = 1 regress_obj = MathRegress(x, y) regress_obj.handle(deg) deg = 7 regress_obj.handle(deg)
apache-2.0
-7,336,470,810,491,666,000
28.828571
80
0.693193
false
3.862963
false
false
false
mitsuhiko/sentry
src/sentry/web/frontend/accounts.py
1
10121
""" sentry.web.frontend.accounts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.contrib import messages from django.contrib.auth import login as login_user, authenticate from django.core.context_processors import csrf from django.core.urlresolvers import reverse from django.db import IntegrityError, transaction from django.http import HttpResponseRedirect, Http404 from django.views.decorators.cache import never_cache from django.views.decorators.csrf import csrf_protect from django.utils import timezone from django.utils.translation import ugettext as _ from sudo.decorators import sudo_required from sentry.models import ( UserEmail, LostPasswordHash, Project, UserOption, Authenticator ) from sentry.signals import email_verified from sentry.web.decorators import login_required, signed_auth_required from sentry.web.forms.accounts import ( AccountSettingsForm, AppearanceSettingsForm, RecoverPasswordForm, ChangePasswordRecoverForm, ) from sentry.web.helpers import render_to_response from sentry.utils.auth import get_auth_providers, get_login_redirect def send_password_recovery_mail(user): password_hash, created = LostPasswordHash.objects.get_or_create( user=user ) if not password_hash.is_valid(): password_hash.date_added = timezone.now() password_hash.set_hash() password_hash.save() password_hash.send_recover_mail() return password_hash @login_required def login_redirect(request): login_url = get_login_redirect(request) return HttpResponseRedirect(login_url) def expired(request, user): password_hash = send_password_recovery_mail(user) return render_to_response('sentry/account/recover/expired.html', { 'email': password_hash.user.email, }, request) def recover(request): form = RecoverPasswordForm(request.POST or None, captcha=bool(request.session.get('needs_captcha'))) if form.is_valid(): password_hash = 
send_password_recovery_mail(form.cleaned_data['user']) request.session.pop('needs_captcha', None) return render_to_response('sentry/account/recover/sent.html', { 'email': password_hash.user.email, }, request) elif request.POST and not request.session.get('needs_captcha'): request.session['needs_captcha'] = 1 form = RecoverPasswordForm(request.POST or None, captcha=True) form.errors.pop('captcha', None) context = { 'form': form, } return render_to_response('sentry/account/recover/index.html', context, request) def recover_confirm(request, user_id, hash): try: password_hash = LostPasswordHash.objects.get(user=user_id, hash=hash) if not password_hash.is_valid(): password_hash.delete() raise LostPasswordHash.DoesNotExist user = password_hash.user except LostPasswordHash.DoesNotExist: context = {} tpl = 'sentry/account/recover/failure.html' else: tpl = 'sentry/account/recover/confirm.html' if request.method == 'POST': form = ChangePasswordRecoverForm(request.POST) if form.is_valid(): user.set_password(form.cleaned_data['password']) user.save() # Ugly way of doing this, but Django requires the backend be set user = authenticate( username=user.username, password=form.cleaned_data['password'], ) login_user(request, user) password_hash.delete() return login_redirect(request) else: form = ChangePasswordRecoverForm() context = { 'form': form, } return render_to_response(tpl, context, request) @login_required def start_confirm_email(request): has_unverified_emails = request.user.has_unverified_emails() if has_unverified_emails: request.user.send_confirm_emails() msg = _('A verification email has been sent to %s.') % request.user.email else: msg = _('Your email (%s) has already been verified.') % request.user.email messages.add_message(request, messages.SUCCESS, msg) return HttpResponseRedirect(reverse('sentry-account-settings')) def confirm_email(request, user_id, hash): msg = _('Thanks for confirming your email') level = messages.SUCCESS try: email = 
UserEmail.objects.get(user=user_id, validation_hash=hash) if not email.hash_is_valid(): raise UserEmail.DoesNotExist except UserEmail.DoesNotExist: if request.user.is_anonymous() or request.user.has_unverified_emails(): msg = _('There was an error confirming your email. Please try again or ' 'visit your Account Settings to resend the verification email.') level = messages.ERROR else: email.is_verified = True email.validation_hash = '' email.save() email_verified.send(email=email.email, sender=email) messages.add_message(request, level, msg) return HttpResponseRedirect(reverse('sentry-account-settings')) @csrf_protect @never_cache @login_required @transaction.atomic def settings(request): user = request.user form = AccountSettingsForm( user, request.POST or None, initial={ 'email': user.email, 'username': user.username, 'name': user.name, }, ) if form.is_valid(): old_email = user.email form.save() # remove previously valid email address # TODO(dcramer): we should maintain validation here when we support # multiple email addresses if request.user.email != old_email: UserEmail.objects.filter(user=user, email=old_email).delete() try: with transaction.atomic(): user_email = UserEmail.objects.create( user=user, email=user.email, ) except IntegrityError: pass else: user_email.set_hash() user_email.save() user.send_confirm_emails() messages.add_message( request, messages.SUCCESS, 'Your settings were saved.') return HttpResponseRedirect(request.path) context = csrf(request) context.update({ 'form': form, 'page': 'settings', 'has_2fa': Authenticator.objects.user_has_2fa(request.user), 'AUTH_PROVIDERS': get_auth_providers(), }) return render_to_response('sentry/account/settings.html', context, request) @csrf_protect @never_cache @login_required @sudo_required @transaction.atomic def twofactor_settings(request): interfaces = Authenticator.objects.all_interfaces_for_user( request.user, return_missing=True) if request.method == 'POST' and 'back' in request.POST: return 
HttpResponseRedirect(reverse('sentry-account-settings')) context = csrf(request) context.update({ 'page': 'security', 'has_2fa': any(x.is_enrolled and not x.is_backup_interface for x in interfaces), 'interfaces': interfaces, }) return render_to_response('sentry/account/twofactor.html', context, request) @csrf_protect @never_cache @login_required @transaction.atomic def avatar_settings(request): context = csrf(request) context.update({ 'page': 'avatar', 'AUTH_PROVIDERS': get_auth_providers(), }) return render_to_response('sentry/account/avatar.html', context, request) @csrf_protect @never_cache @login_required @transaction.atomic def appearance_settings(request): from django.conf import settings options = UserOption.objects.get_all_values(user=request.user, project=None) form = AppearanceSettingsForm(request.user, request.POST or None, initial={ 'language': options.get('language') or request.LANGUAGE_CODE, 'stacktrace_order': int(options.get('stacktrace_order', -1) or -1), 'timezone': options.get('timezone') or settings.SENTRY_DEFAULT_TIME_ZONE, 'clock_24_hours': options.get('clock_24_hours') or False, }) if form.is_valid(): form.save() messages.add_message(request, messages.SUCCESS, 'Your settings were saved.') return HttpResponseRedirect(request.path) context = csrf(request) context.update({ 'form': form, 'page': 'appearance', 'AUTH_PROVIDERS': get_auth_providers(), }) return render_to_response('sentry/account/appearance.html', context, request) @csrf_protect @never_cache @signed_auth_required @transaction.atomic def email_unsubscribe_project(request, project_id): # For now we only support getting here from the signed link. 
if not request.user_from_signed_request: raise Http404() try: project = Project.objects.get(pk=project_id) except Project.DoesNotExist: raise Http404() if request.method == 'POST': if 'cancel' not in request.POST: UserOption.objects.set_value( request.user, project, 'mail:alert', 0) return HttpResponseRedirect(reverse('sentry')) context = csrf(request) context['project'] = project return render_to_response('sentry/account/email_unsubscribe_project.html', context, request) @csrf_protect @never_cache @login_required def list_identities(request): from social_auth.models import UserSocialAuth identity_list = list(UserSocialAuth.objects.filter(user=request.user)) AUTH_PROVIDERS = get_auth_providers() context = csrf(request) context.update({ 'identity_list': identity_list, 'page': 'identities', 'AUTH_PROVIDERS': AUTH_PROVIDERS, }) return render_to_response('sentry/account/identities.html', context, request)
bsd-3-clause
-231,246,322,902,996,860
31.232484
88
0.653394
false
4.087641
false
false
false
NaPs/Kolekto
kolekto/helpers.py
1
1614
""" Collection of helpers for Kolekto. """ import os import gdbm import json def get_hash(input_string): """ Return the hash of the movie depending on the input string. If the input string looks like a symbolic link to a movie in a Kolekto tree, return its movies hash, else, return the input directly in lowercase. """ # Check if the input looks like a link to a movie: if os.path.islink(input_string): directory, movie_hash = os.path.split(os.readlink(input_string)) input_string = movie_hash return input_string.lower() class JsonDbm(object): """ A simple GNU DBM database which store JSON-serialized Python. """ def __init__(self, filename, object_class=dict): self._db = gdbm.open(filename, 'c') self._object_class = object_class def __contains__(self, key): return key in self._db def get(self, key): """ Get data associated with provided key. """ return self._object_class(json.loads(self._db[key])) def count(self): """ Count records in the database. """ return len(self._db) def save(self, key, data): """ Save data associated with key. """ self._db[key] = json.dumps(data) self._db.sync() def remove(self, key): """ Remove the specified key from the database. """ del self._db[key] self._db.sync() def iteritems(self): """ Iterate over (key, data) couple stored in database. """ for key in self.itermovieshash(): yield key, self.get(key)
mit
-5,483,582,171,683,210,000
25.47541
79
0.599752
false
3.927007
false
false
false
yausern/stlab
devices/BFDaemonOld/server/server.py
1
2767
import socket from MySocket import MySocket import datetime import os.path LOGFOLDER = 'D:\\log\\' def tail( f, lines=20 ): total_lines_wanted = lines BLOCK_SIZE = 1024 f.seek(0, 2) block_end_byte = f.tell() lines_to_go = total_lines_wanted block_number = -1 blocks = [] # blocks of size BLOCK_SIZE, in reverse order starting # from the end of the file while lines_to_go > 0 and block_end_byte > 0: if (block_end_byte - BLOCK_SIZE > 0): # read the last block we haven't yet read f.seek(block_number*BLOCK_SIZE, 2) blocks.append(f.read(BLOCK_SIZE)) else: # file too small, start from begining f.seek(0,0) # only read what was not read blocks.append(f.read(block_end_byte)) lines_found = blocks[-1].count('\n'.encode()) lines_to_go -= lines_found block_end_byte -= BLOCK_SIZE block_number -= 1 blocks = [ x.decode() for x in blocks] all_read_text = ''.join(reversed(blocks)) return '\n'.join(all_read_text.splitlines()[-total_lines_wanted:]) def GetTemperature(sock): now = datetime.datetime.today() foldername = now.strftime('%y-%m-%d') foldername = LOGFOLDER + foldername + '\\CH6 T ' + foldername + '.log' foldername = os.path.normpath(foldername) try: myfile = open(foldername,'rb') except FileNotFoundError: #if file not present, try previous day's file now = now - datetime.timedelta(days=1) foldername = now.strftime('%y-%m-%d') foldername = LOGFOLDER + foldername + '\\CH6 T ' + foldername + '.log' foldername = os.path.normpath(foldername) myfile = open(foldername,'rb') ss = MySocket(sock) word = tail(myfile,2) word = word.split(',')[-1] ss.mysend(word.encode('utf_8')) now = datetime.datetime.today() T = float(word)*1000. print("Temperature sent at %s, T = %f mK" % (now.strftime('%y-%m-%d %H:%M:%S'),T)) ss.sock.close() print("StLab Temperature server for BluFors. 
Initializing...") # create an INET, STREAMing socket serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # bind the socket to a public host, and a well-known port #serversocket.bind((socket.gethostname(), 8001)) addr = socket.gethostbyname(socket.gethostname()) port = 8472 serversocket.bind(('', port)) # become a server socket serversocket.listen(5) print("Ready. Listening on port %d and address %s" % (port,addr)) try: while True: # accept connections from outside (clientsocket, address) = serversocket.accept() GetTemperature(clientsocket) except KeyboardInterrupt: print('Shutting down temperature server') serversocket.close()
gpl-3.0
-9,217,473,270,939,096,000
34.037975
86
0.631731
false
3.489281
false
false
false
PhoenixBureau/PigeonComputer
old/nonon.py
1
5828
from co import bootstrap, send from la import setUpTransformEngine from metaii import comp object_vt, vtvt = bootstrap() symbol_vt, ast_vt = setUpTransformEngine(object_vt, vtvt) context_vt = send(vtvt, 'delegated') ######################################################################### ## Simple Syntax to AST ## ######################################################################### # Helper functions. send(ast_vt, 'addMethod', 'valueOf', lambda ast: ast.data) def allocate(vt): return send(vt, 'allocate') def make_kind(kind): return send(allocate(symbol_vt), 'setName', kind) def make_ast(KIND, value): return send(allocate(ast_vt), 'init', KIND, value) def name_of_symbol_of(ast): return send(send(ast, 'typeOf'), 'getName') # Some AST symbol types. SYMBOL = make_kind('symbol') LITERAL = make_kind('literal') LIST = make_kind('list') # Helper functions to generate AST for the simple compiler below. def symbol(name): return make_ast(SYMBOL, name) def literal(value): return make_ast(LITERAL, value) def list_(*values): return make_ast(LIST, list(values)) send(vtvt, 'addMethod', 'makeKind', lambda context, kind: make_kind(kind)) send(vtvt, 'addMethod', 'makeAst', lambda context, KIND, value: make_ast(KIND, value)) # META-II compiler for a simple s-expression language, it generates AST # objects using the helper functions above when evaluated. cola_machine = comp(r''' .SYNTAX PROGRAM PROGRAM = .OUT('(') args .OUT(')') '.' ; args = $ term ; term = ( list | literal | symbol ) .OUT(', ') ; list = '(' .OUT('list_(') args ')' .OUT(')') ; literal = ( .STRING | .NUMBER ) .OUT('literal('*')') ; symbol = .ID .OUT('symbol("'*'")') ; .END ''', open('metaii.asm').read()) ######################################################################### ######################################################################### ######################################################################### # Once we have AST we can use this LISP-like machinery to evaluate it. 
def evaluate(ast, context): sname = name_of_symbol_of(ast) if sname == 'symbol': try: return send(context, 'lookup', ast.data) except: return str(ast.data) + '?' if sname == 'literal': return ast.data first, rest = ast.data[0], ast.data[1:] sname = name_of_symbol_of(first) if sname == 'symbol': if first.data == 'define': define(rest, context) return if first.data == 'lambda': return make_lambda_ast(rest, context) exp = tuple(evaluate(it, context) for it in ast.data) if callable(exp[0]): return exp[0](*exp[1:]) return exp def define((var, exp), context): send(context, 'addMethod', var.data, evaluate(exp, context)) return def make_lambda_ast((variables, exp), context): variables = tuple(v.data for v in variables.data) exp = list_(*exp.data) def inner(*args): new_context = send(context, 'delegated') for k, v in zip(variables, args): send(new_context, 'addMethod', k, v) return evaluate(exp, new_context) return inner def evaluate_list(ast, context): result = evaluate(ast, context) if result is not None: print '<', result, '>' eval_context = send(context_vt, 'delegated') send(eval_context, 'addMethod', 'list', evaluate_list) ######################################################################### ######################################################################### ######################################################################### # We can also use machinery like this to walk the AST and print a # representation. 
def emit_lit(ast, context): print ' ' * context.indent, repr(ast.data) def emit_word(ast, context): print ' ' * context.indent, '< %s >' % (ast.data,) def eval_seq(ast, context): context.indent += 3 print ' ' * context.indent, '/----\\' for item in ast.data: send(item, 'eval', context) print ' ' * context.indent, '\\____/' context.indent -= 3 print_context = send(context_vt, 'delegated') send(print_context, 'addMethod', 'literal', emit_lit) send(print_context, 'addMethod', 'symbol', emit_word) send(print_context, 'addMethod', 'list', eval_seq) print_context.indent = 0 ######################################################################### ######################################################################### ######################################################################### if __name__ == '__main__': from pprint import pprint send(eval_context, 'addMethod', 'multiply', lambda x, y: y * x) send(eval_context, 'addMethod', 'allocate', allocate) send(eval_context, 'addMethod', 'make_kind', make_kind) send(eval_context, 'addMethod', 'make_ast', make_ast) send(eval_context, 'addMethod', 'name_of_symbol_of', name_of_symbol_of) send(eval_context, 'addMethod', 'eval_ast', lambda ast: evaluate_list(ast, eval_context) ) body = comp(''' ( define SYMBOL ( make_kind 'symbol' ) ) ( define LITERAL ( make_kind 'literal' ) ) ( define LIST ( make_kind 'list' ) ) ( define symbol ( lambda (name) ( make_ast SYMBOL name ) ) ) ( define literal ( lambda (value) ( make_ast LITERAL value ) ) ) ( define list_ ( lambda (values) ( make_ast LIST values ) ) ) ( define it ( list_ ( ( symbol Larry ) ( literal 23 ) ( symbol Barry ) ))) ( eval_ast it ) ( eval_ast ( symbol Bob ) ) (define a 1)(define b Larry) (a b) (bill 23) (define area (lambda (r) (multiply 3.141592653 (multiply r r)))) ( area 23 nic ) ( 12 'neato' ) . ''', cola_machine) ast = eval(body) print body print pprint(ast) print for ast_ in ast: send(ast_, 'eval', print_context) print print print 'Evaluating...' 
; print for ast_ in ast: send(ast_, 'eval', eval_context)
gpl-3.0
7,676,236,102,455,079,000
25.252252
86
0.545642
false
3.649343
false
false
false
kontron/python-ipmi
tests/msgs/test_hpm.py
1
1219
#!/usr/bin/env python from nose.tools import eq_ import pyipmi.msgs.hpm from pyipmi.msgs import encode_message from pyipmi.msgs import decode_message def test_uploadfirmwareblockreq_encode(): m = pyipmi.msgs.hpm.UploadFirmwareBlockReq() m.number = 1 m.data = [0, 1, 2, 3] data = encode_message(m) eq_(data, b'\x00\x01\x00\x01\x02\x03') def test_activatefirmwarereq_decode_valid_req(): m = pyipmi.msgs.hpm.ActivateFirmwareReq() decode_message(m, b'\x00\x01') eq_(m.picmg_identifier, 0) eq_(m.rollback_override_policy, 1) def test_activatefirmwarereq_encode_valid_req(): m = pyipmi.msgs.hpm.ActivateFirmwareReq() m.picmg_identifier = 0 m.rollback_override_policy = 0x1 data = encode_message(m) eq_(data, b'\x00\x01') def test_activatefirmwarereq_decode_valid_req_wo_optional(): m = pyipmi.msgs.hpm.ActivateFirmwareReq() decode_message(m, b'\x00') eq_(m.picmg_identifier, 0) eq_(m.rollback_override_policy, None) def test_activatefirmwarereq_encode_valid_req_wo_optional(): m = pyipmi.msgs.hpm.ActivateFirmwareReq() m.picmg_identifier = 0 m.rollback_override_policy = None data = encode_message(m) eq_(data, b'\x00')
lgpl-2.1
1,573,575,823,836,667,000
25.5
60
0.69073
false
2.841492
false
false
false
reubano/csv2ofx
setup.py
1
2830
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import sys from os import path as p import pkutils try: from setuptools import setup, find_packages except ImportError: from distutils.core import setup, find_packages PARENT_DIR = p.abspath(p.dirname(__file__)) sys.dont_write_bytecode = True py2_requirements = set(pkutils.parse_requirements('py2-requirements.txt')) py3_requirements = sorted(pkutils.parse_requirements('requirements.txt')) dev_requirements = sorted(pkutils.parse_requirements('dev-requirements.txt')) readme = pkutils.read('README.md') module = pkutils.parse_module(p.join(PARENT_DIR, 'csv2ofx', '__init__.py')) license = module.__license__ version = module.__version__ project = module.__title__ description = module.__description__ user = 'reubano' # Conditional sdist dependencies: py2 = sys.version_info.major == 2 requirements = sorted(py2_requirements) if py2 else py3_requirements # Conditional bdist_wheel dependencies: py2_require = sorted(py2_requirements.difference(py3_requirements)) # Setup requirements setup_require = [r for r in dev_requirements if 'pkutils' in r] setup( name=project, version=version, description=description, long_description=readme, author=module.__author__, author_email=module.__email__, url=pkutils.get_url(project, user), download_url=pkutils.get_dl_url(project, user, version), packages=find_packages(exclude=['docs', 'tests']), include_package_data=True, package_data={ 'data': ['data/*'], 'helpers': ['helpers/*'], 'tests': ['tests/*'], 'docs': ['docs/*'], 'examples': ['examples/*'] }, install_requires=requirements, extras_require={'python_version<3.0': py2_require}, setup_requires=setup_require, test_suite='nose.collector', tests_require=dev_requirements, license=license, zip_safe=False, keywords=[project] + description.split(' '), classifiers=[ pkutils.LICENSES[license], pkutils.get_status(version), 'Natural Language :: English', 'Programming Language :: Python :: 
2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: Implementation :: PyPy', 'Environment :: Console', 'Topic :: Software Development :: Libraries :: Python Modules', 'Intended Audience :: Developers', 'Operating System :: POSIX :: Linux', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', ], platforms=['MacOS X', 'Windows', 'Linux'], scripts=[p.join('bin', 'csv2ofx')], )
mit
6,491,888,954,352,643,000
31.906977
77
0.658304
false
3.834688
false
false
false
ericholscher/cookiecutter
cookiecutter/utils.py
3
1314
#!/usr/bin/env python # -*- coding: utf-8 -*- """ cookiecutter.utils ------------------ Helper functions used throughout Cookiecutter. """ from __future__ import unicode_literals import errno import logging import os import sys import contextlib PY3 = sys.version > '3' if PY3: pass else: import codecs def make_sure_path_exists(path): """ Ensures that a directory exists. :param path: A directory path. """ logging.debug("Making sure path exists: {0}".format(path)) try: os.makedirs(path) except OSError as exception: if exception.errno != errno.EEXIST: return False return True def unicode_open(filename, *args, **kwargs): """ Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2. :param filename: Name of file to open. """ kwargs['encoding'] = "utf-8" if PY3: return open(filename, *args, **kwargs) return codecs.open(filename, *args, **kwargs) @contextlib.contextmanager def work_in(dirname=None): """ Context manager version of os.chdir. When exited, returns to the working directory prior to entering. """ curdir = os.getcwd() try: if dirname is not None: os.chdir(dirname) yield finally: os.chdir(curdir)
bsd-3-clause
5,354,811,305,813,673,000
19.53125
76
0.622527
false
3.74359
false
false
false
mantidproject/mantid
scripts/test/Muon/fitting_widgets/tf_asymmetry_fitting/tf_asymmetry_fitting_presenter_test.py
3
27966
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2021 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest from unittest import mock from mantid.api import FrameworkManager, FunctionFactory from Muon.GUI.Common.fitting_widgets.tf_asymmetry_fitting.tf_asymmetry_fitting_model import TFAsymmetryFittingModel from Muon.GUI.Common.fitting_widgets.tf_asymmetry_fitting.tf_asymmetry_fitting_presenter import \ TFAsymmetryFittingPresenter from Muon.GUI.Common.fitting_widgets.tf_asymmetry_fitting.tf_asymmetry_fitting_view import TFAsymmetryFittingView class TFAsymmetryFittingPresenterTest(unittest.TestCase): @classmethod def setUpClass(cls): FrameworkManager.Instance() def setUp(self): self.dataset_names = ["Name1", "Name2"] self.current_dataset_index = 0 self.start_x = 0.0 self.end_x = 15.0 self.fit_status = "success" self.chi_squared = 1.5 self.function_name = "FlatBackground" self.minimizer = "Levenberg-Marquardt" self.evaluation_type = "eval type" self.fit_to_raw = True self.plot_guess = True self.simultaneous_fitting_mode = True self.simultaneous_fit_by = "Group/Pair" self.simultaneous_fit_by_specifier = "fwd" self.global_parameters = ["A0"] self.fit_function = FunctionFactory.createFunction("FlatBackground") self.single_fit_functions = [self.fit_function.clone(), self.fit_function.clone()] self.tf_asymmetry_mode = True self.normalisation = 3.0 self.normalisation_error = 0.3 self._setup_mock_view() self._setup_mock_model() self._setup_presenter() self.mock_view_minimizer.assert_called_once_with() self.mock_view_evaluation_type.assert_called_once_with() self.mock_view_fit_to_raw.assert_called_once_with() self.mock_view_simultaneous_fit_by.assert_called_once_with() self.mock_view_simultaneous_fit_by_specifier.assert_called_once_with() 
self.mock_view_global_parameters.assert_called_once_with() self.mock_view_tf_asymmetry_mode.assert_called_once_with() self.mock_model_minimizer.assert_called_once_with(self.minimizer) self.mock_model_evaluation_type.assert_called_once_with(self.evaluation_type) self.mock_model_fit_to_raw.assert_called_once_with(self.fit_to_raw) self.mock_model_simultaneous_fit_by.assert_called_once_with(self.simultaneous_fit_by) self.mock_model_simultaneous_fit_by_specifier.assert_called_once_with(self.simultaneous_fit_by_specifier) self.mock_model_global_parameters.assert_called_once_with(self.global_parameters) self.mock_model_tf_asymmetry_mode.assert_called_once_with(self.tf_asymmetry_mode) self.assertEqual(self.view.set_slot_for_fit_generator_clicked.call_count, 1) self.assertEqual(self.view.set_slot_for_fit_button_clicked.call_count, 1) self.assertEqual(self.view.set_slot_for_undo_fit_clicked.call_count, 1) self.assertEqual(self.view.set_slot_for_plot_guess_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_fit_name_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_function_structure_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_function_parameter_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_start_x_updated.call_count, 1) self.assertEqual(self.view.set_slot_for_end_x_updated.call_count, 1) self.assertEqual(self.view.set_slot_for_minimizer_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_evaluation_type_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_use_raw_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_dataset_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_fitting_mode_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_simultaneous_fit_by_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_simultaneous_fit_by_specifier_changed.call_count, 1) self.assertEqual(self.view.set_slot_for_fitting_type_changed.call_count, 1) 
self.assertEqual(self.view.set_slot_for_normalisation_changed.call_count, 1) def tearDown(self): self.presenter = None self.model = None self.view = None def test_that_handle_instrument_changed_will_turn_tf_asymmetry_mode_off(self): self.presenter.handle_instrument_changed() self.mock_view_tf_asymmetry_mode.assert_called_with(False) self.mock_model_tf_asymmetry_mode.assert_called_with(False) self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 2) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 2) def test_that_handle_pulse_type_changed_will_not_turn_tf_asymmetry_mode_off_if_it_contains_does_not_contain_DoublePulseEnabled(self): updated_variables = {"FirstVariable": True, "OtherVariable": False} self.presenter.handle_pulse_type_changed(updated_variables) # The call count is one because they were initialized in the constructor self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 1) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 1) def test_that_handle_ads_clear_or_remove_workspace_event_will_attempt_to_reset_all_the_data_and_enable_gui(self): self.presenter.update_and_reset_all_data = mock.Mock() self.presenter.handle_ads_clear_or_remove_workspace_event() self.presenter.update_and_reset_all_data.assert_called_with() self.presenter.enable_editing_notifier.notify_subscribers.assert_called_once_with() self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 1) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 1) def test_that_handle_ads_clear_or_remove_workspace_event_will_attempt_to_reset_all_the_data_and_enable_gui_with_no_datasets(self): self.mock_model_number_of_datasets = mock.PropertyMock(return_value=0) type(self.model).number_of_datasets = self.mock_model_number_of_datasets self.presenter.update_and_reset_all_data = mock.Mock() self.presenter.handle_ads_clear_or_remove_workspace_event() self.presenter.update_and_reset_all_data.assert_called_with() self.view.disable_view.assert_called_once_with() 
self.mock_view_tf_asymmetry_mode.assert_called_with(False) self.mock_model_tf_asymmetry_mode.assert_called_with(False) self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 2) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 2) def test_that_handle_new_data_loaded_will_attempt_to_reset_all_the_data_and_enable_the_gui(self): self.presenter.clear_undo_data = mock.Mock() self.presenter.update_and_reset_all_data = mock.Mock() self.presenter.handle_new_data_loaded() self.presenter.update_and_reset_all_data.assert_called_with() self.mock_view_plot_guess.assert_called_once_with(False) self.mock_model_plot_guess.assert_called_once_with(False) self.presenter.clear_undo_data.assert_called_with() self.presenter.enable_editing_notifier.notify_subscribers.assert_called_once_with() self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 1) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 1) def test_that_handle_new_data_loaded_will_disable_the_tab_if_no_data_is_loaded(self): self.mock_model_number_of_datasets = mock.PropertyMock(return_value=0) type(self.model).number_of_datasets = self.mock_model_number_of_datasets self.presenter.clear_undo_data = mock.Mock() self.presenter.update_and_reset_all_data = mock.Mock() self.presenter.handle_new_data_loaded() self.presenter.update_and_reset_all_data.assert_called_with() self.mock_view_plot_guess.assert_called_once_with(False) self.mock_model_plot_guess.assert_called_once_with(False) self.presenter.clear_undo_data.assert_called_with() self.view.disable_view.assert_called_once_with() self.mock_view_tf_asymmetry_mode.assert_called_with(False) self.mock_model_tf_asymmetry_mode.assert_called_with(False) self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 2) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 2) def test_that_handle_function_structure_changed_will_attempt_to_update_the_tf_asymmetry_functions(self): self.presenter.update_tf_asymmetry_functions_in_model_and_view = 
mock.Mock() self.presenter.handle_function_structure_changed() self.presenter.update_tf_asymmetry_functions_in_model_and_view.assert_called_once_with() def test_that_handle_dataset_name_changed_will_attempt_to_update_the_normalisation_displayed_in_the_view(self): self.presenter.handle_dataset_name_changed() self.model.current_normalisation.assert_called_with() self.model.current_normalisation_error.assert_called_with() self.view.set_normalisation.assert_called_with(self.normalisation, self.normalisation_error) def test_that_handle_tf_asymmetry_mode_changed_will_not_set_the_tf_asymmetry_mode_if_the_workspaces_do_not_comply(self): self.presenter._check_tf_asymmetry_compliance = mock.Mock(return_value=False) self.presenter.handle_tf_asymmetry_mode_changed(self.tf_asymmetry_mode) # The call count is one because they were initialized in the constructor self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 1) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 1) self.presenter._check_tf_asymmetry_compliance.assert_called_once_with(self.tf_asymmetry_mode) def test_that_handle_tf_asymmetry_mode_changed_will_set_the_tf_asymmetry_mode_if_the_workspaces_do_comply(self): self.presenter._check_tf_asymmetry_compliance = mock.Mock(return_value=True) self.presenter.update_tf_asymmetry_functions_in_model_and_view = mock.Mock() self.presenter.reset_start_xs_and_end_xs = mock.Mock() self.presenter.reset_fit_status_and_chi_squared_information = mock.Mock() self.presenter.clear_undo_data = mock.Mock() self.presenter.automatically_update_function_name = mock.Mock() self.presenter.handle_tf_asymmetry_mode_changed(self.tf_asymmetry_mode) # The call count is one because they were initialized in the constructor self.assertEqual(self.mock_view_tf_asymmetry_mode.call_count, 2) self.assertEqual(self.mock_model_tf_asymmetry_mode.call_count, 2) self.presenter._check_tf_asymmetry_compliance.assert_called_once_with(self.tf_asymmetry_mode) 
self.presenter.update_tf_asymmetry_functions_in_model_and_view.assert_called_once_with() self.presenter.reset_fit_status_and_chi_squared_information.assert_called_once_with() self.presenter.clear_undo_data.assert_called_once_with() self.presenter.automatically_update_function_name.assert_called_once_with() self.model.update_plot_guess(self.plot_guess) def test_that_handle_normalisation_changed_sets_the_normalisation_in_the_model_and_updates_the_guess(self): self.presenter.handle_normalisation_changed() self.mock_view_normalisation.assert_called_with() self.model.set_current_normalisation.assert_called_once_with(self.normalisation) self.model.update_plot_guess(self.plot_guess) def test_that_update_and_reset_all_data_will_attempt_to_update_the_tf_asymmetry_functions(self): self.presenter.update_tf_asymmetry_functions_in_model_and_view = mock.Mock() self.presenter.update_and_reset_all_data() self.presenter.update_tf_asymmetry_functions_in_model_and_view.assert_called_once_with() def test_that_update_tf_asymmetry_functions_in_model_and_view_shows_a_warning_if_it_fails_to_recalculate_the_functions(self): self.model.recalculate_tf_asymmetry_functions = mock.Mock(return_value=False) self.presenter.update_tf_asymmetry_functions_in_model_and_view() self.model.recalculate_tf_asymmetry_functions.assert_called_once_with() self.view.warning_popup.assert_called_once_with("Failed to convert fit function to a TF Asymmetry function.") self.model.current_normalisation.assert_called_with() self.view.set_normalisation.assert_called_with(self.normalisation, self.normalisation_error) def test_that_update_tf_asymmetry_functions_in_model_and_view_will_set_the_normalisation_in_the_view(self): self.model.recalculate_tf_asymmetry_functions = mock.Mock(return_value=True) self.presenter.update_tf_asymmetry_functions_in_model_and_view() self.model.recalculate_tf_asymmetry_functions.assert_called_once_with() self.model.current_normalisation.assert_called_with() 
self.view.set_normalisation.assert_called_with(self.normalisation, self.normalisation_error) def test_that_update_fit_function_in_model_will_update_the_simultaneous_fit_functions_when_in_simultaneous_mode(self): self.model.update_tf_asymmetry_simultaneous_fit_function = mock.Mock() self.mock_model_simultaneous_fitting_mode = mock.PropertyMock(return_value=True) type(self.model).simultaneous_fitting_mode = self.mock_model_simultaneous_fitting_mode self.presenter.update_fit_function_in_model(self.fit_function) self.model.update_tf_asymmetry_simultaneous_fit_function.assert_called_once_with(self.fit_function) def test_that_update_fit_function_in_model_will_update_the_current_fit_function_when_in_single_mode(self): self.model.update_tf_asymmetry_single_fit_function = mock.Mock() self.mock_model_simultaneous_fitting_mode = mock.PropertyMock(return_value=False) type(self.model).simultaneous_fitting_mode = self.mock_model_simultaneous_fitting_mode self.presenter.update_fit_function_in_model(self.fit_function) self.model.update_tf_asymmetry_single_fit_function.assert_called_once_with(self.model.current_dataset_index, self.fit_function) def test_that_handle_sequential_fit_finished_will_update_the_fit_functions_and_statuses_in_the_view_and_model(self): self.presenter.update_fit_function_in_view_from_model = mock.Mock() self.presenter.update_fit_statuses_and_chi_squared_in_view_from_model = mock.Mock() self.presenter.handle_sequential_fit_finished() self.presenter.update_fit_function_in_view_from_model.assert_called_once_with() self.presenter.update_fit_statuses_and_chi_squared_in_view_from_model.assert_called_once_with() def _setup_mock_view(self): self.view = mock.Mock(spec=TFAsymmetryFittingView) # Mock the properties of the view self.mock_view_current_dataset_index = mock.PropertyMock(return_value=self.current_dataset_index) type(self.view).current_dataset_index = self.mock_view_current_dataset_index self.mock_view_current_dataset_name = mock.PropertyMock(return_value= 
self.dataset_names[self.current_dataset_index]) type(self.view).current_dataset_name = self.mock_view_current_dataset_name self.mock_view_simultaneous_fit_by = mock.PropertyMock(return_value=self.simultaneous_fit_by) type(self.view).simultaneous_fit_by = self.mock_view_simultaneous_fit_by self.mock_view_simultaneous_fit_by_specifier = mock.PropertyMock(return_value= self.simultaneous_fit_by_specifier) type(self.view).simultaneous_fit_by_specifier = self.mock_view_simultaneous_fit_by_specifier self.mock_view_global_parameters = mock.PropertyMock(return_value=self.global_parameters) type(self.view).global_parameters = self.mock_view_global_parameters self.mock_view_minimizer = mock.PropertyMock(return_value=self.minimizer) type(self.view).minimizer = self.mock_view_minimizer self.mock_view_evaluation_type = mock.PropertyMock(return_value=self.evaluation_type) type(self.view).evaluation_type = self.mock_view_evaluation_type self.mock_view_fit_to_raw = mock.PropertyMock(return_value=self.fit_to_raw) type(self.view).fit_to_raw = self.mock_view_fit_to_raw self.mock_view_fit_object = mock.PropertyMock(return_value=self.fit_function) type(self.view).fit_object = self.mock_view_fit_object self.mock_view_start_x = mock.PropertyMock(return_value=self.start_x) type(self.view).start_x = self.mock_view_start_x self.mock_view_end_x = mock.PropertyMock(return_value=self.end_x) type(self.view).end_x = self.mock_view_end_x self.mock_view_plot_guess = mock.PropertyMock(return_value=self.plot_guess) type(self.view).plot_guess = self.mock_view_plot_guess self.mock_view_function_name = mock.PropertyMock(return_value=self.function_name) type(self.view).function_name = self.mock_view_function_name self.mock_view_simultaneous_fitting_mode = mock.PropertyMock(return_value=self.simultaneous_fitting_mode) type(self.view).simultaneous_fitting_mode = self.mock_view_simultaneous_fitting_mode self.mock_view_tf_asymmetry_mode = mock.PropertyMock(return_value=self.tf_asymmetry_mode) 
type(self.view).tf_asymmetry_mode = self.mock_view_tf_asymmetry_mode self.mock_view_normalisation = mock.PropertyMock(return_value=self.normalisation) type(self.view).normalisation = self.mock_view_normalisation # Mock the methods of the view self.view.set_slot_for_fit_generator_clicked = mock.Mock() self.view.set_slot_for_fit_button_clicked = mock.Mock() self.view.set_slot_for_undo_fit_clicked = mock.Mock() self.view.set_slot_for_plot_guess_changed = mock.Mock() self.view.set_slot_for_fit_name_changed = mock.Mock() self.view.set_slot_for_function_structure_changed = mock.Mock() self.view.set_slot_for_function_parameter_changed = mock.Mock() self.view.set_slot_for_start_x_updated = mock.Mock() self.view.set_slot_for_end_x_updated = mock.Mock() self.view.set_slot_for_minimizer_changed = mock.Mock() self.view.set_slot_for_evaluation_type_changed = mock.Mock() self.view.set_slot_for_use_raw_changed = mock.Mock() self.view.set_slot_for_dataset_changed = mock.Mock() self.view.set_slot_for_fitting_mode_changed = mock.Mock() self.view.set_slot_for_simultaneous_fit_by_changed = mock.Mock() self.view.set_slot_for_simultaneous_fit_by_specifier_changed = mock.Mock() self.view.set_slot_for_fitting_type_changed = mock.Mock() self.view.set_slot_for_normalisation_changed = mock.Mock() self.view.set_datasets_in_function_browser = mock.Mock() self.view.set_current_dataset_index = mock.Mock() self.view.update_local_fit_status_and_chi_squared = mock.Mock() self.view.update_global_fit_status = mock.Mock() self.view.update_fit_function = mock.Mock() self.view.enable_undo_fit = mock.Mock() self.view.number_of_datasets = mock.Mock(return_value=len(self.dataset_names)) self.view.warning_popup = mock.Mock() self.view.get_global_parameters = mock.Mock(return_value=[]) self.view.switch_to_simultaneous = mock.Mock() self.view.switch_to_single = mock.Mock() self.view.set_normalisation = mock.Mock() def _setup_mock_model(self): self.model = mock.Mock(spec=TFAsymmetryFittingModel) # Mock the 
properties of the model self.mock_model_current_dataset_index = mock.PropertyMock(return_value=self.current_dataset_index) type(self.model).current_dataset_index = self.mock_model_current_dataset_index self.mock_model_dataset_names = mock.PropertyMock(return_value=self.dataset_names) type(self.model).dataset_names = self.mock_model_dataset_names self.mock_model_current_dataset_name = mock.PropertyMock(return_value= self.dataset_names[self.current_dataset_index]) type(self.model).current_dataset_name = self.mock_model_current_dataset_name self.mock_model_number_of_datasets = mock.PropertyMock(return_value=len(self.dataset_names)) type(self.model).number_of_datasets = self.mock_model_number_of_datasets self.mock_model_start_xs = mock.PropertyMock(return_value=[self.start_x] * len(self.dataset_names)) type(self.model).start_xs = self.mock_model_start_xs self.mock_model_current_start_x = mock.PropertyMock(return_value=self.start_x) type(self.model).current_start_x = self.mock_model_current_start_x self.mock_model_end_xs = mock.PropertyMock(return_value=[self.end_x] * len(self.dataset_names)) type(self.model).end_xs = self.mock_model_end_xs self.mock_model_current_end_x = mock.PropertyMock(return_value=self.end_x) type(self.model).current_end_x = self.mock_model_current_end_x self.mock_model_plot_guess = mock.PropertyMock(return_value=self.plot_guess) type(self.model).plot_guess = self.mock_model_plot_guess self.mock_model_minimizer = mock.PropertyMock(return_value=self.minimizer) type(self.model).minimizer = self.mock_model_minimizer self.mock_model_evaluation_type = mock.PropertyMock(return_value=self.evaluation_type) type(self.model).evaluation_type = self.mock_model_evaluation_type self.mock_model_fit_to_raw = mock.PropertyMock(return_value=self.fit_to_raw) type(self.model).fit_to_raw = self.mock_model_fit_to_raw self.mock_model_simultaneous_fit_by = mock.PropertyMock(return_value=self.simultaneous_fit_by) type(self.model).simultaneous_fit_by = 
self.mock_model_simultaneous_fit_by self.mock_model_simultaneous_fit_by_specifier = mock.PropertyMock(return_value= self.simultaneous_fit_by_specifier) type(self.model).simultaneous_fit_by_specifier = self.mock_model_simultaneous_fit_by_specifier self.mock_model_global_parameters = mock.PropertyMock(return_value=self.global_parameters) type(self.model).global_parameters = self.mock_model_global_parameters self.mock_model_single_fit_functions = mock.PropertyMock(return_value=self.single_fit_functions) type(self.model).single_fit_functions = self.mock_model_single_fit_functions self.mock_model_current_single_fit_function = mock.PropertyMock(return_value=self.fit_function) type(self.model).current_single_fit_function = self.mock_model_current_single_fit_function self.mock_model_single_fit_functions_cache = mock.PropertyMock(return_value=self.fit_function) type(self.model).single_fit_functions_cache = self.mock_model_single_fit_functions_cache self.mock_model_simultaneous_fit_function = mock.PropertyMock(return_value=self.fit_function) type(self.model).simultaneous_fit_function = self.mock_model_simultaneous_fit_function self.mock_model_fit_statuses = mock.PropertyMock(return_value=[self.fit_status] * len(self.dataset_names)) type(self.model).fit_statuses = self.mock_model_fit_statuses self.mock_model_current_fit_status = mock.PropertyMock(return_value=self.fit_status) type(self.model).current_fit_status = self.mock_model_current_fit_status self.mock_model_chi_squared = mock.PropertyMock(return_value=[self.chi_squared] * len(self.dataset_names)) type(self.model).chi_squared = self.mock_model_chi_squared self.mock_model_current_chi_squared = mock.PropertyMock(return_value=self.chi_squared) type(self.model).current_chi_squared = self.mock_model_current_chi_squared self.mock_model_function_name = mock.PropertyMock(return_value=self.function_name) type(self.model).function_name = self.mock_model_function_name self.mock_model_function_name_auto_update = 
mock.PropertyMock(return_value=True) type(self.model).function_name_auto_update = self.mock_model_function_name_auto_update self.mock_model_simultaneous_fitting_mode = mock.PropertyMock(return_value=self.simultaneous_fitting_mode) type(self.model).simultaneous_fitting_mode = self.mock_model_simultaneous_fitting_mode self.mock_model_global_parameters = mock.PropertyMock(return_value=[]) type(self.model).global_parameters = self.mock_model_global_parameters self.mock_model_do_rebin = mock.PropertyMock(return_value=False) type(self.model).do_rebin = self.mock_model_do_rebin self.mock_model_tf_asymmetry_mode = mock.PropertyMock(return_value=self.tf_asymmetry_mode) type(self.model).tf_asymmetry_mode = self.mock_model_tf_asymmetry_mode # Mock the context self.model.context = mock.Mock() # Mock the methods of the model self.model.clear_single_fit_functions = mock.Mock() self.model.get_single_fit_function_for = mock.Mock(return_value=self.fit_function) self.model.cache_the_current_fit_functions = mock.Mock() self.model.clear_undo_data = mock.Mock() self.model.automatically_update_function_name = mock.Mock() self.model.save_current_fit_function_to_undo_data = mock.Mock() self.model.update_plot_guess = mock.Mock() self.model.remove_all_fits_from_context = mock.Mock() self.model.reset_current_dataset_index = mock.Mock() self.model.reset_start_xs_and_end_xs = mock.Mock() self.model.reset_fit_statuses_and_chi_squared = mock.Mock() self.model.reset_fit_functions = mock.Mock() self.model.x_limits_of_workspace = mock.Mock(return_value=(self.start_x, self.end_x)) self.model.retrieve_first_good_data_from_run = mock.Mock(return_value=self.start_x) self.model.get_active_fit_function = mock.Mock(return_value=self.fit_function) self.model.get_active_workspace_names = mock.Mock(return_value=[self.dataset_names[self.current_dataset_index]]) self.model.get_active_fit_results = mock.Mock(return_value=[]) self.model.get_workspace_names_to_display_from_context = 
mock.Mock(return_value=self.dataset_names) self.model.perform_fit = mock.Mock(return_value=(self.fit_function, self.fit_status, self.chi_squared)) self.model.current_normalisation = mock.Mock(return_value=self.normalisation) self.model.set_current_normalisation = mock.Mock() self.model.current_normalisation_error = mock.Mock(return_value=self.normalisation_error) def _setup_presenter(self): self.presenter = TFAsymmetryFittingPresenter(self.view, self.model) # Mock unimplemented methods and notifiers self.presenter.disable_editing_notifier.notify_subscribers = mock.Mock() self.presenter.enable_editing_notifier.notify_subscribers = mock.Mock() self.presenter.selected_fit_results_changed.notify_subscribers = mock.Mock() self.presenter.fit_function_changed_notifier.notify_subscribers = mock.Mock() self.presenter.fit_parameter_changed_notifier.notify_subscribers = mock.Mock() self.presenter.fitting_mode_changed_notifier.notify_subscribers = mock.Mock() if __name__ == '__main__': unittest.main()
gpl-3.0
-5,286,299,378,675,061,000
61.008869
137
0.709862
false
3.328889
true
false
false
theno/ctutlz
ctutlz/tls/sctlist.py
1
1759
import collections from utlz import flo from utlz import StructContext _SctListEntry = collections.namedtuple( typename='SctListEntry', field_names=[ 'sct_len', 'sct_der', ] ) _TlsExtension18 = collections.namedtuple( typename='TlsExtension18', field_names=[ 'tls_extension_type', 'tls_extension_len', 'signed_certificate_timestamp_list_len', 'sct_list', ] ) def TlsExtension18(extension_18_tdf): with StructContext(extension_18_tdf) as struct: data_dict = { 'tls_extension_type': struct.read('!H'), 'tls_extension_len': struct.read('!H'), 'signed_certificate_timestamp_list_len': struct.read('!H'), } sct_list = [] while struct.offset < struct.length: sct_len = struct.read('!H') sct_der = struct.read(flo('!{sct_len}s')) sct_list.append(_SctListEntry(sct_len, sct_der)) return _TlsExtension18(sct_list=sct_list, **data_dict) _SignedCertificateTimestampList = collections.namedtuple( typename='SignedCertificateTimestampList', field_names=[ 'signed_certificate_timestamp_list_len', 'sct_list', ] ) def SignedCertificateTimestampList(sctlist): with StructContext(sctlist) as struct: data_dict = { 'signed_certificate_timestamp_list_len': struct.read('!H'), } sct_list = [] while struct.offset < struct.length: sct_len = struct.read('!H') sct_der = struct.read(flo('!{sct_len}s')) sct_list.append(_SctListEntry(sct_len, sct_der)) return _SignedCertificateTimestampList(sct_list=sct_list, **data_dict)
mit
7,337,246,304,160,839,000
27.370968
78
0.593519
false
3.539235
false
false
false
all-of-us/raw-data-repository
rdr_service/alembic/versions/2e1d3f329efd_add_column_report_consent_removal_date_.py
1
2206
"""add column report_consent_removal_date to genomic_set_member Revision ID: 2e1d3f329efd Revises: 1ea7864c251e Create Date: 2020-09-25 15:51:21.977008 """ from alembic import op import sqlalchemy as sa import rdr_service.model.utils from rdr_service.participant_enums import PhysicalMeasurementsStatus, QuestionnaireStatus, OrderStatus from rdr_service.participant_enums import WithdrawalStatus, WithdrawalReason, SuspensionStatus, QuestionnaireDefinitionStatus from rdr_service.participant_enums import EnrollmentStatus, Race, SampleStatus, OrganizationType, BiobankOrderStatus from rdr_service.participant_enums import OrderShipmentTrackingStatus, OrderShipmentStatus from rdr_service.participant_enums import MetricSetType, MetricsKey, GenderIdentity from rdr_service.model.base import add_table_history_table, drop_table_history_table from rdr_service.model.code import CodeType from rdr_service.model.site_enums import SiteStatus, EnrollingStatus, DigitalSchedulingStatus, ObsoleteStatus # revision identifiers, used by Alembic. revision = '2e1d3f329efd' down_revision = '1ea7864c251e' branch_labels = None depends_on = None def upgrade(engine_name): globals()["upgrade_%s" % engine_name]() def downgrade(engine_name): globals()["downgrade_%s" % engine_name]() def upgrade_rdr(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('genomic_set_member', sa.Column('report_consent_removal_date', sa.DateTime(), nullable=True)) op.add_column('genomic_set_member_history', sa.Column('report_consent_removal_date', sa.DateTime(), nullable=True)) # ### end Alembic commands ### def downgrade_rdr(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('genomic_set_member', 'report_consent_removal_date') op.drop_column('genomic_set_member_history', 'report_consent_removal_date') # ### end Alembic commands ### def upgrade_metrics(): # ### commands auto generated by Alembic - please adjust! 
### pass # ### end Alembic commands ### def downgrade_metrics(): # ### commands auto generated by Alembic - please adjust! ### pass # ### end Alembic commands ###
bsd-3-clause
7,623,654,461,418,293,000
34.015873
125
0.744787
false
3.523962
false
false
false
atlassian/asap-authentication-python
atlassian_jwt_auth/contrib/tests/aiohttp/test_verifier.py
1
1491
import asyncio from asynctest import TestCase, CoroutineMock from atlassian_jwt_auth.contrib.aiohttp import ( JWTAuthVerifier, HTTPSPublicKeyRetriever) from atlassian_jwt_auth.tests import utils, test_verifier class SyncJWTAuthVerifier(JWTAuthVerifier): def __init__(self, *args, loop=None, **kwargs): if loop is None: loop = asyncio.get_event_loop() self.loop = loop super().__init__(*args, **kwargs) def verify_jwt(self, *args, **kwargs): return self.loop.run_until_complete( super().verify_jwt(*args, **kwargs) ) class JWTAuthVerifierTestMixin(test_verifier.BaseJWTAuthVerifierTest): loop = None def _setup_mock_public_key_retriever(self, pub_key_pem): m_public_key_ret = CoroutineMock(spec=HTTPSPublicKeyRetriever) m_public_key_ret.retrieve.return_value = pub_key_pem.decode() return m_public_key_ret def _setup_jwt_auth_verifier(self, pub_key_pem, **kwargs): m_public_key_ret = self._setup_mock_public_key_retriever(pub_key_pem) return SyncJWTAuthVerifier(m_public_key_ret, loop=self.loop, **kwargs) class JWTAuthVerifierRS256Test( utils.RS256KeyTestMixin, JWTAuthVerifierTestMixin, TestCase): """Tests for aiohttp.JWTAuthVerifier class for RS256 algorithm""" class JWTAuthVerifierES256Test( utils.ES256KeyTestMixin, JWTAuthVerifierTestMixin, TestCase): """Tests for aiohttp.JWTAuthVerifier class for ES256 algorithm"""
mit
2,959,869,761,312,623,000
32.886364
78
0.702884
false
3.365688
true
false
false
iurisilvio/Flask-SQLAlchemy-Cache
flask_sqlalchemy_cache/tests/test_minimal.py
1
1988
# coding: UTF-8 import unittest from flask import Flask from flask.ext.sqlalchemy import SQLAlchemy, Model from flask.ext.cache import Cache from flask.ext.sqlalchemy_cache import CachingQuery, FromCache Model.query_class = CachingQuery db = SQLAlchemy() cache = Cache() class Country(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(100), nullable=False) def __init__(self, name): self.name = name def __repr__(self): return self.name def create_app(): app = Flask(__name__) app.config['CACHE_TYPE'] = 'simple' db.init_app(app) cache.init_app(app) return app class TestFromCache(unittest.TestCase): def setUp(self): self.app = create_app() self.ctx = self.app.app_context() self.ctx.push() db.create_all() db.session.add(Country(name='Brazil')) db.session.commit() def tearDown(self): db.session.remove() db.drop_all() self.ctx.pop() def test_cache_hit(self): q = Country.query.order_by(Country.name.desc()) caching_q = q.options(FromCache(cache)) # cache miss country = caching_q.first() self.assertEqual('Brazil', country.name) # add another record c = Country(name='Germany') db.session.add(c) db.session.commit() # no cache used self.assertEqual('Germany', q.first().name) # cache hit self.assertEqual('Brazil', caching_q.first().name) def test_no_results(self): # regression test (check #3) to handle zero results gracefully Country.query.filter_by(name="URSS").options(FromCache(cache)).all() def test_special_chars(self): unicode_name = u"Côte d'Ivoire" unicode_country = Country(unicode_name) db.session.add(unicode_country) db.session.commit() Country.query.filter_by(name=unicode_name).options(FromCache(cache)).all()
mit
-6,000,290,008,303,468,000
24.805195
82
0.623553
false
3.548214
true
false
false
skachuck/giapy
giapy/command_line.py
1
8412
from argparse import ArgumentParser, FileType import sys, os import numpy as np from giapy.earth_tools.elasticlove import compute_love_numbers, hLK_asymptotic from giapy.earth_tools.viscellove import compute_viscel_numbers from giapy.earth_tools.earthParams import EarthParams def ellove(): """useage: giapy-ellove [-h] [--lstart LSTART] [--params PARAMS] [--nlayers NLAYERS] lmax [outfile] Compute the elastic surface load love numbers positional arguments: lmax maximum order number to compute outfile file to save out optional arguments: -h, --help show this help message and exit --lstart LSTART starting order number (default: 1). Cannot be less than 1. --params PARAMS material parameter table --nlayers NLAYERS number of layers (default: 100) --incomp flag for incompressibility (default: False) --conv [CONV] perform convergence check for asymptotic love number at supplied (very large) l (if flag present, defaults to l=50000). """ # Read the command line arguments. parser = ArgumentParser(description='Compute the elastic surface load love numbers') parser.add_argument('-l', '--lstart', type=int, default=1, help='starting order number (default: %(default)s). 
Cannot be less than 1.') parser.add_argument('lmax', type=int, help='maximum order number to compute') parser.add_argument('--params', default=None, help="""material parameter table with columns: r (km) density (kg/m^3) bulk mod (GPa) shear mod (GPa) g (m/2^2) (default: PREM)""") parser.add_argument('-n', '--nlayers', type=int, default=100, help='number of layers (default: %(default)s)') parser.add_argument('outfile', nargs='?', type=FileType('w'), default=sys.stdout, help='file to save out') parser.add_argument('--conv', nargs='?', const=50000, default=False, help='''perform convergence check for asymptotic love number at supplied (very large) l (if present, defaults to l=50000)''') parser.add_argument('--incomp', default=False, action='store_const', const=True, help='impose incompressibility') args = parser.parse_args() # Set up the order number range assert args.lstart >= 1, 'lstart must be 1 or greater.' ls = range(args.lstart, args.lmax+1) # Load prem if no paramname given if args.params is None: paramname = 'prem' else: paramname = os.path.abspath(args.params) # Load the parameters with no crust for viscoelastic response params = EarthParams(model=paramname) # If convergence check requested, append to ls. if args.conv: ls = np.r_[ls, args.conv] zarray = np.linspace(params.rCore, 1., args.nlayers) # Compute the love numbers. hLks = compute_love_numbers(ls, zarray, params, err=1e-14, Q=2, it_counts=False, comp=not args.incomp, scaled=True) if args.conv: hLk_conv = hLks[:,-1] hLk_conv[-1] = args.conv*(1+hLk_conv[-1]) hLks = hLks[:,:-1] # Write them out. 
fmt = '{0:'+'{0:.0f}'.format(1+np.floor(np.log10(args.lmax)))+'d}\t{1}\t{2}\t{3}\n' # Write out header args.outfile.write("n\th'\tl'\tk'\n") for l, hLk in zip(ls, hLks.T): args.outfile.write(fmt.format(l, hLk[0], hLk[1]/l, -(1+hLk[2]))) if args.conv: hLk_inf = np.array(hLK_asymptotic(params)) errs = np.abs(hLk_conv - hLk_inf)/np.abs(hLk_inf) sys.stdout.write('''Difference of computed love numbers at {} from analytic value (if too large, consider increasing layers with '--nlayers'):\n'''.format(args.conv)) for tag, err in zip('hLK', errs): sys.stdout.write('\t{} : {:.2f}%\n'.format(tag, err*100)) def velove(): """useage: giapy-velove [-h] [--lstart LSTART] [--params PARAMS] [--nlayers NLAYERS] lmax [outfile] Compute the viscoelastic surface load Love numbers. positional arguments: lmax maximum order number to compute outfile file to save out (default: stdout) optional arguments: -h, --help show this help message and exit --lstart LSTART starting order number (default: 1). Cannot be less than 1. --params PARAMS material parameter table -n, --nlayers NLAYERS number of layers (default: 100) --incomp flag for incompressibility (default: False) -D, --lith LITH flexural rigidity of lith (1e23 N m), overwrite params """ # Read the command line arguments. parser = ArgumentParser(description='Compute the viscoelastic surface load Love numbers') parser.add_argument('-l', '--lstart', type=int, default=1, help='starting order number (default: %(default)s). 
Cannot be less than 1.') parser.add_argument('lmax', type=int, help='maximum order number to compute') parser.add_argument('--params', default=None, help="""material parameter table with columns: r (km) density (kg/m^3) bulk mod (GPa) shear mod (GPa) g (m/2^2) viscosity (1e21 Pa s) (default: PREM)""") parser.add_argument('-n', '--nlayers', type=int, default=1000, help='number of layers (default: %(default)s)') parser.add_argument('--incomp', default=False, action='store_const', const=True, help='impose incompressibility') parser.add_argument('--lith', '-D', type=float, default=-1, dest='lith', help='''The flexural rigidity of the lithosphere, in units of 1e23 N m (overrides parameter table, if set)''') parser.add_argument('outfile', nargs='?', type=FileType('w'), default=sys.stdout, help='file to save out') args = parser.parse_args() # Set up the order number range assert args.lstart >= 1, 'lstart must be 1 or greater.' ls = range(args.lstart, args.lmax+1) # Load prem if no paramname given if args.params is None: paramname = 'prem' else: paramname = os.path.abspath(args.params) # Load the parameters with no crust for viscoelastic response params = EarthParams(model=paramname+'_nocrust') # Check for lithospheric override if args.lith < 0: params.addLithosphere(D=args.lith) zarray = np.linspace(params.rCore, 1., args.nlayers) times = np.logspace(-4,np.log10(250),30) # Compute the viscoelastic Love numbers. hLkf = compute_viscel_numbers(ls, times, zarray, params, comp=not args.incomp, scaled=True) if len(ls)==1: hLkf = hLkf[None,...] # Load the parameters with crust for elastic response. params_crust = EarthParams(model=paramname) # Compute the elastic response for lithosphere correction. hLke = compute_love_numbers(ls, zarray, params_crust, err=1e-14, Q=2, it_counts=False, comp=not args.incomp, scaled=True).T # Incorporate the lithosphere correction. a = (1. 
- 1./ params.getLithFilter(n=np.asarray(ls))) hLkf += a[:,None,None]*hLke[:,:,None] # Add t=0 elastic response hLkf = np.dstack([hLke[:,:,None], hLkf]) # Convert to k' (incorporates self-gravitation of load) hLkf[:,2,:] = -1 - hLkf[:,2,:] # Write them out. fmt = '{0}\t{1}\t{2}\t{3}\n' # Write out header args.outfile.write("# Viscoelastic Love numbers computed in giapy. Formatted:\n") args.outfile.write("# l\n") args.outfile.write("# t\th'\tl'\tk'\n") for l, hLkl in zip(ls, hLkf): args.outfile.write('# l={}\n'.format(l)) for t, hLk in zip(np.r_[0,times], hLkl.T): args.outfile.write(fmt.format(t, hLk[0], hLk[1]/l, -(1+hLk[2])))
mit
-6,094,939,335,375,212,000
43.273684
100
0.580837
false
3.618065
false
false
false
rafidka/hadithhouse
hadiths/auth.py
2
2532
""" Contains Facebook-based authentication classes. These classes automatically reads the tokens in the request and authenticate a Facebook user. """ from django.contrib.auth.models import User from rest_framework.authentication import BaseAuthentication from rest_framework.exceptions import NotAuthenticated from hadiths import fbapi from hadiths.models import FbUser # The classes below are used in settings.py class FacebookAuthentication(BaseAuthentication): """ Authenticate requests having Facebook authentication token. """ def authenticate(self, request): """ Try to authenticate the user depending on Facebook tokens. If they are not available or invalid, the user returned is None. :param request: The request being made by the user. :return: The user or None. """ if request.method == 'GET' and request.path != '/apis/users/current': # We don't authenticate GET requests since our data are open to # everyone. An exception to that is when we need to get the # current user. return None if 'fb_token' not in request.query_params: return None fb_token = request.query_params['fb_token'] fb_user_info = fbapi.get_current_user(fb_token) if fb_user_info is None: raise NotAuthenticated('Invalid Facebook access token.') fb_id = fb_user_info['id'] try: fb_user = FbUser.objects.get(fb_id=fb_id) except FbUser.DoesNotExist: return None return fb_user.user, None class FacebookOfflineAuthentication(BaseAuthentication): """ Like FacebookAuthentication, but can be used when the developer doesn't have an internet connection. Obviously, it is fixed to return a certain user. """ def authenticate(self, request): """ Try to authenticate the user depending on Facebook tokens. If they are not available or invalid, the user returned is None. :param request: The request being made by the user. :return: The user or None. """ if request.method == 'GET' and request.path != '/apis/users/current': # We don't authenticate GET requests since our data are open to # everyone. 
An exception to that is when we need to get the # current user. return None if 'fb_token' not in request.query_params: return None return User.objects.first(), None
mit
3,294,355,897,632,497,000
35.695652
80
0.656793
false
4.529517
false
false
false
cuttlefishh/emp
legacy/code/emp/alpha_diversity_by_sample_type.py
1
3390
#!/usr/bin/env python from __future__ import division __author__ = "Jai Ram Rideout" __copyright__ = "Copyright 2012, The QIIME project" __credits__ = ["Jai Ram Rideout", "Greg Caporaso"] __license__ = "GPL" __version__ = "1.5.0-dev" __maintainer__ = "Jai Ram Rideout" __email__ = "jai.rideout@gmail.com" __status__ = "Development" """Contains functions used in the alpha_diversity_by_sample_type.py script.""" from collections import defaultdict from operator import itemgetter from os import makedirs from os.path import join from tempfile import NamedTemporaryFile from numpy import median from pylab import savefig, tight_layout from biom.parse import parse_biom_table from cogent import DNA, LoadSeqs from cogent.app.blast import blast_seqs, Blastall from cogent.app.formatdb import build_blast_db_from_fasta_path from cogent.parse.blast import BlastResult from cogent.parse.fasta import MinimalFastaParser from cogent.util.misc import remove_files from qiime.parse import parse_mapping_file_to_dict from qiime.pycogent_backports.distribution_plots import generate_box_plots from qiime.util import (parse_command_line_parameters, get_options_lookup, make_option) def alpha_diversity_by_sample_type(adiv_fs, mapping_f, mapping_category='Sample_Type', min_num_samples=11, category_values_to_exclude=None): """Will exclude 'NA' category value by default if this parameter is not provided""" if category_values_to_exclude is None: category_values_to_exclude = ['NA'] mapping_dict, mapping_comments = parse_mapping_file_to_dict(mapping_f) sample_type_map = {} #sample_type_counts = defaultdict(int) for samp_id in mapping_dict: sample_type_map[samp_id] = mapping_dict[samp_id][mapping_category] #sample_type_counts[sample_type_map[samp_id]] += 1 sample_type_to_adiv = defaultdict(list) for adiv_f in adiv_fs: adiv_data = [line.strip().split('\t') for line in adiv_f if line.strip()][1:] for samp_id, adiv in adiv_data: try: sample_type = sample_type_map[samp_id] except KeyError: sample_type = 
'Unknown' # TODO do we need to normalize this? how? #adiv = float(adiv) / sample_type_counts[sample_type] adiv = float(adiv) sample_type_to_adiv[sample_type].append(adiv) plotting_data = [(median(v), '%s (n=%d)' % (k, len(v)), v) for k, v in sample_type_to_adiv.items() if k != 'Unknown' and k not in category_values_to_exclude and len(v) >= min_num_samples] plotting_data.sort() plot_fig = generate_box_plots([dist[2] for dist in plotting_data], x_tick_labels=[dist[1] for dist in plotting_data], x_label=mapping_category, y_label='Alpha Diversity', title='Alpha Diversity by %s' % mapping_category) plot_fig.set_size_inches(12, 12) try: plot_fig.tight_layout() except ValueError: print "tight_layout() failed. Try making the plot figure larger " + \ "with Figure.set_size_inches(). The labels will be cut off " + \ "otherwise." return plotting_data, plot_fig
bsd-3-clause
4,353,328,139,314,533,400
37.965517
78
0.629499
false
3.512953
false
false
false
Exploit-install/Veil-Pillage
lib/http.py
4
2095
""" HTTP-related methods. Includes: RequestHandler() - a customized handler to serve out /tmp/pillage/ VeilHTTPServer() - a small webserver for Veil that can run HTTP or HTTPS """ import BaseHTTPServer, threading, ssl, os from SimpleHTTPServer import SimpleHTTPRequestHandler # Prepend /tmp/pillage/ to any served file path- not the best way # to do this (i.e. nesting) but it's quick and easy and all we need # to host out of the directory we want class RequestHandler(SimpleHTTPRequestHandler): def translate_path(self, path): return "/tmp/pillage/" + path class VeilHTTPServer(threading.Thread): """ Version of a simple HTTP[S] Server with specifiable port and SSL cert. Defaults to HTTP is no cert is specified. Uses RequestHandler to serve a custom directory. """ def __init__(self, port=80, cert=''): threading.Thread.__init__(self) # remove the temp directory, recreate it and build a blank index.html cleanCmd = "rm -rf /tmp/pillage/ && mkdir /tmp/pillage/ && touch /tmp/pillage/index.html" os.system(cleanCmd) self.server = BaseHTTPServer.HTTPServer(('0.0.0.0', port), RequestHandler) self.serverType = "HTTP" # wrap it all up in SSL if a cert is specified if cert != "": self.serverType = "HTTPS" self.server.socket = ssl.wrap_socket(self.server.socket, certfile=cert, server_side=True) def run(self): print "\n [*] Setting up "+self.serverType+" server..." try: self.server.serve_forever() except: pass def shutdown(self): print "\n [*] Killing "+self.serverType+" server..." # shut down the server/socket self.server.shutdown() self.server.socket.close() self.server.server_close() self._Thread__stop() # make sure all the threads are killed for thread in threading.enumerate(): if thread.isAlive(): try: thread._Thread__stop() except: pass
gpl-3.0
-5,674,370,111,604,362,000
28.097222
101
0.621957
false
4.075875
false
false
false
vvinuv/HaloModel
halowlsz/pressure_profiles.py
1
11370
import os, sys import numpy as np from numba import jit import pylab as pl from CosmologyFunctions import CosmologyFunctions from convert_NFW_RadMass import MfracToMvir, MvirToMRfrac @jit(nopython=True) def battaglia_profile_2d(x, y, Rs, M200, R200, z, rho_critical, omega_b0, omega_m0, cosmo_h, P01, P02, P03, xc1, xc2, xc3, beta1, beta2, beta3): ''' Using Battaglia et al (2012). Eq. 10. M200 in solar mass and R200 in Mpc x = r/Rs where r and Rs in angular diameter distance Retrun: Pressure profile in keV/cm^3 at radius r in angular comoving distance This result is confirmed by using Adam's code ''' #Rs & R200 are in the physical distance, i.e. angular comoving distance x = np.sqrt(x**2. + y**2) r = x * Rs x = r / R200 msolar = 1.9889e30 #kg mpc2cm = 3.0856e24 #cm G = 4.3e-9 #Mpc Mo^-1 (km/s)^2 alpha = 1.0 gamma = -0.3 P200 = 200. * rho_critical * omega_b0 * G * M200 / omega_m0 / 2. / R200 #Msun km^2 / Mpc^3 / s^2 #Delta=200 P0 = P01 * ((M200 / 1e14)**P02 * (1. + z)**P03) xc = xc1 * ((M200 / 1e14)**xc2 * (1. + z)**xc3) beta = beta1 * ((M200 / 1e14)**beta2 * (1. + z)**beta3) #Delta=500 #P0 = 7.49 * ((M200 / 1e14)**0.226 * (1. + z)**-0.957) #xc = 0.710 * ((M200 / 1e14)**-0.0833 * (1. + z)**0.853) #beta = 4.19 * ((M200 / 1e14)**0.0480 * (1. + z)**0.615) #Shock Delta=500 #P0 = 20.7 * ((M200 / 1e14)**-0.074 * (1. + z)**-0.743) #xc = 0.438 * ((M200 / 1e14)**0.011 * (1. + z)**1.01) #beta = 3.82 * ((M200 / 1e14)**0.0375 * (1. + z)**0.535) #print P0, xc, beta #print (P200*msolar * 6.24e18 * 1e3 / mpc2cm**3), P0, xc, beta pth = P200 * P0 * (x / xc)**gamma * (1. + (x/xc))**(-1. * beta) #(km/s)^2 M_sun / Mpc^3 #Joule = kg m^2 / s^2, Joule = 6.24e18 eV = 6.24e15 keV pth *= (msolar * 6.24e15 * 1e6 / mpc2cm**3) #keV/cm^3. 
1e6 implies that I have converted km to m p_e = pth * 0.518 #For Y=0.24, Vikram, Lidz & Jain return p_e @jit(nopython=True) def battaglia_profile_proj(x, y, Rs, M200, R200, z, rho_critical, omega_b0, omega_m0, cosmo_h): '''Projected to circle''' M = np.sqrt(xmax**2 - x**2) N = int(M / 0.01) if N == 0: return 2. * battaglia_profile_2d(x, 0., Rs, M200, R200, z, rho_critical, omega_b0, omega_m0, cosmo_h) else: xx = np.linspace(0, M, N) f = 0.0 for x1 in xx: f += battaglia_profile_2d(x, x1, Rs, M200, R200, z, rho_critical, omega_b0, omega_m0, cosmo_h) f *= (2 * (xx[1] - xx[0])) return f @jit(nopython=True) def ks2002(x, Mvir, z, BryanDelta, rho_critical, omega_b0, omega_m0, cosmo_h, nu=150.): ''' Output is pgas3d in unit of eV/cm^3 ''' Mvir, Rvir, M500, R500, rho_s, Rs = MvirToMRfrac(Mvir, z, BryanDelta, rho_critical, cosmo_h, frac=500.) conc = Rvir / Rs #print conc #Eq. 18 eta0 = 2.235 + 0.202 * (conc - 5.) - 1.16e-3 * (conc - 5.)**2 #Eq. 17 gamma = 1.137 + 8.94e-2 * np.log(conc/5.) - 3.68e-3 * (conc - 5.) #Eq. 16 B = 3 / eta0 * (gamma - 1.) / gamma / (np.log(1.+conc) / conc - 1./(1.+conc)) #print conc, gamma, eta0, B #Eq. 15 ygasc = (1. - B *(1. - np.log(1.+conc) / conc))**(1./(gamma-1.)) #Eq. 21 of KS 2002 rhogas0 = 7.96e13 * (omega_b0 * cosmo_h * cosmo_h/omega_m0) * (Mvir*cosmo_h/1e15)/ Rvir**3 / cosmo_h**3 * conc * conc / ygasc / (1.+conc)**2/(np.log(1.+conc)-conc/(1.+conc)) #In the CRL code it is multiplied by square of conc. However, I think it should be multiplied by only with concentration #Eq. 19 Tgas0 = 8.80 * eta0 * Mvir / 1e15 / Rvir #keV. This is really kBT Pgas0 = 55.0 * rhogas0 / 1e14 * Tgas0 / 8. #x = 8.6e-3 pgas3d = Pgas0 * (1. - B *(1. - np.log(1.+x) / x))**(gamma/(gamma-1.)) #print x,gamma, eta0, B, Pgas0, (1. - B *(1. - np.log(1.+x) / x))**(gamma/(gamma-1.)), pgas3d pgas2d = 0.0 txarr = np.linspace(x, 5*Rvir/Rs, 100) for tx in txarr: if tx <= 0: continue pgas2d += (1. - B *(1. - np.log(1.+tx) / tx))**(gamma/(gamma-1.)) pgas2d = 2. 
* Pgas0 * pgas2d * (txarr[1] - txarr[0]) #h = 6.625e-34 #kB = 1.38e-23 #Kcmb = 2.725 #x = h * nu * 1e9 / kB / Kcmb #y_factor = Kcmb * (x / np.tanh(x / 2.) - 4) p_e = pgas3d*0.518 #eV/cm^3 return x*Rs/Rvir, pgas3d, p_e #pgas3d*0.518, pgas2d, y_factor * pgas2d def bprofile(r, Mvir, z, BryanDelta, rho_critical, omega_b0, omega_m0, cosmo_h, mtype='vir'): ''' Using Battaglia et al (2012). Eq. 10. M200 in solar mass and R200 in Mpc mtype: Definition of mass provided. mtype=vir or frac Retrun: Pressure profile in eV/cm^3 at radius r ''' if mtype == 'vir': Mvir, Rvir, M200, R200, rho_s, Rs = MvirToMRfrac(Mvir, z, BryanDelta, rho_critical, cosmo_h) if mtype == 'frac': Mvir, Rvir, M200, R200, rho_s, Rs = MRfracToMvir(Mvir, z, BryanDelta, rho_critical, cosmo_h) print(M200, R200) #It seems R200 is in the physical distance, i.e. proper distance #Need to multiplied by (1+z) to get the comoving unit as I am giving r in #comoving unit. R200 *= (1. + z) #Comoving radius #r = x * (1. + z) * Rs #r = x * Rs x = r / R200 #print Mvir, M200, R200 msolar = 1.9889e30 #kg mpc2cm = 3.0856e24 #cm G = 4.3e-9 #Mpc Mo^-1 (km/s)^2 alpha = 1.0 gamma = -0.3 P200 = 200. * rho_critical * omega_b0 * G * M200 / omega_m0 / 2. / (R200 / (1. + z)) #Msun km^2 / Mpc^3 / s^2 P0 = 18.1 * ((M200 / 1e14)**0.154 * (1. + z)**-0.758) xc = 0.497 * ((M200 / 1e14)**-0.00865 * (1. + z)**0.731) beta = 4.35 * ((M200 / 1e14)**0.0393 * (1. + z)**0.415) #print P0, xc, beta #print (P200*msolar * 6.24e18 * 1e3 / mpc2cm**3), P0, xc, beta pth = P200 * P0 * (x / xc)**gamma * (1. + (x/xc))**(-1. * beta) #(km/s)^2 M_sun / Mpc^3 #Joule = kg m^2 / s^2, Joule = 6.24e18 eV = 6.24e15 keV pth *= (msolar * 6.24e15 * 1e6 / mpc2cm**3) #keV/cm^3. 
1e6 implies that I have converted km to m p_e = pth * 0.518 #For Y=0.24, Vikram, Lidz & Jain return x*R200/(1.+z)/Rvir, pth, p_e #@jit(nopython=True) def arnaud_profile(x, y, Mvir, zi, BD, rho_crit, hz, omega_b0, omega_m0, cosmo_h): Mvir, Rvir, M500, R500, rho_s, Rs = MvirToMRfrac(Mvir, zi, BD, rho_crit, cosmo_h, frac=500.0) print(M500, R500) r = x * R500 x = np.sqrt(x**2. + y**2.) #Eq. 11, 12, 13 P0 = 8.403 * (0.7/cosmo_h)**1.5 c500 = 1.177 gamma = 0.3081 alpha = 1.0510 beta = 5.4905 px = P0 / (c500 * x)**gamma / (1. + (c500 * x)**alpha)**((beta - gamma) / alpha) #alpha_p=0.12 and alpha'_p(x) can be ignored from first approximation pr = 1.65 * 1e-3 * hz**(8./3.)*(M500/3.e14/0.7)**(2./3.+0.12) * px * 0.7**2 #keV/cm^-3 return pr / 0.518 @jit(nopython=True) def arnaud_profile_2d(x, y, Rs, M500, R500, zi, rho_crit, hz, omega_b0, omega_m0, cosmo_h): r = x * R500 x = np.sqrt(x**2. + y**2.) #Eq. 11, 12, 13 P0 = 8.403 * (0.7/cosmo_h)**1.5 c500 = 1.177 gamma = 0.3081 alpha = 1.0510 beta = 5.4905 px = P0 / (c500 * x)**gamma / (1. + (c500 * x)**alpha)**((beta - gamma) / alpha) #alpha_p=0.12 and alpha'_p(x) can be ignored from first approximation pr = 1.65 * 1e-3 * hz**(8./3.)*(M500/3.e14/0.7)**(2./3.+0.12) * px * 0.7**2 #keV/cm^-3 return pr / 0.518 @jit(nopython=True) def arnaud_profile_proj(x, Rs, M500, R500, zi, rho_crit, hz, xmax, omega_b0, omega_m0, cosmo_h): M = np.sqrt(xmax**2 - x**2) N = int(M / 0.01) if N == 0: return 2. * arnaud_profile_2d(x, 0, Rs, M500, R500, zi, rho_crit, hz, omega_b0, omega_m0, cosmo_h) else: xx = np.linspace(0, M, N) f = 0.0 for x1 in xx: f += arnaud_profile_2d(x, x1, Rs, M500, R500, zi, rho_crit, hz, omega_b0, omega_m0, cosmo_h) #print xx f *= (2 * (xx[1] - xx[0])) return f if __name__=='__main__': from scipy.interpolate import interp1d z = 1. 
#0.0231 cosmo = CosmologyFunctions(z, 'wlsz.ini', 'battaglia') omega_b0 = cosmo._omega_b0 omega_m0 = cosmo._omega_m0 cosmo_h = cosmo._h BryanDelta = cosmo.BryanDelta() rho_critical = cosmo.rho_crit() * cosmo._h * cosmo._h rarr = np.logspace(-3, 3, 100) Mvir = 1.e15 #/ cosmo_h Mvir, Rvir, M200, R200, rho_s, Rs = MvirToMRfrac(Mvir, z, BryanDelta, rho_critical, cosmo_h) print('%.2e %.2f %.2e %.2f %.2e %.2f'%(Mvir, Rvir, M200, R200, rho_s, Rs)) M200 = 8.915e14 R200 = 1.392 Rs = 0.53 xarr = rarr / Rs pe_ba = np.array([battaglia_profile_2d(x, 0., Rs, M200, R200, z, rho_critical, omega_b0, omega_m0, cosmo_h) for x in xarr]) pl.subplot(121) pl.loglog(np.logspace(-3, 3, 100), pe_ba, label='Vinu') spl = interp1d(np.logspace(-3, 3, 100), pe_ba, fill_value='extrapolate') #This file contains the angular radial bins NOT comoving radial bins and the 3d pressure profile from Adam's code. This is implemented lines between ~130 to 150 fa = np.genfromtxt('/media/luna1/vinu/software/AdamSZ/pressure3d_z_1_M_1e15') pl.loglog(fa[:,0], fa[:,1], label='Adam') pl.legend(loc=0) pl.subplot(122) pl.scatter(fa[:,0], fa[:,1]/spl(fa[:,0])) pl.show() sys.exit() #ks2002(1.34e-2, Mvir, z, BryanDelta, rho_critical, omega_b0, omega_m0, cosmo_h, nu=150.) #sys.exit() rrvirarr, rrvirarr1, pgas3d_ksarr, pgas3d_baarr = [], [], [], [] pe_ba_arr, pe_ks_arr = [], [] for rrs in np.logspace(-2, np.log10(20), 30): rrvir, pgas3d_ks, pe_ks = ks2002(rrs, Mvir, z, BryanDelta, rho_critical, omega_b0, omega_m0, cosmo_h, 150.) 
rrvirarr.append(rrvir) pe_ba = battaglia_profile_2d(rrs, 0., Rs, M200, R200, z, rho_critical, omega_b0, omega_m0, cosmo_h) rrvirarr1.append(rrs/Rvir) pgas3d_ksarr.append(pgas3d_ks) pgas3d_baarr.append(pe_ba * 1e3 / 0.518) pe_ba_arr.append(pe_ba * 1e3) pe_ks_arr.append(pe_ks) pl.subplot(121) pl.loglog(rrvirarr1, pgas3d_baarr, c='k', label='Battaglia') pl.loglog(rrvirarr, pgas3d_ksarr, c='g', label='KS') f = np.genfromtxt('/media/luna1/vinu/software/komastu_crl/clusters/battagliaprofile/battaglia/xvir_pgas_tsz.txt') pl.loglog(f[:,0], f[:,1], c='r', label='CRL Battaglia') f = np.genfromtxt('/media/luna1/vinu/software/komastu_crl/clusters/komatsuseljakprofile/ks/xvir_pgas_tsz.txt') pl.loglog(f[:,0], f[:,1], c='m', label='CRL KS') pl.legend(loc=0) f = np.genfromtxt('/media/luna1/vinu/software/komastu_crl/clusters/komatsuseljakprofile/ks/xvir_pgas_tsz.txt') pl.subplot(122) pl.loglog(rrvirarr1, pe_ba_arr, c='k', label='Battaglia electron') pl.loglog(rrvirarr, pe_ks_arr, c='g', label='KS electron') f = np.genfromtxt('/media/luna1/vinu/software/komastu_crl/clusters/battagliaprofile/battaglia/xvir_pgas_tsz.txt') pl.loglog(f[:,0], f[:,1]*0.518, c='r', label='CRL Battaglia electron') f = np.genfromtxt('/media/luna1/vinu/software/komastu_crl/clusters/komatsuseljakprofile/ks/xvir_pgas_tsz.txt') pl.loglog(f[:,0], f[:,1]*0.518, c='m', label='CRL KS electron') pl.legend(loc=0) pl.show() sys.exit() pl.subplot(133) pl.loglog(np.array(rrvirarr), pgas2darr, c='k', label='Vinu') pl.loglog(f[:,0], f[:,2], c='r', label='KS') pl.legend(loc=0) pl.show()
gpl-3.0
-3,316,877,864,590,955,000
39.899281
298
0.569041
false
2.316154
false
false
false
a11r/grpc
tools/buildgen/build-cleaner.py
44
3608
#!/usr/bin/env python2.7 # Copyright 2015, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# produces cleaner build.yaml files import collections import os import sys import yaml TEST = (os.environ.get('TEST', 'false') == 'true') _TOP_LEVEL_KEYS = ['settings', 'proto_deps', 'filegroups', 'libs', 'targets', 'vspackages'] _ELEM_KEYS = [ 'name', 'gtest', 'cpu_cost', 'flaky', 'build', 'run', 'language', 'public_headers', 'headers', 'src', 'deps'] def repr_ordered_dict(dumper, odict): return dumper.represent_mapping(u'tag:yaml.org,2002:map', odict.items()) yaml.add_representer(collections.OrderedDict, repr_ordered_dict) def rebuild_as_ordered_dict(indict, special_keys): outdict = collections.OrderedDict() for key in sorted(indict.keys()): if '#' in key: outdict[key] = indict[key] for key in special_keys: if key in indict: outdict[key] = indict[key] for key in sorted(indict.keys()): if key in special_keys: continue if '#' in key: continue outdict[key] = indict[key] return outdict def clean_elem(indict): for name in ['public_headers', 'headers', 'src']: if name not in indict: continue inlist = indict[name] protos = list(x for x in inlist if os.path.splitext(x)[1] == '.proto') others = set(x for x in inlist if x not in protos) indict[name] = protos + sorted(others) return rebuild_as_ordered_dict(indict, _ELEM_KEYS) for filename in sys.argv[1:]: with open(filename) as f: js = yaml.load(f) js = rebuild_as_ordered_dict(js, _TOP_LEVEL_KEYS) for grp in ['filegroups', 'libs', 'targets']: if grp not in js: continue js[grp] = sorted([clean_elem(x) for x in js[grp]], key=lambda x: (x.get('language', '_'), x['name'])) output = yaml.dump(js, indent=2, width=80, default_flow_style=False) # massage out trailing whitespace lines = [] for line in output.splitlines(): lines.append(line.rstrip() + '\n') output = ''.join(lines) if TEST: with open(filename) as f: assert f.read() == output else: with open(filename, 'w') as f: f.write(output)
bsd-3-clause
8,627,390,080,199,632,000
34.722772
91
0.696785
false
3.651822
false
false
false
huddlej/django_lims
samples/models.py
1
1351
from django.db import models #import ncbi class Clone(models.Model): """ Represents DNA for a clone identified by a unique name and living in a cellular environment like a bacterium or yeast. """ name = models.CharField(max_length=100) accessions = models.CharField(max_length=200, null=True, blank=True) def __unicode__(self): return self.name def get_accessions(self): if not self.accessions: #self.accessions = u"".join(ncbi.search_clone_by_name(self.name)) #self.save() self.accessions = [] return self.accessions class SequencedClone(models.Model): """ Represents a sequenced clone from NCBI's CloneDB reports. """ gi = models.IntegerField(blank=True, null=True) clonename = models.CharField(max_length=100, db_index=True) stdn = models.CharField(max_length=1) chrom = models.CharField(max_length=5) phase = models.IntegerField(blank=True, null=True) clonestate = models.CharField(max_length=5) gcenter = models.CharField(max_length=100) accession = models.CharField(max_length=20, db_index=True) seqlen = models.IntegerField(blank=True, null=True) libabbr = models.CharField(max_length=10, db_index=True) def __unicode__(self): return u"%s (%s)" % (self.clonename, self.accession)
gpl-2.0
4,268,282,638,533,947,000
31.95122
77
0.669134
false
3.411616
false
false
false
haadr/myodbus
sample.py
1
3130
from myodbus import MyoDbus import numpy import argparse import struct import dbus from dbus.mainloop.glib import DBusGMainLoop, threads_init from gi.repository import GLib ################################################################################ #### Args ################################################################################ parser = argparse.ArgumentParser(description='Sample program for connecting to, configuring and reading sensor values from a Myo IMU sensor.') parser.add_argument('--sleep', dest='sleep', action='store_true') parser.add_argument('--myopath', dest='myopath', required=True, help="dbus path to Myo device. Example: /org/bluez/hci1/dev_XX_XX_XX_XX_XX_XX") parser.set_defaults(sleep=False) args = parser.parse_args() ################################################################################ #### Callback function ################################################################################ def handleIMU( interfaceName, payload, arrayOfString, myo_basepath=None): print("\n################################################################################") print("From Myo with path: {}".format(myo_basepath[:37])) print("handleIMU arguments: \n\tInterface name: {}\n\tData: {}\n\t{}".format(interfaceName, payload, arrayOfString)) # Unpack sensor values rb = payload['Value'] MYOHW_ORIENTATION_SCALE = 16384.0 MYOHW_ACCELEROMETER_SCALE = 2048.0 MYOHW_GYROSCOPE_SCALE = 16.0 vals = struct.unpack('10h', rb) quat = vals[:4] acc = vals[4:7] gyr = vals[7:10] acc = [ a * MYOHW_ACCELEROMETER_SCALE for a in acc ] gyr = [ g * MYOHW_GYROSCOPE_SCALE for g in gyr ] quat = [ q * MYOHW_ORIENTATION_SCALE for q in quat ] magnitude = numpy.sqrt( sum( [quat[i]*quat[i] for i in range(len(quat))] ) ) for i,q in enumerate(quat): quat[i] = q/magnitude print("quat: {}\nacc: {}\ngyro: {}".format( quat, acc, gyr) ) print("################################################################################") 
################################################################################ #### Event loop ################################################################################ DBusGMainLoop(set_as_default=True) loop = GLib.MainLoop() # Get system bus bus = dbus.SystemBus() if __name__ == '__main__': # New Myo myo = MyoDbus(bus, args.myopath) # Connect and configure myo.connect(wait=True, verbose=True) myo.lock() myo.setNeverSleep() myo.subscribeToIMU() myo.attachIMUHandler( handleIMU ) myo.enableIMU() print("Battery: {}%".format( myo.getBatterLevel() ) ) # Start main loop try: print("Running event loop! Press Ctrl+C to exit...") loop.run() except KeyboardInterrupt: print("Shutting down...") loop.quit() print("Disconnecting...") myo.unsubscribeFromIMU() myo.disableIMU_EMG_CLF() myo.vibrate(duration='short') if args.sleep: print("Setting Myo to deep sleep...") myo.setDeepSleep()
mit
-5,124,620,437,763,198,000
32.297872
143
0.505112
false
3.902743
false
false
false
luwei0917/awsemmd_script
compute_energy_helperFunctions.py
1
49784
import os import sys import random import time from random import seed, randint import argparse import platform from datetime import datetime import imp import numpy as np import fileinput from itertools import product import pandas as pd from scipy.interpolate import griddata from scipy.interpolate import interp2d import seaborn as sns from os import listdir import matplotlib.pyplot as plt import seaborn as sns from scipy.interpolate import griddata import matplotlib as mpl # sys.path.insert(0,'..') # from notebookFunctions import * # from .. import notebookFunctions from Bio.PDB.PDBParser import PDBParser from pyCodeLib import * code = {"GLY" : "G", "ALA" : "A", "LEU" : "L", "ILE" : "I", "ARG" : "R", "LYS" : "K", "MET" : "M", "CYS" : "C", "TYR" : "Y", "THR" : "T", "PRO" : "P", "SER" : "S", "TRP" : "W", "ASP" : "D", "GLU" : "E", "ASN" : "N", "GLN" : "Q", "PHE" : "F", "HIS" : "H", "VAL" : "V", "M3L" : "K", "MSE" : "M", "CAS" : "C"} gamma_se_map_1_letter = { 'A': 0, 'R': 1, 'N': 2, 'D': 3, 'C': 4, 'Q': 5, 'E': 6, 'G': 7, 'H': 8, 'I': 9, 'L': 10, 'K': 11, 'M': 12, 'F': 13, 'P': 14, 'S': 15, 'T': 16, 'W': 17, 'Y': 18, 'V': 19} def read_gamma(gammaFile): data = np.loadtxt(gammaFile) gamma_direct = data[:210] gamma_mediated = data[210:] return gamma_direct, gamma_mediated def change_gamma_format(gamma_direct, gamma_mediated): nwell = 2 gamma_ijm = np.zeros((nwell, 20, 20)) water_gamma_ijm = np.zeros((nwell, 20, 20)) protein_gamma_ijm = np.zeros((nwell, 20, 20)) m = 0 count = 0 for i in range(20): for j in range(i, 20): gamma_ijm[0][i][j] = gamma_direct[count][0] gamma_ijm[0][j][i] = gamma_direct[count][0] gamma_ijm[1][i][j] = gamma_direct[count][1] gamma_ijm[1][j][i] = gamma_direct[count][1] count += 1 count = 0 for i in range(20): for j in range(i, 20): water_gamma_ijm[m][i][j] = gamma_mediated[count][1] water_gamma_ijm[m][j][i] = gamma_mediated[count][1] count += 1 count = 0 for i in range(20): for j in range(i, 20): protein_gamma_ijm[m][i][j] = 
gamma_mediated[count][0] protein_gamma_ijm[m][j][i] = gamma_mediated[count][0] count += 1 return gamma_ijm, water_gamma_ijm, protein_gamma_ijm def compute_chi(data): res_list = get_res_list(structure) energy = 0 for res1globalindex, res1 in enumerate(res_list): ca = ca_all[i] cb = cb_all[i] c = c_all[i] n = n_all[i] chi0 = -0.83 k_chi = 20*4.184 r_ca_cb = cb-ca r_c_ca = ca-c r_ca_n = n-ca norm_r_ca_cb = np.sum(r_ca_cb**2)**0.5 norm_r_c_ca = np.sum(r_c_ca**2)**0.5 norm_r_ca_n = np.sum(r_ca_n**2)**0.5 a = np.cross(-r_c_ca,r_ca_n)/norm_r_c_ca/norm_r_ca_n chi = np.dot(a,r_ca_cb)/norm_r_ca_cb dchi = chi - chi0 energy += k_chi*dchi*dchi return energy def compute_debye_huckel(data): res_list = get_res_list(structure) k_dh = 4.15 debye_huckel = 0 k_screening = 1.0 screening_length = 10 # (in the unit of A) min_seq_sep = 10 for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) for res2globalindex, res2 in enumerate(res_list): res2index = get_local_index(res2) res2chain = get_chain(res2) # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): # if res2globalindex > res1globalindex: if res2globalindex >= res1globalindex + min_seq_sep: res1Name = three_to_one(res1.get_resname()) res2Name = three_to_one(res2.get_resname()) charge_1 = 0 charge_2 = 0 if res1Name == "R" or res1Name == "K": charge_1 = 1 if res1Name == "D" or res1Name == "E": charge_1 = -1 if res2Name == "R" or res2Name == "K": charge_2 = 1 if res2Name == "D" or res2Name == "E": charge_2 = -1 if charge_1 * charge_2 != 0: r = get_interaction_distance(res1, res2) debye_huckel += charge_1*charge_2/r*math.exp(-k_screening*r/screening_length) debye_huckel *= k_dh return debye_huckel input_pdb_filename = "/Users/weilu/Research/server_backup/jan_2019/compute_energy/12asA00" def phosphorylation(res1globalindex, res2globalindex, res1type, res2type, m, phosphorylated_residue_index, phosphorylated_residue_seq): # // 
Four letter classes # // 1) SHL: Small Hydrophilic (ALA, GLY, PRO, SER THR) or (A, G, P, S, T) or {0, 7, 14, 15, 16} # // 2) AHL: Acidic Hydrophilic (ASN, ASP, GLN, GLU) or (N, D, Q, E) or {2, 3, 5, 6} # // 3) BAS: Basic (ARG HIS LYS) or (R, H, K) or {1, 8, 11} # // 4) HPB: Hydrophobic (CYS, ILE, LEU, MET, PHE, TRP, TYR, VAL) or (C, I, L, M, F, W, Y, V) or {4, 9, 10, 12, 13, 17, 18, 19} bb_four_letter_map = [1, 3, 2, 2, 4, 2, 2, 1, 3, 4, 4, 3, 4, 4, 1, 1, 1, 4, 4, 4] k_hypercharge = 1 if (res1globalindex+1) in phosphorylated_residue_index: # print(res1globalindex, res2globalindex, k_hypercharge) idx = phosphorylated_residue_index.index(res1globalindex+1) if bb_four_letter_map[res2type] == 1: k_hypercharge = m elif bb_four_letter_map[res2type] == 2 or bb_four_letter_map[res2type] == 3: k_hypercharge = m*m else: k_hypercharge = 1 res1type = res_type_map[phosphorylated_residue_seq[idx]] if (res2globalindex+1) in phosphorylated_residue_index: # print(res1globalindex, res2globalindex, k_hypercharge) idx = phosphorylated_residue_index.index(res2globalindex+1) if bb_four_letter_map[res1type] == 1: k_hypercharge = m elif bb_four_letter_map[res1type] == 2 or bb_four_letter_map[res1type] == 3: k_hypercharge = m*m else: k_hypercharge = 1 res2type = res_type_map[phosphorylated_residue_seq[idx]] return k_hypercharge, res1type, res2type def compute_mediated(structure, protein_gamma_ijm, water_gamma_ijm, kappa=5.0, hasPhosphorylation=False, fixWellCenter=True): if hasPhosphorylation: import configparser config = configparser.ConfigParser() config.read("phosphorylation.dat") m = eval(config['phosphorylation']['m']) phosphorylated_residue_index = eval(config['phosphorylation']['phosphorylated_residue_index']) phosphorylated_residue_seq = eval(config['phosphorylation']['phosphorylated_residue_seq']) # print(m, phosphorylated_residue_index, phosphorylated_residue_seq) # print(res_type_map['E']) res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = 
get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) r_min = 6.5 r_max = 9.5 # kappa = 5.0 min_seq_sep = 10 density_threshold = 2.6 density_kappa = 7.0 # phi_mediated_contact_well = np.zeros((2, 20,20)) v_mediated = 0 if not fixWellCenter: a = pd.read_csv("/Users/weilu/opt/parameters/side_chain/cbd_cbd_real_contact_symmetric.csv") cb_density = calculate_cb_density_wellCenter(res_list, neighbor_list, a) for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) rho_i = cb_density[res1globalindex] for res2 in get_neighbors_within_radius(neighbor_list, res1, r_max+2.0): res2index = get_local_index(res2) res2chain = get_chain(res2) res2globalindex = get_global_index(res_list, res2) rho_j = cb_density[res2globalindex] # if 1 is wrong. because B20 will interact with A1 twice. if_1 = res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex) # if 2 is the correct one. should be used. 
if_2 = res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex) if_3 = res2globalindex - res1globalindex >= min_seq_sep # if if_1 and not if_2: # print("true 1, false 2",res2globalindex, res1globalindex, res2chain, res1chain, res2index, res1index) # if not if_1 and if_2: # print("false 1, true 2", res2globalindex, res1globalindex, res2chain, res1chain, res2index, res1index) # if if_3 and not if_1: # print("true 3, false 1",res2globalindex, res1globalindex, res2chain, res1chain, res2index, res1index) # if not if_3 and if_1: # print("false 3, true 1, if3 stricker than if1,",res2globalindex, res1globalindex, res2chain, res1chain, res2index, res1index) if if_3 and not if_2: print("true 3, false 2",res2globalindex, res1globalindex, res2chain, res1chain, res2index, res1index) if not if_3 and if_2: print("false 3, true 2, if3 stricker than if2,",res2globalindex, res1globalindex, res2chain, res1chain, res2index, res1index) # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): # if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain): # if res2globalindex - res1globalindex >= min_seq_sep: if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): res1type = get_res_type(res_list, res1) res2type = get_res_type(res_list, res2) rij = get_interaction_distance(res1, res2) res1type_old = res1type res2type_old = res2type if hasPhosphorylation: k_hypercharge, res1type, res2type = phosphorylation(res1globalindex, res2globalindex, res1type, res2type, m, phosphorylated_residue_index, phosphorylated_residue_seq) else: k_hypercharge = 1 protein_gamma = protein_gamma_ijm[0][res1type][res2type]*k_hypercharge water_gamma = water_gamma_ijm[0][res1type][res2type]*k_hypercharge if k_hypercharge != 1: print(res1globalindex, res2globalindex, res1type_old, res2type_old, res1type, res2type, 
protein_gamma_ijm[0][res1type_old][res2type_old], water_gamma_ijm[0][res1type_old][res2type_old], protein_gamma, water_gamma, k_hypercharge) _pij_protein = prot_water_switchFunc_sigmaProt( rho_i, rho_j, density_threshold, density_kappa) * protein_gamma _pij_water = prot_water_switchFunc_sigmaWater( rho_i, rho_j, density_threshold, density_kappa) * water_gamma if not fixWellCenter: res1_name = res1.get_resname() res2_name = res2.get_resname() if res1_name == "GLY" or res2_name == "GLY": r_min_res1_res2 = 6.5 r_max_res1_res2 = 9.5 else: b = a.query(f"ResName1=='{res1_name}' and ResName2=='{res2_name}'") if len(b) == 0: b = a.query(f"ResName1=='{res2_name}' and ResName2=='{res1_name}'") try: r_min_res1_res2 = float(b["r_max"]) + 1.5 r_max_res1_res2 = float(b["r_max"]) + 4.5 except: print(b) # r_min_res1_res2 = 6.5 # r_max_res1_res2 = 9.5 v_mediated += (_pij_protein + _pij_water) * interaction_well(rij, r_min_res1_res2, r_max_res1_res2, kappa) else: v_mediated += (_pij_protein + _pij_water) * interaction_well(rij, r_min, r_max, kappa) return v_mediated input_pdb_filename = "/Users/weilu/Research/server_backup/jan_2019/compute_energy/12asA00" def compute_direct(structure, gamma_ijm, kappa=5.0, hasPhosphorylation=False, r_min=2.5, fixWellCenter=True, environment=False): if hasPhosphorylation: import configparser config = configparser.ConfigParser() config.read("phosphorylation.dat") m = eval(config['phosphorylation']['m']) phosphorylated_residue_index = eval(config['phosphorylation']['phosphorylated_residue_index']) phosphorylated_residue_seq = eval(config['phosphorylation']['phosphorylated_residue_seq']) # print(m, phosphorylated_residue_index, phosphorylated_residue_seq) # print(res_type_map['E']) res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) if environment: isH = {} isP = {} for i in range(20): isH[dindex_to_1[i]] = res_type_map_HP[dindex_to_1[i]] isP[dindex_to_1[i]] = 1 - 
res_type_map_HP[dindex_to_1[i]] cbd_info = pd.read_csv("/Users/weilu/opt/parameters/side_chain/cbd_cbd_real_contact_symmetric.csv") density_H = calculate_property_density_with_cbd_info(res_list, neighbor_list, isH, cbd_info).round(3) density_P = calculate_property_density_with_cbd_info(res_list, neighbor_list, isP, cbd_info).round(3) # print(density_H) # print(density_P) # print(isH, isP) density_kappa = 1 d_HP0 = 0 # r_min = 4.5 r_max = 6.5 # kappa = 5 min_seq_sep = 10 # phi_pairwise_contact_well = np.zeros((20,20)) v_direct = 0 if not fixWellCenter: a = pd.read_csv("/Users/weilu/opt/parameters/side_chain/cbd_cbd_real_contact_symmetric.csv") for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) # print(get_interaction_atom(res1).get_vector()[2], type(get_interaction_atom(res1).get_vector()[2])) for res2 in get_neighbors_within_radius(neighbor_list, res1, r_max+2.0): res2index = get_local_index(res2) res2chain = get_chain(res2) res2globalindex = get_global_index(res_list, res2) # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): # if res2globalindex - res1globalindex >= min_seq_sep: if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): # print(i) res1type = get_res_type(res_list, res1) res2type = get_res_type(res_list, res2) rij = get_interaction_distance(res1, res2) if hasPhosphorylation: k_hypercharge, res1type, res2type = phosphorylation(res1globalindex, res2globalindex, res1type, res2type, m, phosphorylated_residue_index, phosphorylated_residue_seq) else: k_hypercharge = 1 gamma = gamma_ijm[0][res1type][res2type] * k_hypercharge # phi_pairwise_contact_well[res1type][res2type] += interaction_well(rij, r_min, r_max, kappa) if not fixWellCenter: res1_name = res1.get_resname() res2_name = res2.get_resname() if res1_name == "GLY" or res2_name == "GLY": r_min_res1_res2 = 2.5 r_max_res1_res2 
= 6.5 else: b = a.query(f"ResName1=='{res1_name}' and ResName2=='{res2_name}'") if len(b) == 0: b = a.query(f"ResName1=='{res2_name}' and ResName2=='{res1_name}'") try: r_min_res1_res2 = float(b["r_min"]) - 0.5 r_max_res1_res2 = float(b["r_max"]) + 1.5 except: print(b) # r_min_res1_res2 = 2.5 # r_max_res1_res2 = 6.5 else: r_min_res1_res2 = r_min r_max_res1_res2 = r_max if environment: d_H_i = density_H[res1globalindex] d_P_i = density_P[res1globalindex] d_H_j = density_H[res2globalindex] d_P_j = density_P[res2globalindex] d_H = d_H_i + d_H_j d_P = d_P_i + d_P_j sigma_H = 0.5 * np.tanh(density_kappa * (d_H - d_P - d_HP0)) + 0.5 sigma_P = 1 - sigma_H gamma_H = gamma_ijm[0][res1type][res2type] gamma_P = gamma_ijm[1][res1type][res2type] theta = interaction_well(rij, r_min_res1_res2, r_max_res1_res2, kappa) v_direct += (gamma_H * sigma_H + gamma_P * sigma_P) * theta else: v_direct += gamma * interaction_well(rij, r_min_res1_res2, r_max_res1_res2, kappa) return v_direct def compute_burial(structure, burial_gamma, kappa=4.0, hasPhosphorylation=False): if hasPhosphorylation: import configparser config = configparser.ConfigParser() config.read("phosphorylation.dat") m = eval(config['phosphorylation']['m']) phosphorylated_residue_index = eval(config['phosphorylation']['phosphorylated_residue_index']) phosphorylated_residue_seq = eval(config['phosphorylation']['phosphorylated_residue_seq']) print(m, phosphorylated_residue_index, phosphorylated_residue_seq) print(res_type_map['E']) res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) rho_table = [[0.0, 3.0], [3.0, 6.0], [6.0, 9.0]] v_burial = 0 for i in range(3): for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) res1type = get_res_type(res_list, res1) res1density = cb_density[res1globalindex] if hasPhosphorylation and 
(res1globalindex+1) in phosphorylated_residue_index: idx = phosphorylated_residue_index.index(res1globalindex+1) res1type = res_type_map[phosphorylated_residue_seq[idx]] gamma = burial_gamma[i][res1type] # print res1globalindex, res1index, res1chain, res1type, res1density v_burial += gamma * interaction_well(res1density, rho_table[i][0], rho_table[i][1], kappa) return v_burial def read_hydrophobicity_scale(seq, tableLocation, isNew=False): seq_dataFrame = pd.DataFrame({"oneLetterCode":list(seq)}) # HFscales = pd.read_table("~/opt/small_script/Whole_residue_HFscales.txt") # print(f"reading hydrophobicity scale table from {tableLocation}/Whole_residue_HFscales.txt") HFscales = pd.read_csv(f"{tableLocation}/Whole_residue_HFscales.txt", sep="\t") if not isNew: # Octanol Scale # new and old difference is at HIS. code = {"GLY" : "G", "ALA" : "A", "LEU" : "L", "ILE" : "I", "ARG+" : "R", "LYS+" : "K", "MET" : "M", "CYS" : "C", "TYR" : "Y", "THR" : "T", "PRO" : "P", "SER" : "S", "TRP" : "W", "ASP-" : "D", "GLU-" : "E", "ASN" : "N", "GLN" : "Q", "PHE" : "F", "HIS+" : "H", "VAL" : "V", "M3L" : "K", "MSE" : "M", "CAS" : "C"} else: code = {"GLY" : "G", "ALA" : "A", "LEU" : "L", "ILE" : "I", "ARG+" : "R", "LYS+" : "K", "MET" : "M", "CYS" : "C", "TYR" : "Y", "THR" : "T", "PRO" : "P", "SER" : "S", "TRP" : "W", "ASP-" : "D", "GLU-" : "E", "ASN" : "N", "GLN" : "Q", "PHE" : "F", "HIS0" : "H", "VAL" : "V", "M3L" : "K", "MSE" : "M", "CAS" : "C"} HFscales_with_oneLetterCode = HFscales.assign(oneLetterCode=HFscales.AA.str.upper().map(code)).dropna() data = seq_dataFrame.merge(HFscales_with_oneLetterCode, on="oneLetterCode", how="left") return data def compute_membrane(structure, kappa=4.0): k_membrane = 1 membrane_center = 0 k_m = 2 z_m = 15 tanh = np.tanh res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) # sequence = get_sequence_from_structure(structure) seq = [three_to_one(res.get_resname()) for res in res_list] sequence = "".join(seq) v_membrane = 0 
hydrophobicityScale_list = read_hydrophobicity_scale(sequence, "/Users/weilu/openmmawsem/helperFunctions")["DGwoct"].values # print(hydrophobicityScale_list) for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) res1type = get_res_type(res_list, res1) z = res1['CA'].get_coord()[-1] # print res1globalindex, res1index, res1chain, res1type, res1density v_membrane += k_membrane*(0.5*tanh(k_m*((z-membrane_center)+z_m))+0.5*tanh(k_m*(z_m-(z-membrane_center))))*hydrophobicityScale_list[res1globalindex] return v_membrane def compute_positive_inside_rule(structure, kappa=4.0): k_membrane = 1 membrane_center = 0 k_m = 2 z_m = 15 tanh = np.tanh res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) # sequence = get_sequence_from_structure(structure) seq = [three_to_one(res.get_resname()) for res in res_list] sequence = "".join(seq) v_membrane = 0 positive_inside_residue_table = {"G":0, "A":0, "V":0, "C":0, "P":0, "L":0, "I":0, "M":0, "W":0, "F":0, "S":0, "T":0, "Y":0, "N":0, "Q":0, "K":-1, "R":-1, "H":0, "D":0, "E":0} for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) res1type = get_res_type(res_list, res1) try: z = res1['CB'].get_coord()[-1] except: z = res1['CA'].get_coord()[-1] # print res1globalindex, res1index, res1chain, res1type, res1density thickness = 15 z_m = thickness * positive_inside_residue_table[three_to_one(res1.get_resname())] v_membrane += k_membrane*(z-membrane_center-z_m)**2 v_membrane /= 100 return v_membrane input_pdb_filename = "/Users/weilu/Research/server_backup/jan_2019/compute_energy/12asA00.pdb" def compute_direct_2(input_pdb_filename, gamma_ijm): _all = [] seq = "" p=PDBParser() structure=p.get_structure("x", input_pdb_filename) for model in structure: for chain in model: for residue in chain: seq += code[residue.resname] if residue.resname == "GLY": x,y,z = residue["CA"].get_coord() else: x,y,z = 
residue["CB"].get_coord() _all.append([x,y,z]) v_direct = 0 data = np.array(_all) n = len(data) for i in range(n): x, y, z = data[i] ai = gamma_se_map_1_letter[seq[i]] for j in range(i+10, n): xj, yj, zj = data[j] aj = gamma_se_map_1_letter[seq[j]] r = ((x-xj)**2 + (y-yj)**2 + (z-zj)**2)**0.5 gamma = gamma_ijm[0][ai][aj] # gamma = 1 v_direct += gamma * interaction_well(r, 4.5, 6.5, 5) # v_direct += 1 return v_direct def compute_mediated_multiDensity(structure, protein_gamma_ijm, kappa=5.0): res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) weight_density = calculate_cb_weight_density(res_list, neighbor_list) r_min = 6.5 r_max = 9.5 # kappa = 5.0 min_seq_sep = 10 density_threshold = 2.6 weight_density_threshold = 3.0 density_kappa = 7.0 # phi_mediated_contact_well = np.zeros((2, 20,20)) v_mediated = 0 for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) rho_i = cb_density[res1globalindex] rho_i_weight = weight_density[res1globalindex] for res2 in get_neighbors_within_radius(neighbor_list, res1, r_max+2.0): res2index = get_local_index(res2) res2chain = get_chain(res2) res2globalindex = get_global_index(res_list, res2) rho_j = cb_density[res2globalindex] rho_j_weight = weight_density[res2globalindex] # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): res1type = get_res_type(res_list, res1) res2type = get_res_type(res_list, res2) protein_gamma = protein_gamma_ijm[0][res1type][res2type] water_gamma = water_gamma_ijm[0][res1type][res2type] rij = get_interaction_distance(res1, res2) _pij_protein = prot_water_switchFunc_sigmaProt( rho_i, rho_j, density_threshold, density_kappa) * protein_gamma _pij_water 
= prot_water_switchFunc_sigmaWater( rho_i, rho_j, density_threshold, density_kappa) * water_gamma v_mediated += (_pij_protein + _pij_water) * interaction_well(rij, r_min, r_max, kappa) heavy_gamma = protein_gamma_ijm[0][res1type][res2type] light_gamma = water_gamma_ijm[0][res1type][res2type] _pij_heavy = prot_water_switchFunc_sigmaProt( rho_i_weight, rho_j_weight, weight_density_threshold, density_kappa) * heavy_gamma _pij_light = prot_water_switchFunc_sigmaWater( rho_i_weight, rho_j_weight, weight_density_threshold, density_kappa) * light_gamma v_mediated += (_pij_heavy + _pij_light) * interaction_well(rij, r_min, r_max, kappa) return v_mediated def compute_burial_multiDensity(structure, burial_gamma, kappa=4.0): res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) weight_density = calculate_cb_weight_density(res_list, neighbor_list) rho_table = [[0.0, 3.0], [3.0, 6.0], [6.0, 9.0]] weight_rho_table = [[0.0, 4.0], [4.0, 8.0], [8.0, 28.0]] v_burial = 0 for i in range(3): for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) res1type = get_res_type(res_list, res1) res1density = cb_density[res1globalindex] res1weight = weight_density[res1globalindex] # print res1globalindex, res1index, res1chain, res1type, res1density v_burial += burial_gamma[i][res1type] * interaction_well(res1density, rho_table[i][0], rho_table[i][1], kappa) v_burial += burial_gamma[i][res1type] * interaction_well(res1weight, weight_rho_table[i][0], weight_rho_table[i][1], kappa) return v_burial def compute_direct_family_fold(structure, f_direct, kappa=5.0): res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) r_min = 4.5 r_max = 6.5 # kappa = 5 min_seq_sep = 10 # phi_pairwise_contact_well = np.zeros((20,20)) v_direct = 0 for 
res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) # print(get_interaction_atom(res1).get_vector()[2], type(get_interaction_atom(res1).get_vector()[2])) for res2 in get_neighbors_within_radius(neighbor_list, res1, r_max+2.0): res2index = get_local_index(res2) res2chain = get_chain(res2) res2globalindex = get_global_index(res_list, res2) # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): res1type = get_res_type(res_list, res1) res2type = get_res_type(res_list, res2) rij = get_interaction_distance(res1, res2) # gamma = gamma_ijm[0][res1type][res2type] gamma = f_direct[res1globalindex][res2globalindex] # phi_pairwise_contact_well[res1type][res2type] += interaction_well(rij, r_min, r_max, kappa) v_direct += gamma * interaction_well(rij, r_min, r_max, kappa) return v_direct def compute_mediated_family_fold(structure, f_water, f_protein, kappa=5.0): res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) r_min = 6.5 r_max = 9.5 # kappa = 5.0 min_seq_sep = 10 density_threshold = 2.6 density_kappa = 7.0 # phi_mediated_contact_well = np.zeros((2, 20,20)) v_mediated = 0 for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) rho_i = cb_density[res1globalindex] for res2 in get_neighbors_within_radius(neighbor_list, res1, r_max+2.0): res2index = get_local_index(res2) res2chain = get_chain(res2) res2globalindex = get_global_index(res_list, res2) rho_j = cb_density[res2globalindex] # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != 
res2chain and res2globalindex > res1globalindex): res1type = get_res_type(res_list, res1) res2type = get_res_type(res_list, res2) rij = get_interaction_distance(res1, res2) # protein_gamma = protein_gamma_ijm[0][res1type][res2type] # water_gamma = water_gamma_ijm[0][res1type][res2type] protein_gamma = f_protein[res1globalindex][res2globalindex] water_gamma = f_water[res1globalindex][res2globalindex] _pij_protein = prot_water_switchFunc_sigmaProt( rho_i, rho_j, density_threshold, density_kappa) * protein_gamma _pij_water = prot_water_switchFunc_sigmaWater( rho_i, rho_j, density_threshold, density_kappa) * water_gamma v_mediated += (_pij_protein + _pij_water) * interaction_well(rij, r_min, r_max, kappa) return v_mediated def compute_burial_family_fold(structure, f_burial, kappa=4.0): res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) rho_table = [[0.0, 3.0], [3.0, 6.0], [6.0, 9.0]] v_burial = 0 for i in range(3): for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) res1type = get_res_type(res_list, res1) res1density = cb_density[res1globalindex] # print res1globalindex, res1index, res1chain, res1type, res1density # b_gamma = burial_gamma[i][res1type] b_gamma = f_burial[res1globalindex][i] v_burial += b_gamma * interaction_well(res1density, rho_table[i][0], rho_table[i][1], kappa) return v_burial def get_pre_and_post(res_list, index): n = len(res_list) if index == 0: return res_list[0], res_list[1] elif index == n - 1: return res_list[index-1], res_list[index] else: return res_list[index-1], res_list[index+1] def compute_direct_multiLetter(structure, gamma_ijm, kappa=5.0): # gamma_ij_multiLetter = np.zeros((4, 4, 20, 20)) gamma_ij_multiLetter = np.zeros((80, 80)) for i in range(4): for j in range(4): for ii in range(20): for jj in range(20): 
gamma_ij_multiLetter[i*20+ii][j*20+jj] = gamma_ijm[0][ii][jj] res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) r_min = 4.5 r_max = 6.5 # kappa = 5 min_seq_sep = 10 # phi_pairwise_contact_well = np.zeros((20,20)) v_direct = 0 for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) # print(get_interaction_atom(res1).get_vector()[2], type(get_interaction_atom(res1).get_vector()[2])) for res2 in get_neighbors_within_radius(neighbor_list, res1, r_max+2.0): res2index = get_local_index(res2) res2chain = get_chain(res2) res2globalindex = get_global_index(res_list, res2) # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): res1type = get_res_type(res_list, res1) res2type = get_res_type(res_list, res2) res1_pre, res1_post = get_pre_and_post(res_list, res1globalindex) res2_pre, res2_post = get_pre_and_post(res_list, res2globalindex) res1_neighbor_type = get_neighbor_res_type(res1_pre, res1_post) res2_neighbor_type = get_neighbor_res_type(res2_pre, res2_post) rij = get_interaction_distance(res1, res2) gamma = gamma_ij_multiLetter[res1_neighbor_type*20+res1type][res2_neighbor_type*20+res2type] # phi_pairwise_contact_well[res1type][res2type] += interaction_well(rij, r_min, r_max, kappa) v_direct += gamma * interaction_well(rij, r_min, r_max, kappa) return v_direct def compute_mediated_multiLetter(structure, protein_gamma_ijm, kappa=5.0): # protein_gamma_ij_multiLetter = np.zeros((4, 4, 20, 20)) # water_gamma_ij_multiLetter = np.zeros((4, 4, 20, 20)) # for i in range(4): # for j in range(4): # protein_gamma_ij_multiLetter[i][j] = protein_gamma_ijm[0] # water_gamma_ij_multiLetter[i][j] = water_gamma_ijm[0] protein_gamma_ij_multiLetter = np.zeros((80, 80)) 
water_gamma_ij_multiLetter = np.zeros((80, 80)) for i in range(4): for j in range(4): for ii in range(20): for jj in range(20): protein_gamma_ij_multiLetter[i*20+ii][j*20+jj] = protein_gamma_ijm[0][ii][jj] water_gamma_ij_multiLetter[i*20+ii][j*20+jj] = water_gamma_ijm[0][ii][jj] res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) r_min = 6.5 r_max = 9.5 # kappa = 5.0 min_seq_sep = 10 density_threshold = 2.6 density_kappa = 7.0 # phi_mediated_contact_well = np.zeros((2, 20,20)) v_mediated = 0 for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) rho_i = cb_density[res1globalindex] for res2 in get_neighbors_within_radius(neighbor_list, res1, r_max+2.0): res2index = get_local_index(res2) res2chain = get_chain(res2) res2globalindex = get_global_index(res_list, res2) rho_j = cb_density[res2globalindex] # if res2index - res1index >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): if res2globalindex - res1globalindex >= min_seq_sep or (res1chain != res2chain and res2globalindex > res1globalindex): res1type = get_res_type(res_list, res1) res2type = get_res_type(res_list, res2) rij = get_interaction_distance(res1, res2) res1_pre, res1_post = get_pre_and_post(res_list, res1globalindex) res2_pre, res2_post = get_pre_and_post(res_list, res2globalindex) res1_neighbor_type = get_neighbor_res_type(res1_pre, res1_post) res2_neighbor_type = get_neighbor_res_type(res2_pre, res2_post) gamma_p = protein_gamma_ij_multiLetter[res1_neighbor_type*20+res1type][res2_neighbor_type*20+res2type] gamma_w = water_gamma_ij_multiLetter[res1_neighbor_type*20+res1type][res2_neighbor_type*20+res2type] _pij_protein = prot_water_switchFunc_sigmaProt( rho_i, rho_j, density_threshold, density_kappa) * gamma_p _pij_water = prot_water_switchFunc_sigmaWater( rho_i, rho_j, 
density_threshold, density_kappa) * gamma_w v_mediated += (_pij_protein + _pij_water) * interaction_well(rij, r_min, r_max, kappa) return v_mediated def compute_burial_multiLetter(structure, burial_gamma_multiLetter, kappa=4.0): burial_gamma_multiLetter = np.zeros((4, 3, 20)) for i in range(4): burial_gamma_multiLetter[i] = burial_gamma res_list = get_res_list(structure) neighbor_list = get_neighbor_list(structure) sequence = get_sequence_from_structure(structure) cb_density = calculate_cb_density(res_list, neighbor_list) rho_table = [[0.0, 3.0], [3.0, 6.0], [6.0, 9.0]] v_burial = 0 for i in range(3): for res1globalindex, res1 in enumerate(res_list): res1index = get_local_index(res1) res1chain = get_chain(res1) res1type = get_res_type(res_list, res1) res1_pre, res1_post = get_pre_and_post(res_list, res1globalindex) res1_neighbor_type = get_neighbor_res_type(res1_pre, res1_post) res1density = cb_density[res1globalindex] # print res1globalindex, res1index, res1chain, res1type, res1density v_burial += burial_gamma_multiLetter[res1_neighbor_type][i][res1type] * interaction_well(res1density, rho_table[i][0], rho_table[i][1], kappa) return v_burial # def compute_single_helix_orientation(structure): # res_list = get_res_list(structure) # for res1globalindex, res1 in enumerate(res_list): # for res2globalindex, res2 in enumerate(res_list): def read_beta_parameters(): ### directly copied from Nick Schafer's # os.chdir(parameter_directory) in_anti_HB = open("anti_HB", 'r').readlines() in_anti_NHB = open("anti_NHB", 'r').readlines() in_para_HB = open("para_HB", 'r').readlines() in_para_one = open("para_one", 'r').readlines() in_anti_one = open("anti_one", 'r').readlines() p_par = np.zeros((20)) p_anti = np.zeros((20)) p_antihb = np.zeros((20,20,2)) p_antinhb = np.zeros((20,20,2)) p_parhb = np.zeros((20,20,2)) for i in range(20): p_par[i] = float(in_para_one[i].strip()) p_anti[i] = float(in_anti_one[i].strip()) for j in range(20): p_antihb[i][j][0] = 
float(in_anti_HB[i].strip().split()[j]) p_antinhb[i][j][0] = float(in_anti_NHB[i].strip().split()[j]) p_parhb[i][j][0] = float(in_para_HB[i].strip().split()[j]) for i in range(20): for j in range(20): p_antihb[i][j][1] = float(in_anti_HB[i+21].strip().split()[j]) p_antinhb[i][j][1] = float(in_anti_NHB[i+21].strip().split()[j]) p_parhb[i][j][1] = float(in_para_HB[i+21].strip().split()[j]) return p_par, p_anti, p_antihb, p_antinhb, p_parhb def get_pap_gamma_APH(donor_idx, acceptor_idx, chain_i, chain_j, gamma_APH): # if chain_i == chain_j and abs(j-i) < 13 or abs(j-i) > 16: # if abs(j-i) < 13 or abs(j-i) > 16: # if i-j < 13 or i-j > 16: # if (donor_idx - acceptor_idx >= 13 and donor_idx - acceptor_idx <= 16) or chain_i != chain_j: if (donor_idx - acceptor_idx >= 13 and donor_idx - acceptor_idx <= 16): return gamma_APH else: return 0 def get_pap_gamma_AP(donor_idx, acceptor_idx, chain_i, chain_j, gamma_AP): if (donor_idx - acceptor_idx >= 17) or chain_i != chain_j: # if (donor_idx - acceptor_idx >= 17): return gamma_AP else: return 0 def compute_pap1(structure): all_res = list(structure.get_residues()) chains = [res.get_full_id()[2] for res in all_res] n = len(all_res) eta = 7 r0 = 8 e_aph = 0 e_ap = 0 for i in range(n-4): for j in range(4, n): chain_i = chains[i] chain_j = chains[j] gamma_APH = get_pap_gamma_APH(j, i, chain_i, chain_j, 1) gamma_AP = get_pap_gamma_AP(j, i, chain_i, chain_j, 0.4) dis = all_res[i]["CA"] - all_res[j]["CA"] dis_p4 = all_res[i+4]["CA"] - all_res[j-4]["CA"] rho1 = 0.5*(1+np.tanh(eta*(r0-dis))) rho2 = 0.5*(1+np.tanh(eta*(r0-dis_p4))) e_aph += -gamma_APH * rho1 * rho2 e_ap += -gamma_AP * rho1 * rho2 # if show > 1e-6: # print(i, j, show, rho1, rho2) # if i == 0: # print(i, j, show, rho1, rho2) # break # print(a_) return e_aph, e_ap def get_pap_gamma_P(donor_idx, acceptor_idx, chain_i, chain_j, gamma_P): if (donor_idx - acceptor_idx >= 9) or chain_i != chain_j: return gamma_P else: return 0 def compute_pap2(structure): all_res = 
list(structure.get_residues()) chains = [res.get_full_id()[2] for res in all_res] n = len(all_res) eta = 7 r0 = 8 e_p = 0 for i in range(n-4): for j in range(n-4): chain_i = chains[i] chain_j = chains[j] gamma_P = get_pap_gamma_AP(j, i, chain_i, chain_j, 0.4) dis = all_res[i]["CA"] - all_res[j]["CA"] dis_p4 = all_res[i+4]["CA"] - all_res[j+4]["CA"] rho1 = 0.5*(1+np.tanh(eta*(r0-dis))) rho2 = 0.5*(1+np.tanh(eta*(r0-dis_p4))) e_p += -gamma_P * rho1 * rho2 # if show > 1e-6: # print(i, j, show, rho1, rho2) # if i == 0: # print(i, j, show, rho1, rho2) # break # print(a_) return e_p def dis(a, b): return ((a[0]-b[0])**2 + (a[1]-b[1])**2 + (a[2]-b[2])**2)**0.5 def compute_side_chain_energy_for_x(x, means, precisions_chol, log_det, weights): n_features = 3 n_components, _ = means.shape mean_dot_precisions_chol = np.zeros((3,3)) log_prob = np.zeros(3) for i in range(n_components): mean_dot_precisions_chol[i] = np.dot(means[i], precisions_chol[i]) y = np.dot(x, precisions_chol[i]) - mean_dot_precisions_chol[i] log_prob[i] = np.sum(np.square(y)) log_gaussian_prob = -.5 * (n_features * np.log(2 * np.pi) + log_prob) + log_det c = np.max(log_gaussian_prob + np.log(weights)) score = np.log(np.sum(np.exp(log_gaussian_prob + np.log(weights) - c))) + c kt = 1 E_side_chain = -score*kt # print(E_side_chain) return E_side_chain def read_fasta(fastaFile): seq = "" with open(fastaFile, "r") as f: for line in f: if line[0] == ">": pass else: # print(line) seq += line.strip() return seq def compute_side_chain_energy(structure, seq): E_side_chain_energy = 0 # parser = PDBParser() # pdbFile = "/Users/weilu/Research/server/feb_2020/compare_side_chain_with_and_without/native/256_cbd_submode_7_debug/crystal_structure.pdb" # fastaFile = "/Users/weilu/Research/server/feb_2020/compare_side_chain_with_and_without/native/256_cbd_submode_7_debug/crystal_structure.fasta" # structure = parser.get_structure("x", pdbFile) print(seq) means_dic = {} precisions_chol_dic = {} log_det_dic = {} weights_dic = 
{} res_type_list = ['GLY', 'ALA', 'VAL', 'CYS', 'PRO', 'LEU', 'ILE', 'MET', 'TRP', 'PHE', 'SER', 'THR', 'TYR', 'GLN', 'ASN', 'LYS', 'ARG', 'HIS', 'ASP', 'GLU'] for res_type in res_type_list: if res_type == "GLY": continue means = np.loadtxt(f"/Users/weilu/opt/parameters/side_chain/{res_type}_means.txt") precisions_chol = np.loadtxt(f"/Users/weilu/opt/parameters/side_chain/{res_type}_precisions_chol.txt").reshape(3,3,3) log_det = np.loadtxt(f"/Users/weilu/opt/parameters/side_chain/{res_type}_log_det.txt") weights = np.loadtxt(f"/Users/weilu/opt/parameters/side_chain/{res_type}_weights.txt") means_dic[res_type] = means precisions_chol_dic[res_type] = precisions_chol log_det_dic[res_type] = log_det weights_dic[res_type] = weights for res in structure.get_residues(): if res.get_full_id()[1] != 0: continue # x_com = get_side_chain_center_of_mass(res) # resname = res.resname resname = one_to_three(seq[res.id[1]-1]) if resname == "GLY": continue try: n = res["N"].get_coord() ca = res["CA"].get_coord() c = res["C"].get_coord() except: continue x_com = res["CB"].get_coord() x = np.array([dis(x_com, n), dis(x_com, ca), dis(x_com, c)]) r_ca_com = dis(x_com, ca) # resname = "TYR" if resname == "GLY": side_chain_energy = 0 else: side_chain_energy = compute_side_chain_energy_for_x(x, means_dic[resname], precisions_chol_dic[resname], log_det_dic[resname], weights_dic[resname]) if abs(side_chain_energy) > 10: print(res.id[1], resname, x_com, x, round(side_chain_energy,3), round(r_ca_com,3)) # print(res.id[1], resname, x_com, round(side_chain_energy,3), round(r_ca_com,3)) E_side_chain_energy += side_chain_energy return E_side_chain_energy def get_side_chain_center_of_mass(atoms): # ensure complete first total = np.array([0., 0., 0.]) total_mass = 0 for atom in atoms: if atom.get_name() in ["N", "CA", "C", "O", "OXT"]: continue if atom.element == "H": continue total += atom.mass * atom.get_coord() total_mass += atom.mass # print(atom.get_name(), atom.get_coord()) x_com = total / 
total_mass return x_com def compute_side_chain_exclude_volume_energy(structure, fileLocation='./cbd_cbd_real_contact_symmetric.csv'): gamma_se_map_1_letter = { 'A': 0, 'R': 1, 'N': 2, 'D': 3, 'C': 4, 'Q': 5, 'E': 6, 'G': 7, 'H': 8, 'I': 9, 'L': 10, 'K': 11, 'M': 12, 'F': 13, 'P': 14, 'S': 15, 'T': 16, 'W': 17, 'Y': 18, 'V': 19} r_min_table = np.zeros((20,20)) r_max_table = np.zeros((20,20)) # fileLocation = '/Users/weilu/Research/server/mar_2020/cmd_cmd_exclude_volume/cbd_cbd_real_contact_symmetric.csv' df = pd.read_csv(fileLocation) for i, line in df.iterrows(): res1 = line["ResName1"] res2 = line["ResName2"] r_min_table[gamma_se_map_1_letter[three_to_one(res1)]][gamma_se_map_1_letter[three_to_one(res2)]] = line["r_min"] r_min_table[gamma_se_map_1_letter[three_to_one(res2)]][gamma_se_map_1_letter[three_to_one(res1)]] = line["r_min"] r_max_table[gamma_se_map_1_letter[three_to_one(res1)]][gamma_se_map_1_letter[three_to_one(res2)]] = line["r_max"] r_max_table[gamma_se_map_1_letter[three_to_one(res2)]][gamma_se_map_1_letter[three_to_one(res1)]] = line["r_max"] all_res = get_res_list(structure) n = len(all_res) e = 0 for i in range(n): for j in range(i+1, n): res1 = all_res[i] res2 = all_res[j] resname1 = res1.resname resname2 = res2.resname if resname1 == "GLY" or resname2 == "GLY": continue cbd_1 = get_side_chain_center_of_mass(res1.get_atoms()) cbd_2 = get_side_chain_center_of_mass(res2.get_atoms()) r = dis(cbd_1, cbd_2) r_max = r_max_table[gamma_se_map_1_letter[three_to_one(resname1)]][gamma_se_map_1_letter[three_to_one(resname2)]] r_min = r_min_table[gamma_se_map_1_letter[three_to_one(resname1)]][gamma_se_map_1_letter[three_to_one(resname2)]] if r_max - r_min < 0.1: print(res1, res2, r_max, r_min) e += np.heaviside(r_max-r, 0)*((r-r_max)/(r_max-r_min))**2 print(res1, cbd_1) return e
mit
2,329,335,013,494,113,300
46.458532
248
0.574843
false
2.978046
true
false
false
simpeg/simpegem
simpegEM/Analytics/FDEMcasing.py
2
4088
from SimPEG import Utils, np from scipy.constants import mu_0, epsilon_0 from simpegEM.Utils.EMUtils import k def getKc(freq,sigma,a,b,mu=mu_0,eps=epsilon_0): a = float(a) b = float(b) # return 1./(2*np.pi) * np.sqrt(b / a) * np.exp(-1j*k(freq,sigma,mu,eps)*(b-a)) return np.sqrt(b / a) * np.exp(-1j*k(freq,sigma,mu,eps)*(b-a)) def _r2(xyz): return np.sum(xyz**2,1) def _getCasingHertzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): Kc1 = getKc(freq,sigma[1],a,b,mu[1],eps) nobs = obsloc.shape[0] dxyz = obsloc - np.c_[np.ones(nobs)]*np.r_[srcloc] r2 = _r2(dxyz[:,:2]) sqrtr2z2 = np.sqrt(r2 + dxyz[:,2]**2) k2 = k(freq,sigma[2],mu[2],eps) return Kc1 * moment / (4.*np.pi) *np.exp(-1j*k2*sqrtr2z2) / sqrtr2z2 def _getCasingHertzMagDipoleDeriv_r(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): HertzZ = _getCasingHertzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) nobs = obsloc.shape[0] dxyz = obsloc - np.c_[np.ones(nobs)]*np.r_[srcloc] r2 = _r2(dxyz[:,:2]) sqrtr2z2 = np.sqrt(r2 + dxyz[:,2]**2) k2 = k(freq,sigma[2],mu[2],eps) return -HertzZ * np.sqrt(r2) / sqrtr2z2 * (1j*k2 + 1./ sqrtr2z2) def _getCasingHertzMagDipoleDeriv_z(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): HertzZ = _getCasingHertzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) nobs = obsloc.shape[0] dxyz = obsloc - np.c_[np.ones(nobs)]*np.r_[srcloc] r2z2 = _r2(dxyz) sqrtr2z2 = np.sqrt(r2z2) k2 = k(freq,sigma[2],mu[2],eps) return -HertzZ*dxyz[:,2] /sqrtr2z2 * (1j*k2 + 1./sqrtr2z2) def _getCasingHertzMagDipole2Deriv_z_r(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): HertzZ = _getCasingHertzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) dHertzZdr = _getCasingHertzMagDipoleDeriv_r(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) nobs = obsloc.shape[0] dxyz = obsloc - np.c_[np.ones(nobs)]*np.r_[srcloc] r2 = _r2(dxyz[:,:2]) r = np.sqrt(r2) z = dxyz[:,2] sqrtr2z2 = np.sqrt(r2 + z**2) k2 = 
k(freq,sigma[2],mu[2],eps) return dHertzZdr*(-z/sqrtr2z2)*(1j*k2+1./sqrtr2z2) + HertzZ*(z*r/sqrtr2z2**3)*(1j*k2 + 2./sqrtr2z2) def _getCasingHertzMagDipole2Deriv_z_z(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): HertzZ = _getCasingHertzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) dHertzZdz = _getCasingHertzMagDipoleDeriv_z(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) nobs = obsloc.shape[0] dxyz = obsloc - np.c_[np.ones(nobs)]*np.r_[srcloc] r2 = _r2(dxyz[:,:2]) r = np.sqrt(r2) z = dxyz[:,2] sqrtr2z2 = np.sqrt(r2 + z**2) k2 = k(freq,sigma[2],mu[2],eps) return (dHertzZdz*z + HertzZ)/sqrtr2z2*(-1j*k2 - 1./sqrtr2z2) + HertzZ*z/sqrtr2z2**3*(1j*k2*z + 2.*z/sqrtr2z2) def getCasingEphiMagDipole(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): return 1j * omega(freq) * mu * _getCasingHertzMagDipoleDeriv_r(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) def getCasingHrMagDipole(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): return _getCasingHertzMagDipole2Deriv_z_r(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) def getCasingHzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): d2HertzZdz2 = _getCasingHertzMagDipole2Deriv_z_z(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) k2 = k(freq,sigma[2],mu[2],eps) HertzZ = _getCasingHertzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) return d2HertzZdz2 + k2**2 * HertzZ def getCasingBrMagDipole(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): return mu_0 * getCasingHrMagDipole(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment) def getCasingBzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu=mu_0*np.ones(3),eps=epsilon_0,moment=1.): return mu_0 * getCasingHzMagDipole(srcloc,obsloc,freq,sigma,a,b,mu,eps,moment)
mit
-7,403,261,594,443,942,000
40.72449
114
0.662427
false
2.091049
false
false
false
homeworkprod/better-bomb-defusal-manual
bombdefusalmanual/subjects/passwords.py
1
2091
# -*- coding: utf-8 -*- """ On the Subject of Passwords :Copyright: 2015 Jochen Kupperschmidt :License: MIT, see LICENSE for details. """ from string import ascii_lowercase PASSWORDS = frozenset([ 'about', 'after', 'again', 'below', 'could', 'every', 'first', 'found', 'great', 'house', 'large', 'learn', 'never', 'other', 'place', 'plant', 'point', 'right', 'small', 'sound', 'spell', 'still', 'study', 'their', 'there', 'these', 'thing', 'think', 'three', 'water', 'where', 'which', 'world', 'would', 'write', ]) def ask_for_letters_and_match_passwords(ui, position_index, passwords): letters = ask_for_letters_in_position(ui, position_index) matches = list(get_passwords_matching_letters_in_position(passwords, position_index, letters)) if not matches: ui.display_instruction('No password matches!') return if len(matches) == 1: ui.display_instruction(matches[0]) return print() print(' Multiple candidates:') for match in matches: print(' ->', match) ask_for_letters_and_match_passwords(ui, position_index + 1, matches) def ask_for_letters_in_position(ui, position_index): question_label = 'Which letters can be chosen at position {:d}?' \ .format(position_index + 1) values = ui.ask_for_text(question_label) return extract_letters(values) def extract_letters(value): """Select and normalize ASCII letters, drop anything else.""" lowercase_values = frozenset(map(str.lower, value)) return lowercase_values.intersection(ascii_lowercase) def get_passwords_matching_letters_in_position(passwords, position, letters): """Return all passwords that contain any of the given letters at the indicated position. """ predicate = lambda password: password[position] in letters return filter(predicate, passwords) def execute(ui): ask_for_letters_and_match_passwords(ui, 0, PASSWORDS)
mit
6,338,308,990,574,762,000
29.75
77
0.618843
false
3.879406
false
false
false
kristerhedfors/nbportscan
fabfile.py
1
1423
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright(c) 2014 Krister Hedfors # # # Example: # Portscan hosta, hostb, hostc (a->a, a->b, a->c, b->a, b->b, ...) # on all tcp port numbers listening on at least one of hosta, hostb, hostc: # # $ fab -H hosta,hostb,hostc list_open_ports > ports.txt # $ plist="`grep out:.. ports.txt | cut -d' ' -f4 | sort -nu | tr '\n' ' '`" # $ fab -H hosta,hostb,hostc portscan:"hosta hostb hostc $plist" # # You can also compare the results of two extensive portscans between various # src and dst addresses using the regular `diff` command. # import zlib from fabric.api import task from fabric.api import run from fabric.api import hide from fabric.tasks import Task class Portscan(Task): ''' example: fab portscan:"127.0.0.1 10.0.0.2-100 21-23 25 80 443" ''' name = 'portscan' def run(self, hosts_and_ports): portscanner = open('nbportscan.py').read() cmd = 'python -c "{0}" {1}'.format( portscanner, hosts_and_ports ) with hide('running'): run(cmd) class ListOpenPorts(Task): ''' show listening TCP-ports ''' name = 'list_open_ports' def run(self): cmd = "netstat -nlt" cmd += r"|sed -rne 's/.* ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+):([0-9]+).*/\1 \2/p'" with hide('running'): run(cmd) portscan = Portscan() list_open_ports = ListOpenPorts()
bsd-3-clause
4,957,014,512,817,538,000
24.410714
86
0.591708
false
2.921971
false
false
false
whelan957/leetcode
python3/Array/leetcode075. Sort Colors.py
1
1433
# Given an array with n objects colored red, white or blue, sort them in-place so that objects of the same color are adjacent, with the colors in the order red, white and blue. # Here, we will use the integers 0, 1, and 2 to represent the color red, white, and blue respectively. # Note: You are not suppose to use the library's sort function for this problem. # Example: # Input: [2,0,2,1,1,0] # Output: [0,0,1,1,2,2] # Follow up: # A rather straight forward solution is a two-pass algorithm using counting sort. # First, iterate the array counting number of 0's, 1's, and 2's, then overwrite array with total number of 0's, then 1's and followed by 2's. # Could you come up with a one-pass algorithm using only constant space? class Solution: def sortColors(self, nums: List[int]) -> None: """ Do not return anything, modify nums in-place instead. """ # nx is pointing to the end of the subarrayx n0 = -1 n1 = -1 n2 = -1 for i in range(len(nums)): if nums[i] == 0: n2 += 1 n1 += 1 n0 += 1 nums[n2] = 2 nums[n1] = 1 nums[n0] = 0 elif nums[i] == 1: n2 += 1 n1 += 1 nums[n2] = 2 nums[n1] = 1 elif nums[i] == 2: n2 += 1 nums[n2] = 2
gpl-3.0
-977,085,949,206,133,100
33.95122
176
0.540823
false
3.637056
false
false
false
Data2Semantics/linkitup
linkitup/nifregistry/plugin.py
1
2471
''' Created on 26 Mar 2013 @author: hoekstra ''' from flask.ext.login import login_required import xml.etree.ElementTree as ET import requests import re from linkitup import app from linkitup.util.baseplugin import plugin from linkitup.util.provenance import provenance NIF_REGISTRY_URL = "http://nif-services.neuinfo.org/nif/services/registry/search?q=" ## TODO: generate direct derivedfrom relations between tags/categories and results. This requires successive querying of the NIF endpoint. @app.route('/nifregistry', methods=['POST']) @login_required @plugin(fields=[('tags','id','name'),('categories','id','name')], link='mapping') @provenance() def link_to_nif_registry(*args, **kwargs): # Retrieve the article id from the wrapper article_id = kwargs['article']['id'] app.logger.debug("Running NIF Registry plugin for article {}".format(article_id)) # Rewrite the tags and categories of the article in a form understood by the NIF Registry match_items = kwargs['inputs'] query_string = "".join([ "'{}'".format(match_items[0]['label']) ] + [ " OR '{}'".format(item['label']) for item in match_items[1:]]) query_url = NIF_REGISTRY_URL + query_string app.logger.debug("Query URL: {}".format(query_url)) response = requests.get(query_url) tree = ET.fromstring(response.text.encode('utf-8')) matches = {} for result in tree.iter('registryResult') : match_uri = result.attrib['url'] web_uri = result.attrib['url'] display_uri = result.attrib['shortName'] if display_uri == "" or display_uri == None : display_uri = result.attrib['name'] id_base = re.sub('\s|\(|\)','_',result.attrib['name']) description = result[0].text[:600] nifid = result.attrib['id'] entry_type = result.attrib['type'] # Create the match dictionary match = {'type': "link", 'uri': match_uri, 'web': web_uri, 'show': display_uri, 'short': id_base, 'description': description, 'extra': nifid, 'subscript': entry_type, 'original':article_id} # Append it to all matches matches[match_uri] = match # Return the matches return matches
mit
840,909,001,955,806,600
27.744186
138
0.582355
false
3.928458
false
false
false
leiferikb/bitpop
src/native_client/pnacl/driver/pnacl-ranlib.py
3
1269
#!/usr/bin/python # Copyright (c) 2012 The Native Client Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # # IMPORTANT NOTE: If you make local mods to this file, you must run: # % pnacl/build.sh driver # in order for them to take effect in the scons build. This command # updates the copy in the toolchain/ tree. # from driver_tools import Run, ParseArgs from driver_env import env EXTRA_ENV = { 'ARGS': '' } # just pass all args through to 'ARGS' and eventually to the underlying tool PATTERNS = [ ( '(.*)', "env.append('ARGS', $0)") ] def main(argv): if len(argv) == 0: print get_help(argv) return 1 env.update(EXTRA_ENV) ParseArgs(argv, PATTERNS) Run('"${RANLIB}" --plugin=${GOLD_PLUGIN_SO} ${ARGS}') # only reached in case of no errors return 0 def get_help(unused_argv): return """ Usage: %s [options] archive Generate an index to speed access to archives The options are: @<file> Read options from <file> -t Update the archive's symbol map timestamp -h --help Print this help message -v --version Print version information """ % env.getone('SCRIPT_NAME')
gpl-3.0
8,640,538,835,697,295,000
31.538462
76
0.647754
false
3.594901
false
false
false
HPPTECH/hpp_IOSTressTest
IOST_0.23/Libs/IOST_Testcase/IOST_SaveConfig.py
3
15318
#!/usr/bin/python #====================================================================== # # Project : hpp_IOStressTest # File : Libs/IOST_Testcase/IOST_SaveConfig.py # Date : Dec 14, 2016 # Author : HuuHoang Nguyen # Contact : hhnguyen@apm.com # : hoangnh.hpp@gmail.com # License : MIT License # Copyright : 2016 # Description: The hpp_IOStressTest is under the MIT License, a copy of license which may be found in LICENSE # #====================================================================== import io import os import sys import time from IOST_Basic import * from IOST_Config import * from IOST_Testcase import * import gtk import gtk.glade import gobject #====================================================================== try: IOST_DBG_EN if IOST_DBG_EN: IOST_SaveConfig_DebugEnable =0 IOST_SaveConfig_DebugLevel =IOST_DBG_L01 else: IOST_SaveConfig_DebugEnable =0 IOST_SaveConfig_DebugLevel =IOST_DBG_L01 except: IOST_DBG_EN = False IOST_SaveConfig_DebugEnable =0 IOST_SaveConfig_DebugLevel =IOST_DBG_L01 #====================================================================== class IOST_SaveConfig(): """ """ #---------------------------------------------------------------------- def __init__(self, glade_filename, window_name="", object_name="", builder=None): "" self.IOST_SaveConfig_WindowName = window_name self.IOST_SaveConfig_ObjectName = object_name if not builder: self.IOST_SaveConfig_Builder = gtk.Builder() self.IOST_SaveConfig_Builder.add_from_file(glade_filename) self.IOST_SaveConfig_Builder.connect_signals(self) else: self.IOST_SaveConfig_Builder = builder self.IOST_SaveConfig_Builder.connect_signals(self) #---------------------------------------------------------------------- def SaveConfig_Get_Objs(self, window_name): "" self.CreateObjsDictFromDict(window_name, self.IOST_Objs[window_name], self.IOST_SaveConfig_Builder, 0) self.IOST_Objs[window_name][self.IOST_SaveConfig_ObjectName].set_keep_above(True) self.SaveConfig_Init_Objs() 
#---------------------------------------------------------------------- def SaveConfig_Init_Objs(self): self.SaveConfig_Set_StationInfoPath_Obj() self.SaveConfig_Set_StationInfoSetup_Obj() self.SaveConfig_Set_TestcasesPath_Obj() self.SaveConfig_Set_TestcasesSetup_Obj() self.SaveConfig_StationInfoPath = self.IOST_Data['IOST_RunPath']+'/StationInfo.json' self.SaveConfig_TestcasesPath = self.IOST_Data['IOST_RunPath']+'/Testcases.json' self.SaveConfig_StationInfo_SaveEnable = False self.SaveConfig_Testcase_SaveEnable = False self.SaveConfig_Testcase_IP_IsDisableb = False #---------------------------------------------------------------------- def SaveConfig_Show(self): self.IOST_Objs[self.IOST_SaveConfig_WindowName][self.IOST_SaveConfig_ObjectName].show() #---------------------------------------------------------------------- # def SaveConfig_Run(self): # self.IOST_Objs[self.IOST_SaveConfig_WindowName][self.IOST_SaveConfig_ObjectName].run() #---------------------------------------------------------------------- def SaveConfig_Hide(self): self.IOST_Objs[self.IOST_SaveConfig_WindowName][self.IOST_SaveConfig_ObjectName].hide() #---------------------------------------------------------------------- def SaveConfig_Destroy(self): self.IOST_Objs[self.IOST_SaveConfig_WindowName][self.IOST_SaveConfig_ObjectName].destroy() #---------------------------------------------------------------------- def on_IOST_SaveConfig_Skylark_delete_event(self, object, event, data=None): "" self.SaveConfig_Hide() return True #---------------------------------------------------------------------- def on_IOST_SaveConfig_Skylark_destroy_event(self, object, event, data=None): "" self.SaveConfig_Hide() return True #---------------------------------------------------------------------- def on_IOST_SaveConfig_StationInfoEnable_CB_toggled(self, object, data=None): "" res = object.get_active() if res: path = self.SaveConfig_StationInfoPath else: path='' iost_print(IOST_SaveConfig_DebugLevel, path, 
"StationInfoEnable_CB -> The path is :") self.SaveConfig_Set_StationInfoPath_Obj(res, path) self.SaveConfig_Set_StationInfoSetup_Obj(res) self.SaveConfig_StationInfo_SaveEnable = res #---------------------------------------------------------------------- def on_IOST_SaveConfig_TestcasesEnable_CB_toggled(self, object, data=None): "" res = object.get_active() if res: path = self.SaveConfig_TestcasesPath else: path='' iost_print(IOST_SaveConfig_DebugLevel, path, "TestcasesEnable_CB -> The path is :") self.SaveConfig_Set_TestcasesPath_Obj(res, path) self.SaveConfig_Set_TestcasesSetup_Obj(res) self.SaveConfig_Testcase_SaveEnable = res def on_IOST_SaveConfig_SaveTestcases_OfIPDisable_CB_toggled(self, object, data=None): "" res = object.get_active() self.SaveConfig_Testcase_IP_IsDisableb = res #---------------------------------------------------------------------- def SaveConfig_Set_StationInfoPath_Obj(self, sensitive=False, path=""): self.IOST_Objs[self.IOST_SaveConfig_WindowName]['_StationInfoPath_TE'].set_sensitive(sensitive) self.IOST_Objs[self.IOST_SaveConfig_WindowName]['_StationInfoPath_TE'].set_text(path) #---------------------------------------------------------------------- def SaveConfig_Set_StationInfoSetup_Obj(self, sensitive=False): self.IOST_Objs[self.IOST_SaveConfig_WindowName]['_StationInfoSetup_B'].set_sensitive(sensitive) #---------------------------------------------------------------------- def SaveConfig_Set_TestcasesPath_Obj(self, sensitive=False, path=""): self.IOST_Objs[self.IOST_SaveConfig_WindowName]['_TestcasesPath_TE'].set_sensitive(sensitive) self.IOST_Objs[self.IOST_SaveConfig_WindowName]['_TestcasesPath_TE'].set_text(path) #---------------------------------------------------------------------- def SaveConfig_Set_TestcasesSetup_Obj(self, sensitive=False): self.IOST_Objs[self.IOST_SaveConfig_WindowName]['_TestcasesSetup_B'].set_sensitive(sensitive) #---------------------------------------------------------------------- def 
on_IOST_SaveConfig_StationInfoSetup_B_clicked(self, object, data=None): "" text = "Save configure file to < Station Info >" self.SaveConfig_StationInfoPath = self.SaveConfig_SaveDialog(self.SaveConfig_StationInfoPath, text) iost_print(IOST_SaveConfig_DebugLevel, self.SaveConfig_StationInfoPath, "self.SaveConfig_StationInfoPath" ) self.SaveConfig_Set_StationInfoPath_Obj(True, self.SaveConfig_StationInfoPath) #---------------------------------------------------------------------- def on_IOST_SaveConfig_TestcasesSetup_B_clicked(self, object, data=None): "" text = "Save configure file to < Testcases >" self.SaveConfig_TestcasesPath = self.SaveConfig_SaveDialog(self.SaveConfig_TestcasesPath, text) iost_print(IOST_SaveConfig_DebugLevel, self.SaveConfig_TestcasesPath, "self.SaveConfig_TestcasesPath") self.SaveConfig_Set_TestcasesPath_Obj(True, self.SaveConfig_TestcasesPath) #---------------------------------------------------------------------- def on_IOST_SaveConfig_Cancel_B_clicked(self,object, data=None): "" self.SaveConfig_Hide() return True #---------------------------------------------------------------------- def on_IOST_SaveConfig_Save_B_clicked(self, object, data=None): """ self.SaveConfig_StationInfoPath = self.IOST_Data['IOST_RunPath']+'/StationInfo.json' self.SaveConfig_TestcasesPath = self.IOST_Data['IOST_RunPath']+'/Testcases.json' self.SaveConfig_StationInfo_SaveEnable = False self.SaveConfig_Testcase_SaveEnable = False """ iost_print(IOST_SaveConfig_DebugLevel, self.SaveConfig_StationInfoPath, "self.SaveConfig_StationInfoPath :") iost_print(IOST_SaveConfig_DebugLevel, self.SaveConfig_StationInfo_SaveEnable, "self.SaveConfig_StationInfo_SaveEnable :") iost_print(IOST_SaveConfig_DebugLevel, self.SaveConfig_TestcasesPath, "self.SaveConfig_TestcasesPath :") iost_print(IOST_SaveConfig_DebugLevel, self.SaveConfig_Testcase_SaveEnable, "self.SaveConfig_Testcase_SaveEnable :") iost_print(IOST_SaveConfig_DebugLevel, self.SaveConfig_Testcase_IP_IsDisableb, 
"self.SaveConfig_Testcase_IP_IsDisableb :") self.save_cfg = IOST_Config() """ 'self.save_cfg.IOST_Data' will store Testcases 'self.save_cfg.IOST_Objs' will store StationInfo """ # Begin process to StationInfo self.save_cfg.IOST_Objs.update({"StationInfo": self.IOST_Data["StationInfo"]}) # Begin process to Testcase ip_checked={} #init ip_check variable for ip in self.IOST_Data['ObjectUpdate']: if ip == "AutoMail": continue if ip_checked.has_key(ip[:-1]): continue else: ip_checked.update({ ip[:-1]: False } ) pprint (ip_checked) # for ip in self.IOST_Data['ObjectUpdate']: #If ip is autoMail, ignore it if ip == "AutoMail": continue #If ip was checked, ignore it if ip_checked[ip[:-1]]: continue # if check button 'Save the testcases of IP which have disabed' : Enable if self.SaveConfig_Testcase_IP_IsDisableb: # print ip, ' : ', self.IOST_Data[ip[:-1]+'_PortNum'] for ip_port in range(0, self.IOST_Data[ip[:-1]+'_PortNum']): if len( self.IOST_Data[ip[:-1]+str(ip_port)] ) <= 1: continue else: if not ip_checked[ip[:-1]]: ip_checked[ip[:-1]] = True self.save_cfg.IOST_Data.update( { ip[:-1] : self.IOST_Data[ip[:-1]] } ) self.save_cfg.IOST_Data.update( { ip[:-1]+str(ip_port) : self.IOST_Data[ip[:-1]+str(ip_port)] } ) # iost_print(IOST_SaveConfig_DebugLevel, ip_port, "%s" %(ip[:-1]) ) # if not ip_checked[ip[:-1]]: # ip_checked[ip[:-1]] = True # self.save_cfg.IOST_Data.update( { ip[:-1] : self.IOST_Data[ip[:-1]] } ) # self.save_cfg.IOST_Data.update( { ip[:-1]+str(ip_port) : self.IOST_Data[ip[:-1]+str(ip_port)] } ) # if check button 'Save the testcases of IP which have disabed' : Disable else: if not Str2Bool(self.IOST_Data[ip[:-1]]): pass else: for ip_port in range(0, self.IOST_Data[ip[:-1]+'_PortNum']): if Str2Bool( self.IOST_Data[ip[:-1]+str(ip_port)][0] ) : if not ip_checked[ip[:-1]]: ip_checked[ip[:-1]] = True self.save_cfg.IOST_Data.update( { ip[:-1] : self.IOST_Data[ip[:-1]] } ) self.save_cfg.IOST_Data.update( { ip[:-1]+str(ip_port) : self.IOST_Data[ip[:-1]+str(ip_port)] } ) 
else: continue if IOST_SaveConfig_DebugEnable: print "==================================================================" print " Station_info " print "==================================================================" pprint (self.save_cfg.IOST_Objs) print "==================================================================\n\n" print "\n\n==================================================================" print " Testcases " print "==================================================================" pprint (self.save_cfg.IOST_Data) print "==================================================================\n\n" if self.SaveConfig_StationInfo_SaveEnable: iost_print(IOST_SaveConfig_DebugLevel, "Save Configure File to StationInfo", "") self.SaveConfig_WriteConfigFile(self.SaveConfig_StationInfoPath, self.save_cfg.IOST_Objs) if self.SaveConfig_Testcase_SaveEnable: iost_print(IOST_SaveConfig_DebugLevel, "Save Configure File to the Testcase", "") self.SaveConfig_WriteConfigFile(self.SaveConfig_TestcasesPath, self.save_cfg.IOST_Data) self.SaveConfig_Hide() return True def SaveConfig_SaveDialog(self, current_filename, title_str=""): "" if title_str=="": title_str = "Save Config Files" dlg = gtk.FileChooserDialog(title=title_str, action=gtk.FILE_CHOOSER_ACTION_SAVE) dlg.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL) dlg.add_button(gtk.STOCK_SAVE, gtk.RESPONSE_OK) dlg.set_do_overwrite_confirmation(True) #Set file name if os.path.isdir(current_filename): dlg.set_current_name("") dlg.set_current_folder( current_filename ) else: dir_path = os.path.dirname(os.path.realpath(current_filename)) dlg.set_current_folder( dir_path ) dlg.set_current_name(os.path.basename(current_filename)) if dlg.run() == gtk.RESPONSE_OK: filename = dlg.get_filename() iost_print(IOST_SaveConfig_DebugLevel, filename, "SaveConfig_SaveDialog --> filename :") dlg.destroy() return filename else: dlg.destroy() return current_filename def SaveConfig_WriteConfigFile(self, file_name='', data=None): "" try: 
self.WriteFile(file_name, data) except: MsgBox( "%s: %s" %("Can't open file for writting", file_name) ) return False # try: # # f = open(filename, "w") # # f.write(buff) # # f.close() # # WriteFile(self, file_name="", data=None): # # self.WriteFile(self.IOST_Data["IOST_Path"] + "/Temp_Configs/Data_Config_Before_ResetAll.json", self.IOST_Data) # self.WriteFile(filename) # except: # dlg.destroy() # MsgBox("%s: %s" % (_("Can't open file for writting"), filename) ) # return
mit
-7,475,310,437,211,625,000
43.923754
130
0.512861
false
4.038492
true
false
false
rocket-league-replays/rocket-league-replays
rocket_league/urls.py
1
3392
"""URL config for rocket_league project.""" from cms.forms import CMSPasswordChangeForm from cms.sitemaps import registered_sitemaps from cms.views import TextTemplateView from django.conf import settings from django.conf.urls import include, patterns, url from django.conf.urls.static import static from django.contrib import admin from django.views import generic from rest_framework import routers from .apps.replays import views as api_views from .apps.users.views import StreamDataAPIView admin.autodiscover() router = routers.DefaultRouter() router.register(r'maps', api_views.MapViewSet) router.register(r'replays', api_views.ReplayViewSet) router.register(r'replay-packs', api_views.ReplayPackViewSet) router.register(r'players', api_views.PlayerViewSet) router.register(r'goals', api_views.GoalViewSet) router.register(r'seasons', api_views.SeasonViewSet) router.register(r'components', api_views.ComponentViewSet) urlpatterns = patterns( "", # Admin URLs. url(r'^admin/password_change/$', 'django.contrib.auth.views.password_change', {'password_change_form': CMSPasswordChangeForm}, name='password_change'), url(r'^admin/password_change/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'), url(r"^admin/", include(admin.site.urls)), url(r'^replays/', include('rocket_league.apps.replays.urls', namespace='replay')), url(r'^replay-packs/', include('rocket_league.apps.replays.replaypack_urls', namespace='replaypack')), # Permalink redirection service. url(r"^r/(?P<content_type_id>\d+)-(?P<object_id>[^/]+)/$", "django.contrib.contenttypes.views.shortcut", name="permalink_redirect"), # Google sitemap service. url(r"^sitemap.xml$", "django.contrib.sitemaps.views.index", {"sitemaps": registered_sitemaps}), url(r"^sitemap-(?P<section>.+)\.xml$", "django.contrib.sitemaps.views.sitemap", {"sitemaps": registered_sitemaps}), # Basic robots.txt. url(r"^robots.txt$", TextTemplateView.as_view(template_name="robots.txt")), # There's no favicon here! 
url(r"^favicon.ico$", generic.RedirectView.as_view(url='/static/build/img/icons/favicon.ico', permanent=True)), url(r'^(?i)api/replays/(?P<replay_id>[a-f0-9]{8}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{12})/$', api_views.ReplayViewSet.as_view({'get': 'retrieve'})), url(r'^api/', include(router.urls)), url(r'^api/stream-data/(?P<user_id>\d+)/$', StreamDataAPIView.as_view(), name='stream-data'), url(r'^api/latest-replay/(?P<user_id>\d+)/$', api_views.LatestUserReplay.as_view(), name='latest-replay'), url(r'^api-docs/', include('rest_framework_swagger.urls')), url(r'^login/$', 'django.contrib.auth.views.login', name='auth_login'), url(r'^logout/$', 'django.contrib.auth.views.logout_then_login', name='auth_logout'), url(r'', include('rocket_league.apps.site.urls', namespace='site')), url(r'', include('rocket_league.apps.users.urls', namespace='users')), url('', include('social.apps.django_app.urls', namespace='social')) ) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) if settings.DEBUG: urlpatterns += patterns( "", url("^404/$", generic.TemplateView.as_view(template_name="404.html")), url("^500/$", generic.TemplateView.as_view(template_name="500.html")), ) handler500 = "cms.views.handler500"
gpl-3.0
-7,748,654,026,379,278,000
41.4
165
0.701061
false
3.332024
false
false
false
ltilve/chromium
tools/gn/bin/help_as_html.py
105
3135
#!/usr/bin/env python # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Runs 'gn help' and various subhelps, and spits out html. # TODO: # - Handle numbered and dashed lists -> <ol> <ul>. (See "os" and "toolchain"). # - Handle "Arguments:" blocks a bit better (the argument names could be # distinguished). # - Convert "|blahblah|" to <code>. # - Spit out other similar formats like wiki, markdown, whatever. import cgi import subprocess import sys def GetOutput(*args): try: return subprocess.check_output([sys.argv[1]] + list(args)) except subprocess.CalledProcessError: return '' def ParseTopLevel(out): commands = [] output = [] for line in out.splitlines(): if line.startswith(' '): command, sep, rest = line.partition(':') command = command.strip() is_option = command.startswith('-') output_line = ['<li>'] if not is_option: commands.append(command) output_line.append('<a href="#' + cgi.escape(command) + '">') output_line.append(cgi.escape(command)) if not is_option: output_line.append('</a>') output_line.extend([sep + cgi.escape(rest) + '</li>']) output.append(''.join(output_line)) else: output.append('<h2>' + cgi.escape(line) + '</h2>') return commands, output def ParseCommand(command, out): first_line = True got_example = False output = [] for line in out.splitlines(): if first_line: name, sep, rest = line.partition(':') name = name.strip() output.append('<h3><a name="' + cgi.escape(command) + '">' + cgi.escape(name + sep + rest) + '</a></h3>') first_line = False else: if line.startswith('Example'): # Special subsection that's pre-formatted. if got_example: output.append('</pre>') got_example = True output.append('<h4>Example</h4>') output.append('<pre>') elif not line.strip(): output.append('<p>') elif not line.startswith(' ') and line.endswith(':'): # Subsection. 
output.append('<h4>' + cgi.escape(line[:-1]) + '</h4>') else: output.append(cgi.escape(line)) if got_example: output.append('</pre>') return output def main(): if len(sys.argv) < 2: print 'usage: help_as_html.py <gn_binary>' return 1 header = '''<!DOCTYPE html> <html> <head> <meta name="viewport" content="width=device-width, initial-scale=1"> <style> body { font-family: Arial, sans-serif; font-size: small; } pre { font-family: Consolas, monospace; font-size: small; } #container { margin: 0 auto; max-width: 48rem; width: 90%; } </style> </head> <body> <div id="container"><h1>GN</h1> ''' footer = '</div></body></html>' commands, output = ParseTopLevel(GetOutput('help')) for command in commands: output += ParseCommand(command, GetOutput('help', command)) print header + '\n'.join(output) + footer return 0 if __name__ == '__main__': sys.exit(main())
bsd-3-clause
-7,027,008,553,045,315,000
28.857143
78
0.605423
false
3.538375
false
false
false
juliushaertl/i3pystatus
i3pystatus/xkblayout.py
1
1546
from i3pystatus import IntervalModule import subprocess class Xkblayout(IntervalModule): """Displays and changes current keyboard layout. ``change_layout`` callback finds the current layout in the ``layouts`` setting and enables the layout following it. If the current layout is not in the ``layouts`` setting the first layout is enabled. ``layouts`` can be stated with or without variants, e.g.: status.register("xkblayout", layouts=["de neo", "de"]) """ interval = 1 format = u"\u2328 {name}" settings = ( ("layouts", "List of layouts"), ) layouts = [] on_leftclick = "change_layout" def run(self): kblayout = subprocess.check_output("setxkbmap -query | awk '/layout/,/variant/{print $2}'", shell=True).decode('utf-8').replace("\n", " ").strip() self.output = { "full_text": self.format.format(name=kblayout).upper(), "color": "#ffffff" } def change_layout(self): layouts = self.layouts kblayout = subprocess.check_output("setxkbmap -query | awk '/layout/,/variant/{print $2}'", shell=True).decode('utf-8').replace("\n", " ").strip() if kblayout in layouts: position = layouts.index(kblayout) try: subprocess.check_call(["setxkbmap"] + layouts[position + 1].split()) except IndexError: subprocess.check_call(["setxkbmap"] + layouts[0].split()) else: subprocess.check_call(["setxkbmap"] + layouts[0].split())
mit
3,405,981,075,283,309,600
35.809524
154
0.60608
false
3.752427
false
false
false
Trax-/data_analysis
data_analysis/matplotlib_example_gui.py
1
3719
import sys import time import matplotlib.pyplot as plt import numpy as np from PyQt5 import QtWidgets from matplotlib.backends.backend_qt5agg import FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavToolbar def main(): app = QtWidgets.QApplication(sys.argv) main_window = QtWidgets.QMainWindow() main_window.setWindowTitle('Matplotlib Example') central_widget = TabWidget() main_window.setCentralWidget(central_widget) main_window.show() app.exec_() class TabWidget(QtWidgets.QTabWidget): def __init__(self, parent=None): super().__init__(parent) xy_scatter_widget = XYScatterGraphWidget() pie_widget = PieGraphWidget() bar_widget = BarGraphWidget() graph_widget = GraphWidget() self.addTab(graph_widget, 'Graph Widget') self.addTab(bar_widget, 'Bar Graph') self.addTab(xy_scatter_widget, 'Scatter Graph') self.addTab(pie_widget, 'Pie Graph') class GraphWidget(QtWidgets.QWidget): def __init__(self, parent=None): super().__init__(parent) self._figure = plt.Figure() # Widget! self._canvas = FigureCanvas(self._figure) # widget! toolbar = NavToolbar(self._canvas, self) # Widget! 
plot_button = QtWidgets.QPushButton('Plot!') plot_button.clicked.connect(self.plot) layout = QtWidgets.QVBoxLayout() layout.addWidget(toolbar) layout.addWidget(self._canvas) layout.addWidget(plot_button) self.setLayout(layout) self.plot() """ self.random_signal.connect(self.random_slot) self.random_signal.emit('hello', 5, False) random_signal = QtCore.pyqtSignal(str, int, bool) # you can add decorator in, but it's optional @QtCore.pyqtSlot(str, int, bool) def random_slot(self, string, integer, boolean, *args, **kwargs): print(string, integer, boolean) """ def plot(self): data = np.random.rand(20) ax = self._figure.add_subplot(111) ax.set_yscale('log') ax.set_xlim(-1, 6) ax.set_ylim(-1, 3) ax.set_xlabel('This is an x label') ax.set_ylabel('Set a Y label') ax.legend() ax.set_title('A really cool default chart') ax.plot(data, '*-', label=time.time()) self.update_canvas() def update_canvas(self): self._canvas.draw() class XYScatterGraphWidget(GraphWidget): def plot(self): self._figure.clear() ax = self._figure.add_subplot(111) n = 100 x = np.random.rand(n) y = np.random.rand(n) colors = np.random.rand(n) area = np.pi * (15 * np.random.rand(n)) ** 2 ax.scatter(x, y, s=area, c=colors, alpha=0.5) self.update_canvas() class PieGraphWidget(GraphWidget): def plot(self): labels = ['Eaten', 'Uneaten', 'Eat next'] n = len(labels) data = np.random.rand(n) * 100 # control how the percentages are displayed autopct = '%1.1f%%' # colors = ['r', 'g', 'b'] explode = np.zeros(n) explode[-1] = 0.1 self._figure.clear() ax = self._figure.add_subplot(111) ax.pie(data, explode=explode, labels=labels, autopct=autopct, shadow=True, startangle=90) self.update_canvas() class BarGraphWidget(GraphWidget): def plot(self): self._figure.clear() n = 10 y = np.random.rand(n) * 100 x = range(n) width = 1 / 1.5 ax = self._figure.add_subplot(111) ax.bar(x, y, width, color='blue') self.update_canvas() if __name__ == '__main__': main()
bsd-3-clause
-3,165,528,811,247,220,000
26.548148
81
0.596666
false
3.541905
false
false
false
Sisky/sdk
bindings/python/setup.py
6
3647
#!/bin/env python # -*- coding: utf-8 -*- """ This setup is loosely following the instructions adapted from https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/ """ import os import re from setuptools import setup def read(fname): """ Utility function to read the README file. Used for the long_description. It's nice, because now 1) we have a top level README file and 2) it's easier to type in the README file than to put a raw string in below ... """ return open(os.path.join(os.path.dirname(__file__), fname)).read() def get_version(): """ Grabs and returns the version and release numbers from autotools. """ configure_ac = open(os.path.join('..', '..', 'configure.ac')).read() major = re.search('m4_define\(\[mega_major_version\], \[([0-9]+)\]', configure_ac) minor = re.search('m4_define\(\[mega_minor_version\], \[([0-9]+)\]', configure_ac) micro = re.search('m4_define\(\[mega_micro_version\], \[(.+?)\]', configure_ac) if major: major, minor, micro = major.group(1), minor.group(1), micro.group(1) version = '.'.join([major, minor]) else: version = 'raw_development' if micro: release = '.'.join([major, minor, micro]) else: release = 'raw_development' return version, release def make_symlink(src, dst): """Makes a symlink, ignores errors if it's there already.""" try: os.symlink(src, dst) except OSError as e: if e.strerror != 'File exists': raise e def remove_file(fname): """Removes a file/link, ignores errors if it's not there any more.""" try: os.remove(fname) except OSError as e: if e.strerror != 'No such file or directory': raise e # Put native library modules into a "good place" for the package. make_symlink('../../src/.libs/libmega.so', 'libmega.so') make_symlink('.libs/_mega.so', '_mega.so') # Create a dummy __init__.py if not present. 
_init_file = '__init__.py' _init_file_created = False if not os.path.exists(_init_file): with open(_init_file, 'wb') as fd: _init_file_created = True setup( name='megasdk', version=get_version()[1], description='Python bindings to the Mega file storage SDK.', long_description=read('DESCRIPTION.rst'), url='http://github.com/meganz/sdk/', license='Simplified BSD', author='Guy Kloss', author_email='gk@mega.co.nz', packages=['mega'], package_dir={'mega': '.'}, package_data = { 'mega': ['libmega.so', '_mega.so'], }, exclude_package_data = {'': ['test_libmega.py']}, include_package_data=True, keywords=['MEGA', 'privacy', 'cloud', 'storage', 'API'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Natural Language :: English', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Software Development :: Libraries :: Python Modules', ], ) # Clean up some temporary stuff. remove_file('libmega.so') remove_file('_mega.so') if _init_file_created: remove_file(_init_file)
bsd-2-clause
-864,611,293,662,145,200
32.154545
83
0.598026
false
3.710071
false
false
false
apache/incubator-trafodion
install/python-installer/scripts/dcs_setup.py
2
3475
#!/usr/bin/env python # @@@ START COPYRIGHT @@@ # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # @@@ END COPYRIGHT @@@ ### this script should be run on all nodes with trafodion user ### import os import sys import json from constants import TRAF_CFG_DIR, TRAF_CFG_FILE from common import append_file, write_file, mod_file, cmd_output, \ ParseInI, ParseXML, err, run_cmd def run(): dbcfgs = json.loads(dbcfgs_json) hbase_xml_file = dbcfgs['hbase_xml_file'] dcs_conf_dir = '%s/dcs' % (TRAF_CFG_DIR) dcs_srv_file = dcs_conf_dir + '/servers' dcs_master_file = dcs_conf_dir + '/masters' dcs_site_file = dcs_conf_dir + '/dcs-site.xml' rest_site_file = '%s/rest/rest-site.xml' % (TRAF_CFG_DIR) ### dcs setting ### # servers nodes = dbcfgs['node_list'].split(',') dcs_cnt = dbcfgs['dcs_cnt_per_node'] dcs_servers = '' for node in nodes: dcs_servers += '%s %s\n' % (node, dcs_cnt) write_file(dcs_srv_file, dcs_servers) ### modify dcs config files ### # modify master dcs_master = nodes[0] append_file(dcs_master_file, dcs_master+'\n') # modify dcs-site.xml net_interface = run_cmd('ip route |grep default|awk \'{print $5}\'') hb = ParseXML(hbase_xml_file) zk_hosts = hb.get_property('hbase.zookeeper.quorum') zk_port = 
hb.get_property('hbase.zookeeper.property.clientPort') p = ParseXML(dcs_site_file) p.add_property('dcs.zookeeper.property.clientPort', zk_port) p.add_property('dcs.zookeeper.quorum', zk_hosts) p.add_property('dcs.dns.interface', net_interface) if dbcfgs['dcs_ha'] == 'Y': dcs_floating_ip = dbcfgs['dcs_floating_ip'] dcs_backup_nodes = dbcfgs['dcs_backup_nodes'] p.add_property('dcs.master.floating.ip', 'true') p.add_property('dcs.master.floating.ip.external.interface', net_interface) p.add_property('dcs.master.floating.ip.external.ip.address', dcs_floating_ip) p.rm_property('dcs.dns.interface') # set DCS_MASTER_FLOATING_IP ENV for trafci dcs_floating_ip_cfg = 'export DCS_MASTER_FLOATING_IP=%s' % dcs_floating_ip append_file(TRAF_CFG_FILE, dcs_floating_ip_cfg) # modify master with backup master host for dcs_backup_node in dcs_backup_nodes.split(','): append_file(dcs_master_file, dcs_backup_node) p.write_xml() ### rest setting ### p = ParseXML(rest_site_file) p.add_property('rest.zookeeper.property.clientPort', zk_port) p.add_property('rest.zookeeper.quorum', zk_hosts) p.write_xml() ### run sqcertgen ### run_cmd('sqcertgen') # main try: dbcfgs_json = sys.argv[1] except IndexError: err('No db config found') run()
apache-2.0
-5,526,937,498,630,151,000
33.068627
85
0.66446
false
3.099911
false
false
false
UAVCAN/pyuavcan
pyuavcan/transport/can/media/_filter.py
1
7507
# Copyright (c) 2019 UAVCAN Consortium # This software is distributed under the terms of the MIT License. # Author: Pavel Kirienko <pavel@uavcan.org> from __future__ import annotations import typing import itertools import dataclasses from ._frame import FrameFormat @dataclasses.dataclass(frozen=True) class FilterConfiguration: identifier: int """The reference CAN ID value.""" mask: int """Mask applies to the identifier only. It does not contain any special flags.""" format: typing.Optional[FrameFormat] """None means no preference -- both formats will be accepted.""" def __post_init__(self) -> None: max_bit_length = 2 ** self.identifier_bit_length - 1 if not (0 <= self.identifier <= max_bit_length): raise ValueError(f"Invalid identifier: {self.identifier}") if not (0 <= self.mask <= max_bit_length): raise ValueError(f"Invalid mask: {self.mask}") @property def identifier_bit_length(self) -> int: # noinspection PyTypeChecker return int(self.format if self.format is not None else max(FrameFormat)) @staticmethod def new_promiscuous(frame_format: typing.Optional[FrameFormat] = None) -> FilterConfiguration: """ Returns a configuration that accepts all frames of the specified format. If the format is not specified, no distinction will be made. Note that some CAN controllers may have difficulty supporting both formats on a single filter. """ return FilterConfiguration(identifier=0, mask=0, format=frame_format) @property def rank(self) -> int: """ This is the number of set bits in the mask. This is a part of the CAN acceptance filter configuration optimization algorithm; see :func:`optimize_filter_configurations`. We return negative rank for configurations which do not distinguish between extended and base frames in order to discourage merger of configurations of different frame types, since they are hard to support in certain CAN controllers. 
The effect of this is that we guarantee that an ambivalent filter configuration will never appear if the controller has at least two acceptance filters. Negative rank is computed by subtracting the number of bits in the CAN ID (or 29 if the filter accepts both base and extended identifiers) from the original rank. """ mask_mask = 2 ** self.identifier_bit_length - 1 rank = bin(self.mask & mask_mask).count("1") if self.format is None: rank -= int(self.identifier_bit_length) # Discourage merger of ambivalent filters. return rank def merge(self, other: FilterConfiguration) -> FilterConfiguration: """ This is a part of the CAN acceptance filter configuration optimization algorithm; see :func:`optimize_filter_configurations`. Given two filter configurations ``A`` and ``B``, where ``A`` accepts CAN frames whose identifiers belong to ``Ca`` and likewise ``Cb`` for ``B``, the merge product of ``A`` and ``B`` would be a new filter configuration that accepts CAN frames belonging to a new set which is a superset of the union of ``Ca`` and ``Cb``. """ mask = self.mask & other.mask & ~(self.identifier ^ other.identifier) identifier = self.identifier & mask fmt = self.format if self.format == other.format else None return FilterConfiguration(identifier=identifier, mask=mask, format=fmt) def __str__(self) -> str: out = "".join( (str((self.identifier >> bit) & 1) if self.mask & (1 << bit) != 0 else "x") for bit in reversed(range(int(self.format or FrameFormat.EXTENDED))) ) return (self.format.name[:3].lower() if self.format else "any") + ":" + out def optimize_filter_configurations( configurations: typing.Iterable[FilterConfiguration], target_number_of_configurations: int ) -> typing.Sequence[FilterConfiguration]: """ Implements the CAN acceptance filter configuration optimization algorithm described in the Specification. The algorithm was originally proposed by P. Kirienko and I. Sheremet. 
Given a set of ``K`` filter configurations that accept CAN frames whose identifiers belong to the set ``C``, and ``N`` acceptance filters implemented in hardware, where ``1 <= N < K``, find a new set of ``K'`` filter configurations that accept CAN frames whose identifiers belong to the set ``C'``, such that ``K' <= N``, ``C'`` is a superset of ``C``, and ``|C'|`` is minimized. The algorithm is not defined for ``N >= K`` because this configuration is considered optimal. The function returns the input set unchanged in this case. If the target number of configurations is not positive, a ValueError is raised. The time complexity of this implementation is ``O(K!)``; it should be optimized. """ if target_number_of_configurations < 1: raise ValueError(f"The number of configurations must be positive; found {target_number_of_configurations}") configurations = list(configurations) while len(configurations) > target_number_of_configurations: options = itertools.starmap( lambda ia, ib: (ia[0], ib[0], ia[1].merge(ib[1])), itertools.permutations(enumerate(configurations), 2) ) index_replace, index_remove, merged = max(options, key=lambda x: int(x[2].rank)) configurations[index_replace] = merged del configurations[index_remove] # Invalidates indexes assert all(map(lambda x: isinstance(x, FilterConfiguration), configurations)) return configurations def _unittest_can_media_filter_faults() -> None: from pytest import raises with raises(ValueError): FilterConfiguration(0, -1, None) with raises(ValueError): FilterConfiguration(-1, 0, None) for fmt in FrameFormat: with raises(ValueError): FilterConfiguration(2 ** int(fmt), 0, fmt) with raises(ValueError): FilterConfiguration(0, 2 ** int(fmt), fmt) with raises(ValueError): optimize_filter_configurations([], 0) # noinspection SpellCheckingInspection def _unittest_can_media_filter_str() -> None: assert str(FilterConfiguration(0b10101010, 0b11101000, FrameFormat.EXTENDED)) == "ext:xxxxxxxxxxxxxxxxxxxxx101x1xxx" assert ( 
str(FilterConfiguration(0b10101010101010101010101010101, 0b10111111111111111111111111111, FrameFormat.EXTENDED)) == "ext:1x101010101010101010101010101" ) assert str(FilterConfiguration(0b10101010101, 0b11111111111, FrameFormat.BASE)) == "bas:10101010101" assert str(FilterConfiguration(123, 456, None)) == "any:xxxxxxxxxxxxxxxxxxxx001xx1xxx" assert str(FilterConfiguration.new_promiscuous()) == "any:xxxxxxxxxxxxxxxxxxxxxxxxxxxxx" assert repr(FilterConfiguration(123, 456, None)) == "FilterConfiguration(identifier=123, mask=456, format=None)" def _unittest_can_media_filter_merge() -> None: assert FilterConfiguration(123456, 0, None).rank == -29 # Worst rank assert FilterConfiguration(123456, 0b110, None).rank == -27 # Two better assert FilterConfiguration(1234, 0b110, FrameFormat.BASE).rank == 2 assert ( FilterConfiguration(0b111, 0b111, FrameFormat.EXTENDED) .merge(FilterConfiguration(0b111, 0b111, FrameFormat.BASE)) .rank == -29 + 3 )
mit
2,260,021,227,886,618,000
42.900585
120
0.685094
false
4.253258
true
false
false
google/grr
grr/server/grr_response_server/databases/db_test_utils.py
1
6907
#!/usr/bin/env python """Mixin class to be used in tests for DB implementations.""" import itertools import random from typing import Any, Callable, Dict, Iterable, Optional, Text from grr_response_server.databases import db class QueryTestHelpersMixin(object): """Mixin containing helper methods for list/query methods tests.""" def DoOffsetAndCountTest(self, fetch_all_fn: Callable[[], Iterable[Any]], fetch_range_fn: Callable[[int, int], Iterable[Any]], error_desc: Optional[Text] = None): """Tests a DB API method with different offset/count combinations. This helper method works by first fetching all available objects with fetch_all_fn and then fetching all possible ranges using fetch_fn. The test passes if subranges returned by fetch_fn match subranges of values in the list returned by fetch_all_fn. Args: fetch_all_fn: Function without arguments that fetches all available objects using the API method that's being tested. fetch_range_fn: Function that calls an API method that's being tested passing 2 positional arguments: offset and count. It should return a list of objects. error_desc: Optional string to be used in error messages. May be useful to identify errors from a particular test. """ all_objects = fetch_all_fn() self.assertNotEmpty(all_objects, "Fetched objects can't be empty (%s)." % error_desc) for i in range(len(all_objects)): for l in range(1, len(all_objects) + 1): results = list(fetch_range_fn(i, l)) expected = list(all_objects[i:i + l]) self.assertListEqual( results, expected, "Results differ from expected (offset %d, count %d%s): %s vs %s" % (i, l, (", " + error_desc) if error_desc else "", results, expected)) def DoFilterCombinationsTest(self, fetch_fn: Callable[..., Iterable[Any]], conditions: Dict[Text, Any], error_desc: Optional[Text] = None): """Tests a DB API method with different keyword arguments combinations. 
This test method works by fetching sets of objects for each individual condition and then checking that combinations of conditions produce expected sets of objects. Args: fetch_fn: Function accepting keyword "query filter" arguments and returning a list of fetched objects. When called without arguments, fetch_fn is expected to return all available objects. conditions: A dictionary of key -> value, where key is a string identifying a keyword argument to be passed to fetch_fn and value is a value to be passed. All possible permutations of conditions will be tried on fetch_fn. error_desc: Optional string to be used in error messages. May be useful to identify errors from a particular test. """ perms = list( itertools.chain.from_iterable([ itertools.combinations(sorted(conditions.keys()), i) for i in range(1, len(conditions) + 1) ])) self.assertNotEmpty(perms) all_objects = fetch_fn() expected_objects = {} for k, v in conditions.items(): expected_objects[k] = fetch_fn(**{k: v}) for condition_perm in perms: expected = all_objects kw_args = {} for k in condition_perm: expected = [e for e in expected if e in expected_objects[k]] kw_args[k] = conditions[k] got = fetch_fn(**kw_args) # Make sure that the order of keys->values is stable in the error message. kw_args_str = ", ".join( "%r: %r" % (k, kw_args[k]) for k in sorted(kw_args)) self.assertListEqual( got, expected, "Results differ from expected ({%s}%s): %s vs %s" % (kw_args_str, (", " + error_desc) if error_desc else "", got, expected)) def DoFilterCombinationsAndOffsetCountTest(self, fetch_fn: Callable[..., Iterable[Any]], conditions: Dict[Text, Any], error_desc: Optional[Text] = None): """Tests a DB API methods with combinations of offset/count args and kwargs. This test methods works in 2 steps: 1. It tests that different conditions combinations work fine when offset and count are 0 and db.MAX_COUNT respectively. 2. 
For every condition combination it tests all possible offset and count combinations to make sure correct subsets of results are returned. Args: fetch_fn: Function accepting positional offset and count arguments and keyword "query filter" arguments and returning a list of fetched objects. conditions: A dictionary of key -> value, where key is a string identifying a keyword argument to be passed to fetch_fn and value is a value to be passed. All possible permutations of conditions will be tried on fetch_fn. error_desc: Optional string to be used in error messages. May be useful to identify errors from a particular test. """ self.DoFilterCombinationsTest( lambda **kw_args: fetch_fn(0, db.MAX_COUNT, **kw_args), conditions, error_desc=error_desc) perms = list( itertools.chain.from_iterable([ itertools.combinations(sorted(conditions.keys()), i) for i in range(1, len(conditions) + 1) ])) self.assertNotEmpty(perms) for condition_perm in perms: kw_args = {} for k in condition_perm: kw_args[k] = conditions[k] # Make sure that the order of keys->values is stable in the error message. kw_args_str = ", ".join( "%r: %r" % (k, kw_args[k]) for k in sorted(kw_args)) self.DoOffsetAndCountTest( lambda: fetch_fn(0, db.MAX_COUNT, **kw_args), # pylint: disable=cell-var-from-loop lambda offset, count: fetch_fn(offset, count, **kw_args), # pylint: disable=cell-var-from-loop error_desc="{%s}%s" % (kw_args_str, ", " + error_desc) if error_desc else "") def InitializeClient(db_obj, client_id=None): """Initializes a test client. Args: db_obj: A database object. client_id: A specific client id to use for initialized client. If none is provided a randomly generated one is used. Returns: A client id for initialized client. """ if client_id is None: client_id = "C." for _ in range(16): client_id += random.choice("0123456789abcdef") db_obj.WriteClientMetadata(client_id, fleetspeak_enabled=False) return client_id
apache-2.0
3,371,174,344,154,678,000
39.629412
105
0.618503
false
4.324984
true
false
false
SoftwareHeritage/swh-storage
swh/storage/proxies/filter.py
1
4046
# Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from typing import Dict, Iterable, List, Set from swh.model.model import Content, Directory, Revision, Sha1Git, SkippedContent from swh.storage import get_storage from swh.storage.interface import StorageInterface class FilteringProxyStorage: """Filtering Storage implementation. This is in charge of transparently filtering out known objects prior to adding them to storage. Sample configuration use case for filtering storage: .. code-block: yaml storage: cls: filter storage: cls: remote url: http://storage.internal.staging.swh.network:5002/ """ object_types = ["content", "skipped_content", "directory", "revision"] def __init__(self, storage): self.storage: StorageInterface = get_storage(**storage) def __getattr__(self, key): if key == "storage": raise AttributeError(key) return getattr(self.storage, key) def content_add(self, content: List[Content]) -> Dict[str, int]: contents_to_add = self._filter_missing_contents(content) return self.storage.content_add( [x for x in content if x.sha256 in contents_to_add] ) def skipped_content_add(self, content: List[SkippedContent]) -> Dict[str, int]: contents_to_add = self._filter_missing_skipped_contents(content) return self.storage.skipped_content_add( [x for x in content if x.sha1_git is None or x.sha1_git in contents_to_add] ) def directory_add(self, directories: List[Directory]) -> Dict[str, int]: missing_ids = self._filter_missing_ids("directory", (d.id for d in directories)) return self.storage.directory_add( [d for d in directories if d.id in missing_ids] ) def revision_add(self, revisions: List[Revision]) -> Dict[str, int]: missing_ids = self._filter_missing_ids("revision", (r.id for r in revisions)) return self.storage.revision_add([r for r in revisions if r.id in 
missing_ids]) def _filter_missing_contents(self, contents: List[Content]) -> Set[bytes]: """Return only the content keys missing from swh Args: content_hashes: List of sha256 to check for existence in swh storage """ missing_contents = [] for content in contents: missing_contents.append(content.hashes()) return set(self.storage.content_missing(missing_contents, key_hash="sha256",)) def _filter_missing_skipped_contents( self, contents: List[SkippedContent] ) -> Set[Sha1Git]: """Return only the content keys missing from swh Args: content_hashes: List of sha1_git to check for existence in swh storage """ missing_contents = [c.hashes() for c in contents if c.sha1_git is not None] ids = set() for c in self.storage.skipped_content_missing(missing_contents): if c is None or c.get("sha1_git") is None: continue ids.add(c["sha1_git"]) return ids def _filter_missing_ids(self, object_type: str, ids: Iterable[bytes]) -> Set[bytes]: """Filter missing ids from the storage for a given object type. Args: object_type: object type to use {revision, directory} ids: List of object_type ids Returns: Missing ids from the storage for object_type """ missing_ids = [] for id in ids: missing_ids.append(id) fn_by_object_type = { "revision": self.storage.revision_missing, "directory": self.storage.directory_missing, } fn = fn_by_object_type[object_type] return set(fn(missing_ids))
gpl-3.0
-6,670,559,346,741,618,000
33.87931
88
0.629758
false
4.099291
false
false
false
pcolmant/repanier
repanier_v2/views/published_customer_view.py
1
7951
from os import sep as os_sep from django.contrib.auth import get_user_model from django.contrib.auth.decorators import login_required from django.forms import widgets, forms, fields from django.http import Http404 from django.shortcuts import render from django.utils.translation import ugettext_lazy as _ from django.views.decorators.cache import never_cache from django.views.decorators.csrf import csrf_protect from repanier_v2.const import DECIMAL_ZERO, EMPTY_STRING from repanier_v2.models.customer import Customer from repanier_v2.picture.const import SIZE_S from repanier_v2.tools import get_repanier_template_name from repanier_v2.widget.checkbox import RepanierCheckboxWidget from repanier_v2.widget.picture import RepanierPictureWidget class CustomerForm(forms.Form): long_name = fields.CharField(label=_("My name is"), max_length=100) zero_waste = fields.BooleanField( label=EMPTY_STRING, required=False, widget=RepanierCheckboxWidget(label=_("Family zero waste")), ) subscribe_to_email = fields.BooleanField( label=EMPTY_STRING, required=False, widget=RepanierCheckboxWidget( label=_("I agree to receive mails from this site") ), ) email1 = fields.EmailField( label=_( "My main email address, used to reset the password and connect to the site" ) ) email2 = fields.EmailField( label=_("My secondary email address (does not allow to connect to the site)"), required=False, ) phone1 = fields.CharField(label=_("My main phone number"), max_length=25) phone2 = fields.CharField( label=_("My secondary phone number"), max_length=25, required=False ) city = fields.CharField(label=_("My city"), max_length=50, required=False) address = fields.CharField( label=_("My address"), widget=widgets.Textarea(attrs={"cols": "40", "rows": "3"}), required=False, ) picture = fields.CharField( label=_("My picture"), widget=RepanierPictureWidget(upload_to="customer", size=SIZE_S, bootstrap=True), required=False, ) about_me = fields.CharField( label=_("About me"), widget=widgets.Textarea(attrs={"cols": 
"40", "rows": "3"}), required=False, ) def clean_email1(self): email1 = self.cleaned_data["email1"] user_model = get_user_model() qs = ( user_model.objects.filter(email=email1, is_staff=False) .exclude(id=self.request.customer_id) .order_by("?") ) if qs.exists(): self.add_error( "email1", _("The email {} is already used by another user.").format(email1), ) return email1 # def __init__(self, *args, **kwargs): # self.request = kwargs.pop("request", None) # super().__init__(*args, **kwargs) @login_required() @csrf_protect @never_cache def published_customer_view(request, customer_id=0): print("######### customer_id : {}".format(customer_id)) user = request.user if user.is_repanier_staff: customer = Customer.objects.filter(id=customer_id, is_active=True).first() else: customer = ( Customer.objects.filter(id=user.customer_id, is_active=True) .filter(id=customer_id) .first() ) if not customer: raise Http404 from repanier_v2.globals import ( REPANIER_SETTINGS_MEMBERSHIP_FEE, REPANIER_SETTINGS_DISPLAY_WHO_IS_WHO, ) if REPANIER_SETTINGS_MEMBERSHIP_FEE > DECIMAL_ZERO: membership_fee_valid_until = customer.membership_fee_valid_until else: membership_fee_valid_until = None template_name = get_repanier_template_name("published_customer_form.html") if request.method == "POST": # If the form has been submitted... form = CustomerForm(request.POST) # A form bound to the POST data if form.is_valid(): # All validation rules pass # Process the data in form.cleaned_data # ... 
if customer is not None: customer.long_name = form.cleaned_data.get("long_name") customer.phone1 = form.cleaned_data.get("phone1") customer.phone2 = form.cleaned_data.get("phone2") customer.email2 = form.cleaned_data.get("email2").lower() customer.subscribe_to_email = form.cleaned_data.get( "subscribe_to_email" ) customer.city = form.cleaned_data.get("city") customer.address = form.cleaned_data.get("address") customer.picture = form.cleaned_data.get("picture") customer.about_me = form.cleaned_data.get("about_me") customer.zero_waste = form.cleaned_data.get("zero_waste") customer.save() # Important : place this code after because form = CustomerForm(data, request=request) delete form.cleaned_data email = form.cleaned_data.get("email1") user_model = get_user_model() user = user_model.objects.filter(email=email).order_by("?").first() if user is None or user.email != email: # user.email != email for case unsensitive SQL query customer.user.username = customer.user.email = email.lower() # customer.user.first_name = EMPTY_STRING # customer.user.last_name = customer.short_name customer.user.save() # User feed back : Display email in lower case. 
data = form.data.copy() data["email1"] = customer.user.email data["email2"] = customer.email2 form = CustomerForm(data, request=request) return render( request, template_name, { "form": form, "membership_fee_valid_until": membership_fee_valid_until, "display_who_is_who": REPANIER_SETTINGS_DISPLAY_WHO_IS_WHO, "update": True, }, ) return render( request, template_name, { "form": form, "membership_fee_valid_until": membership_fee_valid_until, "display_who_is_who": REPANIER_SETTINGS_DISPLAY_WHO_IS_WHO, "update": False, }, ) else: form = CustomerForm() # An unbound form field = form.fields["long_name"] field.initial = customer.long_name field = form.fields["phone1"] field.initial = customer.phone1 field = form.fields["phone2"] field.initial = customer.phone2 field = form.fields["email1"] field.initial = customer.user.email field = form.fields["email2"] field.initial = customer.email2 field = form.fields["subscribe_to_email"] field.initial = customer.subscribe_to_email field = form.fields["city"] field.initial = customer.city field = form.fields["address"] field.initial = customer.address field = form.fields["picture"] field.initial = customer.picture if hasattr(field.widget, "upload_to"): field.widget.upload_to = "{}{}{}".format("customer", os_sep, customer.id) field = form.fields["about_me"] field.initial = customer.about_me field = form.fields["zero_waste"] field.initial = customer.zero_waste return render( request, template_name, { "form": form, "membership_fee_valid_until": membership_fee_valid_until, "display_who_is_who": REPANIER_SETTINGS_DISPLAY_WHO_IS_WHO, "update": None, }, )
gpl-3.0
-919,595,278,697,615,900
38.361386
127
0.58936
false
4.011604
false
false
false
jinjin123/devops2.0
devops/ops/views/zabbix/zabbix.py
1
5284
#!/usr/bin/python env # coding=utf-8 import os,sys from ops.views.ssh_settings import zabbixurl,zabbixpwd,zabbixuser import time from pyzabbix import ZabbixAPI # 登录zabbix zabbix = ZabbixAPI(zabbixurl) zabbix.session.verify = False zabbix.login(zabbixuser, zabbixpwd) def group_list(): return zabbix.hostgroup.get( output=['groupid', 'name'] ) def host_list(group=None): if group: return zabbix.host.get( output=['host', 'hostid', 'name', 'available'], groupids=[group], selectGroups=['name'] ) else: return zabbix.host.get( output=['host', 'hostid', 'name', 'available'], selectGroups=['name'] ) def cpu_list(hostid): if hostid: item = zabbix.item.get(hostids=[hostid], output=["name", "key_", "value_type", "hostid", "status", "state"], filter={'key_': 'system.cpu.load[percpu,avg1]'}) itemid = item[0]['itemid'] t_till = int(time.time()) t_from = t_till - 2 * 24 * 60 * 60 return zabbix.history.get( # hostids=[hostid], itemids=[itemid], history=0, output='extend', sortfield='clock', sortorder='ASC', time_from=t_from, time_till=t_till ) def memory_list(hostid): if hostid: item = zabbix.item.get(hostids=[hostid], output=["name", "key_", "value_type", "hostid", "status", "state"], filter={'key_': 'vm.memory.size[available]'}) itemid = item[0]['itemid'] t_till = int(time.time()) t_from = t_till - 2 * 24 * 60 * 60 return zabbix.history.get( # hostids=[hostid], itemids=[itemid], history=3, output='extend', sortfield='clock', sortorder='ASC', time_from=t_from, time_till=t_till ) def disk_list(hostid): if hostid: item = zabbix.item.get(hostids=[hostid], output=["name", "key_", "value_type", "hostid", "status", "state"], filter={'key_': 'vfs.fs.size[/,free]'}) itemid = item[0]['itemid'] t_till = int(time.time()) t_from = t_till - 2 * 24 * 60 * 60 return zabbix.history.get( # hostids=[hostid], itemids=[itemid], history=3, output='extend', sortfield='clock', sortorder='ASC', time_from=t_from, time_till=t_till ) def event_list(): t_till = int(time.time()) t_from = t_till - 7 * 24 * 60 * 
60 triggers = zabbix.trigger.get( output=['triggerid', 'description', 'priority'] ) triggerDict = {} for trigger in triggers: triggerDict[trigger['triggerid']] = trigger events = zabbix.event.get( output='extend', selectHosts=['name', 'host'], sortfield='clock', sortorder='DESC', time_from=t_from, time_till=t_till ) return [{ 'clock': event['clock'], 'eventid': event['eventid'], 'acknowledged': event['acknowledged'], 'hosts': event['hosts'], 'trigger': triggerDict.get(event['objectid']) } for event in events] def usage(hostid): diskItemids = zabbix.item.get( hostids=[hostid], output=["name", "key_", "value_type", "hostid", "status", "state"], filter={'key_': 'vfs.fs.size[/,pfree]'} ) diskUsage = zabbix.history.get(itemids=[diskItemids[0]['itemid']], history=0, output='extend', sortfield='clock', sortorder='ASC', limit=1) return [{ 'diskUsage': diskUsage, }] def service_history_list(service): if service: t_till = int(time.time()) t_from = t_till - 7 * 24 * 60 * 60 # 所有监控项 items = zabbix.item.get( output=['itemid'], filter={'name': service}, selectHosts=['name', 'host'], ) history = [] for item in items: history.append( zabbix.history.get( itemids=[item['itemid']], history=3, output='extend', sortfield='clock', sortorder='ASC', time_from=t_from, time_till=t_till, ) ) return { 'items': items, 'history': history } def service_item_list(service): if service: # 所有监控项 items = zabbix.item.get( output=['itemid'], filter={'name': service}, selectHosts=['name', 'host'], ) return items def history_list(itemid): if itemid: t_till = int(time.time()) t_from = t_till - 7 * 24 * 60 * 60 return zabbix.history.get( itemids=[itemid], history=3, output='extend', sortfield='clock', sortorder='ASC', time_from=t_from, time_till=t_till, )
mit
458,557,950,834,580,860
25.974359
117
0.486122
false
3.733144
false
false
false
tiltfactor/mg-game
nlp/config.py
1
1091
#!/usr/bin/env python import logging, os # Flask app debug value. #DEBUG = False # DEBUG AND INFO log levels won't be shown.. DEBUG = True # Used for running the flask server independent of mod_wsgi # using SERVER_NAME as the var name makes views fail.. # Set servername to gameserver base url #SERVERNAME = "127.0.0.1" SERVERNAME = "localhost" SERVER_PORT = 8139 #SERVER_PORT = 5000 # URLs from which request (ajax) can be made to this server ALLOWED_DOMAINS = "*" # all #ALLOWED_DOMAINS = "http://"+SERVERNAME # allow calls from elsewhere in the same server #PREFERRED_URL_SCHEME='https' # Get the current dir for the application APPLICATION_PATH = os.path.dirname(os.path.realpath(__file__)) # If mod_wsgi is used, messages will also get logged to the apache log files LOG_FILE = os.path.join(APPLICATION_PATH, "nlpserver.log") DEFAULT_LOG_FORMATTER = logging.Formatter(\ "%(asctime)s - %(levelname)s - %(message)s") # these not really needed since DEBUG_VAL above influences this #DEFAULT_LOG_LEVEL = logging.DEBUG #DEFAULT_LOG_LEVEL = logging.WARNING
agpl-3.0
741,703,959,555,211,600
32.060606
87
0.719523
false
3.430818
false
false
false
pyro-ppl/numpyro
numpyro/distributions/truncated.py
1
14900
# Copyright Contributors to the Pyro project. # SPDX-License-Identifier: Apache-2.0 from jax import lax import jax.numpy as jnp import jax.random as random from jax.scipy.special import logsumexp from jax.tree_util import tree_map from numpyro.distributions import constraints from numpyro.distributions.continuous import ( Cauchy, Laplace, Logistic, Normal, SoftLaplace, StudentT, ) from numpyro.distributions.distribution import Distribution from numpyro.distributions.util import ( is_prng_key, lazy_property, promote_shapes, validate_sample, ) class LeftTruncatedDistribution(Distribution): arg_constraints = {"low": constraints.real} reparametrized_params = ["low"] supported_types = (Cauchy, Laplace, Logistic, Normal, SoftLaplace, StudentT) def __init__(self, base_dist, low=0.0, validate_args=None): assert isinstance(base_dist, self.supported_types) assert ( base_dist.support is constraints.real ), "The base distribution should be univariate and have real support." batch_shape = lax.broadcast_shapes(base_dist.batch_shape, jnp.shape(low)) self.base_dist = tree_map( lambda p: promote_shapes(p, shape=batch_shape)[0], base_dist ) (self.low,) = promote_shapes(low, shape=batch_shape) self._support = constraints.greater_than(low) super().__init__(batch_shape, validate_args=validate_args) @constraints.dependent_property(is_discrete=False, event_dim=0) def support(self): return self._support @lazy_property def _tail_prob_at_low(self): # if low < loc, returns cdf(low); otherwise returns 1 - cdf(low) loc = self.base_dist.loc sign = jnp.where(loc >= self.low, 1.0, -1.0) return self.base_dist.cdf(loc - sign * (loc - self.low)) @lazy_property def _tail_prob_at_high(self): # if low < loc, returns cdf(high) = 1; otherwise returns 1 - cdf(high) = 0 return jnp.where(self.low <= self.base_dist.loc, 1.0, 0.0) def sample(self, key, sample_shape=()): assert is_prng_key(key) u = random.uniform(key, sample_shape + self.batch_shape) loc = self.base_dist.loc sign = jnp.where(loc >= self.low, 
1.0, -1.0) return (1 - sign) * loc + sign * self.base_dist.icdf( (1 - u) * self._tail_prob_at_low + u * self._tail_prob_at_high ) @validate_sample def log_prob(self, value): sign = jnp.where(self.base_dist.loc >= self.low, 1.0, -1.0) return self.base_dist.log_prob(value) - jnp.log( sign * (self._tail_prob_at_high - self._tail_prob_at_low) ) def tree_flatten(self): base_flatten, base_aux = self.base_dist.tree_flatten() if isinstance(self._support.lower_bound, (int, float)): return base_flatten, ( type(self.base_dist), base_aux, self._support.lower_bound, ) else: return (base_flatten, self.low), (type(self.base_dist), base_aux) @classmethod def tree_unflatten(cls, aux_data, params): if len(aux_data) == 2: base_flatten, low = params base_cls, base_aux = aux_data else: base_flatten = params base_cls, base_aux, low = aux_data base_dist = base_cls.tree_unflatten(base_aux, base_flatten) return cls(base_dist, low=low) class RightTruncatedDistribution(Distribution): arg_constraints = {"high": constraints.real} reparametrized_params = ["high"] supported_types = (Cauchy, Laplace, Logistic, Normal, SoftLaplace, StudentT) def __init__(self, base_dist, high=0.0, validate_args=None): assert isinstance(base_dist, self.supported_types) assert ( base_dist.support is constraints.real ), "The base distribution should be univariate and have real support." 
batch_shape = lax.broadcast_shapes(base_dist.batch_shape, jnp.shape(high)) self.base_dist = tree_map( lambda p: promote_shapes(p, shape=batch_shape)[0], base_dist ) (self.high,) = promote_shapes(high, shape=batch_shape) self._support = constraints.less_than(high) super().__init__(batch_shape, validate_args=validate_args) @constraints.dependent_property(is_discrete=False, event_dim=0) def support(self): return self._support @lazy_property def _cdf_at_high(self): return self.base_dist.cdf(self.high) def sample(self, key, sample_shape=()): assert is_prng_key(key) u = random.uniform(key, sample_shape + self.batch_shape) return self.base_dist.icdf(u * self._cdf_at_high) @validate_sample def log_prob(self, value): return self.base_dist.log_prob(value) - jnp.log(self._cdf_at_high) def tree_flatten(self): base_flatten, base_aux = self.base_dist.tree_flatten() if isinstance(self._support.upper_bound, (int, float)): return base_flatten, ( type(self.base_dist), base_aux, self._support.upper_bound, ) else: return (base_flatten, self.high), (type(self.base_dist), base_aux) @classmethod def tree_unflatten(cls, aux_data, params): if len(aux_data) == 2: base_flatten, high = params base_cls, base_aux = aux_data else: base_flatten = params base_cls, base_aux, high = aux_data base_dist = base_cls.tree_unflatten(base_aux, base_flatten) return cls(base_dist, high=high) class TwoSidedTruncatedDistribution(Distribution): arg_constraints = {"low": constraints.dependent, "high": constraints.dependent} reparametrized_params = ["low", "high"] supported_types = (Cauchy, Laplace, Logistic, Normal, SoftLaplace, StudentT) def __init__(self, base_dist, low=0.0, high=1.0, validate_args=None): assert isinstance(base_dist, self.supported_types) assert ( base_dist.support is constraints.real ), "The base distribution should be univariate and have real support." 
batch_shape = lax.broadcast_shapes( base_dist.batch_shape, jnp.shape(low), jnp.shape(high) ) self.base_dist = tree_map( lambda p: promote_shapes(p, shape=batch_shape)[0], base_dist ) (self.low,) = promote_shapes(low, shape=batch_shape) (self.high,) = promote_shapes(high, shape=batch_shape) self._support = constraints.interval(low, high) super().__init__(batch_shape, validate_args=validate_args) @constraints.dependent_property(is_discrete=False, event_dim=0) def support(self): return self._support @lazy_property def _tail_prob_at_low(self): # if low < loc, returns cdf(low); otherwise returns 1 - cdf(low) loc = self.base_dist.loc sign = jnp.where(loc >= self.low, 1.0, -1.0) return self.base_dist.cdf(loc - sign * (loc - self.low)) @lazy_property def _tail_prob_at_high(self): # if low < loc, returns cdf(high); otherwise returns 1 - cdf(high) loc = self.base_dist.loc sign = jnp.where(loc >= self.low, 1.0, -1.0) return self.base_dist.cdf(loc - sign * (loc - self.high)) def sample(self, key, sample_shape=()): assert is_prng_key(key) u = random.uniform(key, sample_shape + self.batch_shape) # NB: we use a more numerically stable formula for a symmetric base distribution # A = icdf(cdf(low) + (cdf(high) - cdf(low)) * u) = icdf[(1 - u) * cdf(low) + u * cdf(high)] # will suffer by precision issues when low is large; # If low < loc: # A = icdf[(1 - u) * cdf(low) + u * cdf(high)] # Else # A = 2 * loc - icdf[(1 - u) * cdf(2*loc-low)) + u * cdf(2*loc - high)] loc = self.base_dist.loc sign = jnp.where(loc >= self.low, 1.0, -1.0) return (1 - sign) * loc + sign * self.base_dist.icdf( (1 - u) * self._tail_prob_at_low + u * self._tail_prob_at_high ) @validate_sample def log_prob(self, value): # NB: we use a more numerically stable formula for a symmetric base distribution # if low < loc # cdf(high) - cdf(low) = as-is # if low > loc # cdf(high) - cdf(low) = cdf(2 * loc - low) - cdf(2 * loc - high) sign = jnp.where(self.base_dist.loc >= self.low, 1.0, -1.0) return 
self.base_dist.log_prob(value) - jnp.log( sign * (self._tail_prob_at_high - self._tail_prob_at_low) ) def tree_flatten(self): base_flatten, base_aux = self.base_dist.tree_flatten() if isinstance(self._support.lower_bound, (int, float)) and isinstance( self._support.upper_bound, (int, float) ): return base_flatten, ( type(self.base_dist), base_aux, self._support.lower_bound, self._support.upper_bound, ) else: return (base_flatten, self.low, self.high), (type(self.base_dist), base_aux) @classmethod def tree_unflatten(cls, aux_data, params): if len(aux_data) == 2: base_flatten, low, high = params base_cls, base_aux = aux_data else: base_flatten = params base_cls, base_aux, low, high = aux_data base_dist = base_cls.tree_unflatten(base_aux, base_flatten) return cls(base_dist, low=low, high=high) def TruncatedDistribution(base_dist, low=None, high=None, validate_args=None): """ A function to generate a truncated distribution. :param base_dist: The base distribution to be truncated. This should be a univariate distribution. Currently, only the following distributions are supported: Cauchy, Laplace, Logistic, Normal, and StudentT. :param low: the value which is used to truncate the base distribution from below. Setting this parameter to None to not truncate from below. :param high: the value which is used to truncate the base distribution from above. Setting this parameter to None to not truncate from above. 
""" if high is None: if low is None: return base_dist else: return LeftTruncatedDistribution( base_dist, low=low, validate_args=validate_args ) elif low is None: return RightTruncatedDistribution( base_dist, high=high, validate_args=validate_args ) else: return TwoSidedTruncatedDistribution( base_dist, low=low, high=high, validate_args=validate_args ) class TruncatedCauchy(LeftTruncatedDistribution): arg_constraints = { "low": constraints.real, "loc": constraints.real, "scale": constraints.positive, } reparametrized_params = ["low", "loc", "scale"] def __init__(self, low=0.0, loc=0.0, scale=1.0, validate_args=None): self.low, self.loc, self.scale = promote_shapes(low, loc, scale) super().__init__( Cauchy(self.loc, self.scale), low=self.low, validate_args=validate_args ) @property def mean(self): return jnp.full(self.batch_shape, jnp.nan) @property def variance(self): return jnp.full(self.batch_shape, jnp.nan) def tree_flatten(self): if isinstance(self._support.lower_bound, (int, float)): aux_data = self._support.lower_bound else: aux_data = None return (self.low, self.loc, self.scale), aux_data @classmethod def tree_unflatten(cls, aux_data, params): d = cls(*params) if aux_data is not None: d._support = constraints.greater_than(aux_data) return d class TruncatedNormal(LeftTruncatedDistribution): arg_constraints = { "low": constraints.real, "loc": constraints.real, "scale": constraints.positive, } reparametrized_params = ["low", "loc", "scale"] def __init__(self, low=0.0, loc=0.0, scale=1.0, validate_args=None): self.low, self.loc, self.scale = promote_shapes(low, loc, scale) super().__init__( Normal(self.loc, self.scale), low=self.low, validate_args=validate_args ) @property def mean(self): low_prob = jnp.exp(self.log_prob(self.low)) return self.loc + low_prob * self.scale ** 2 @property def variance(self): low_prob = jnp.exp(self.log_prob(self.low)) return (self.scale ** 2) * ( 1 + (self.low - self.loc) * low_prob - (low_prob * self.scale) ** 2 ) def 
tree_flatten(self): if isinstance(self._support.lower_bound, (int, float)): aux_data = self._support.lower_bound else: aux_data = None return (self.low, self.loc, self.scale), aux_data @classmethod def tree_unflatten(cls, aux_data, params): d = cls(*params) if aux_data is not None: d._support = constraints.greater_than(aux_data) return d class TruncatedPolyaGamma(Distribution): truncation_point = 2.5 num_log_prob_terms = 7 num_gamma_variates = 8 assert num_log_prob_terms % 2 == 1 arg_constraints = {} support = constraints.interval(0.0, truncation_point) def __init__(self, batch_shape=(), validate_args=None): super(TruncatedPolyaGamma, self).__init__( batch_shape, validate_args=validate_args ) def sample(self, key, sample_shape=()): assert is_prng_key(key) denom = jnp.square(jnp.arange(0.5, self.num_gamma_variates)) x = random.gamma( key, jnp.ones(self.batch_shape + sample_shape + (self.num_gamma_variates,)) ) x = jnp.sum(x / denom, axis=-1) return jnp.clip(x * (0.5 / jnp.pi ** 2), a_max=self.truncation_point) @validate_sample def log_prob(self, value): value = value[..., None] all_indices = jnp.arange(0, self.num_log_prob_terms) two_n_plus_one = 2.0 * all_indices + 1.0 log_terms = ( jnp.log(two_n_plus_one) - 1.5 * jnp.log(value) - 0.125 * jnp.square(two_n_plus_one) / value ) even_terms = jnp.take(log_terms, all_indices[::2], axis=-1) odd_terms = jnp.take(log_terms, all_indices[1::2], axis=-1) sum_even = jnp.exp(logsumexp(even_terms, axis=-1)) sum_odd = jnp.exp(logsumexp(odd_terms, axis=-1)) return jnp.log(sum_even - sum_odd) - 0.5 * jnp.log(2.0 * jnp.pi) def tree_flatten(self): return (), self.batch_shape @classmethod def tree_unflatten(cls, aux_data, params): return cls(batch_shape=aux_data)
apache-2.0
-6,576,997,587,028,816,000
35.881188
102
0.599732
false
3.463505
false
false
false
ray6/sdn
simple_django_switch.py
1
1137
import shortest_path_switch import simple_switch from ryu.controller.handler import MAIN_DISPATCHER, set_ev_cls from ryu.controller import ofp_event import os import socket import json from ryu.lib import hub SOCKFILE = '/tmp/hello_sock' class ShortestRestSwitch(simple_switch.SimpleSwitch13): def __init__(self, *args, **kwargs): super(ShortestRestSwitch, self).__init__(*args, **kwargs) self.sock = None self.config = {} self.start_sock_server() def set_vtable(self, host, vlan): if self.vtable[host] != vlan: self.vtable.update({host:vlan}) self.SimpleSwitchDeleteFlow(self.default_datapath, host) def recv_loop(self): print('start loop') while True: print('wait for recev') data = self.sock.recv(1024) print('Receive new vtable from web') print(data) msg = json.loads(data) if msg: print('get msg') for host, vlan in msg.items(): self.set_vtable(host, vlan) def start_sock_server(self): if os.path.exists(SOCKFILE): os.unlink(SOCKFILE) self.sock = hub.socket.socket(hub.socket.AF_UNIX, hub.socket.SOCK_DGRAM) self.sock.bind(SOCKFILE) hub.spawn(self.recv_loop)
mit
-3,184,735,867,046,060,000
23.717391
74
0.708004
false
2.930412
false
false
false
uwcirg/true_nth_usa_portal
portal/migrations/versions/883fd1095361_.py
1
2350
from datetime import timedelta from alembic import op import sqlalchemy as sa from sqlalchemy.orm import sessionmaker from portal.models.audit import Audit from portal.models.role import Role from portal.models.user import User, UserRoles from portal.models.user_consent import UserConsent """Correct user_consent acceptance_date as default arg wasn't updating Revision ID: 883fd1095361 Revises: 67c2bea62313 Create Date: 2018-10-11 12:48:33.980877 """ # revision identifiers, used by Alembic. revision = '883fd1095361' down_revision = '67c2bea62313' Session = sessionmaker() def upgrade(): bind = op.get_bind() session = Session(bind=bind) admin = User.query.filter_by(email='bob25mary@gmail.com').first() admin = admin or User.query.join( UserRoles).join(Role).filter( sa.and_( Role.id == UserRoles.role_id, UserRoles.user_id == User.id, Role.name == 'admin')).first() admin_id = admin.id query = session.query(UserConsent).join( Audit, UserConsent.audit_id == Audit.id).with_entities( UserConsent, Audit.timestamp) eligible_uc_ids = {} for uc, timestamp in query: if uc.acceptance_date.microsecond != 0: # skip the honest ones, that differ by milliseconds if timestamp - uc.acceptance_date < timedelta(seconds=5): continue if timestamp - uc.acceptance_date > timedelta(days=8): raise ValueError( "too big of a jump - please review {} {} {}".format( uc.user_id, timestamp, uc.acceptance_date)) eligible_uc_ids[uc.id] = ( uc.acceptance_date, timestamp.replace(microsecond=0)) # now update each in eligible list outside of initial query for uc_id, dates in eligible_uc_ids.items(): old_acceptance_date, new_acceptance_date = dates msg = "Correct stale default acceptance_date {} to {}".format( old_acceptance_date, new_acceptance_date) uc = session.query(UserConsent).get(uc_id) audit = Audit( user_id=admin_id, subject_id=uc.user_id, context='consent', comment=msg) uc.audit = audit uc.acceptance_date = new_acceptance_date session.commit() def downgrade(): # no value in undoing that mess pass
bsd-3-clause
-7,790,015,832,514,473,000
32.098592
72
0.645957
false
3.637771
false
false
false
sql-machine-learning/sqlflow
python/runtime/dbapi/connection.py
1
6425
# Copyright 2020 The SQLFlow Authors. All rights reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License from abc import ABCMeta, abstractmethod import six from six.moves.urllib.parse import parse_qs, urlparse class ResultSet(six.Iterator): """Base class for DB query result, caller can iteratable this object to get all result rows""" def __init__(self): self._generator = None def __iter__(self): return self def _gen(self): fetch_size = 128 while True: rows = self._fetch(fetch_size) or [] for r in rows: yield r if len(rows) < fetch_size: break def __next__(self): if self._generator is None: self._generator = self._gen() return next(self._generator) @abstractmethod def _fetch(self, fetch_size): """Fetch given count of records in the result set Args: fetch_size: max record to retrive Returns: A list of records, each record is a list represent a row in the result set """ pass def raw_column_info(self): return self.column_info() @abstractmethod def column_info(self): """Get the result column meta, type in the meta maybe DB-specific Returns: A list of column metas, like [(field_a, INT), (field_b, STRING)] """ pass @abstractmethod def success(self): """Return True if the query is success""" return False @abstractmethod def close(self): """Close the ResultSet explicitly, release any resource incurred by this query implementation should support close multi-times""" pass def error(self): """Get the error message if self.success()==False Returns: The error message """ return "" 
@six.add_metaclass(ABCMeta) class Connection(object): """Base class for DB connection Args: conn_uri: a connection uri in the schema://name:passwd@host/path?params format. """ def __init__(self, conn_uri): self.uristr = conn_uri self.uripts = self._parse_uri() self.driver = self.uripts.scheme self.params = parse_qs( self.uripts.query, keep_blank_values=True, ) for k, l in self.params.items(): if len(l) == 1: self.params[k] = l[0] def __enter__(self, *args, **kwargs): return self def __exit__(self, *args, **kwargs): self.close() def param(self, param_name, default_value=""): if not self.params: return default_value return self.params.get(param_name, default_value) def _parse_uri(self): """Parse the connection string into URI parts Returns: A ParseResult, different implementations should always pack the result into ParseResult """ return urlparse(self.uristr) @abstractmethod def _get_result_set(self, statement): """Get the ResultSet for given statement Args: statement: the statement to execute Returns: A ResultSet object """ pass def query(self, statement): """Execute given statement and return a ResultSet Typical usage will be: rs = conn.query("SELECT * FROM a;") result_rows = [r for r in rs] rs.close() Args: statement: the statement to execute Returns: A ResultSet object which is iteratable, each generated record in the iterator is a result-row wrapped by list """ rs = self._get_result_set(statement) if rs.success(): return rs else: raise Exception('Execute "%s" error\n%s' % (statement, rs.error())) def is_query(self, statement): """Return true if the statement is a query SQL statement.""" s = statement.strip() s = s.upper() if s.startswith("SELECT") and s.find("INTO") == -1: return True if s.startswith("SHOW") and s.find("CREATE") >= 0 or s.find( "DATABASES") >= 0 or s.find("TABLES") >= 0: return True if s.startswith("DESC") or s.startswith("EXPLAIN"): return True return False def execute(self, statement): """Execute given statement and return True on success 
Args: statement: the statement to execute Returns: True on success, False otherwise """ rs = None try: rs = self._get_result_set(statement) if rs.success(): # NOTE(sneaxiy): must execute commit! # Otherwise, the `INSERT` statement # would have no effect even though # the connection is closed. self.commit() return True else: raise Exception('Execute "%s" error\n%s' % (statement, rs.error())) finally: if rs is not None: rs.close() def get_table_schema(self, table_name): """Get table schema for given table Args: table_name: name of the table to get schema Returns: A list of (column_name, column_type) tuples """ rs = self.query("SELECT * FROM %s limit 0" % table_name) column_info = rs.column_info() rs.close() return column_info @abstractmethod def close(self): """ Close the connection, implementation should support close multi-times """ pass def commit(self): pass def persist_table(self, table): pass def __del__(self): self.close()
apache-2.0
8,163,382,623,264,675,000
27.056769
86
0.564825
false
4.486732
false
false
false
DemocracyClub/UK-Polling-Stations
polling_stations/apps/data_importers/management/commands/import_neath-pt.py
1
2759
from data_importers.management.commands import BaseHalaroseCsvImporter from django.contrib.gis.geos import Point class Command(BaseHalaroseCsvImporter): council_id = "NTL" addresses_name = ( "2021-03-25T10:36:12.245581/Neath PT polling_station_export-2021-03-23.csv" ) stations_name = ( "2021-03-25T10:36:12.245581/Neath PT polling_station_export-2021-03-23.csv" ) elections = ["2021-05-06"] csv_delimiter = "," def station_record_to_dict(self, record): if record.pollingstationname == "St. Joseph's R.C Church Hall": rec = super().station_record_to_dict(record) rec["location"] = Point(-3.800506, 51.6536899, srid=4326) return rec # Godrergraig Workingmens Club Glanyrafon Road Ystalyfera SA9 2HA if ( record.pollingstationnumber == "22" and record.pollingstationpostcode == "SA9 2HA" ): record = record._replace(pollingstationpostcode="SA9 2DE") # Dyffryn Clydach Memorial Hall The Drive Longford Neath if record.pollingstationname == "Dyffryn Clydach Memorial Hall": record = record._replace(pollingstationpostcode="SA10 7HD") # Clyne Community Centre Clyne Resolven if record.pollingstationname == "Clyne Community Centre": record = record._replace(pollingstationpostcode="SA11 4BP") return super().station_record_to_dict(record) def address_record_to_dict(self, record): uprn = record.uprn.strip().lstrip("0") if uprn in [ "10009182194", # FFRWD VALE BUNGALOW, DWR Y FELIN ROAD, NEATH "100100609132", # 35B PENYWERN ROAD, NEATH "10009177319", # MAES MELYN BUNGALOW, DRUMMAU ROAD, SKEWEN, NEATH "100101040244", # 11A REGENT STREET EAST, NEATH "100100598607", # 134 SHELONE ROAD, NEATH "10023946967", # 134A SHELONE ROAD, BRITON FERRY "100100600419", # 113 GNOLL PARK ROAD, NEATH "100100991905", # BROOKLYN, TONMAWR ROAD, PONTRHYDYFEN, PORT TALBOT "10009184466", # CILGARN FARM COTTAGE, CWMAVON, PORT TALBOT "10009186526", # TYN Y CAEAU, MARGAM ROAD, MARGAM, PORT TALBOT "10014164971", # FLAT TYN-Y-CAEAU MARGAM ROAD, MARGAM "10009184513", # 1 ALMS HOUSE, MARGAM, PORT TALBOT ]: return None if 
record.housepostcode in [ "SA12 8EP", "SA10 9DJ", "SA10 6DE", "SA11 3PW", "SA11 1TS", "SA11 1TW", "SA12 9ST", "SA8 4PX", "SA11 3QE", ]: return None return super().address_record_to_dict(record)
bsd-3-clause
-5,597,754,226,547,029,000
37.319444
83
0.59913
false
3.093049
false
false
false
djgroen/flee-release
datamanager/DataTable.py
1
9598
import sys import numpy as np import csv from datetime import datetime from datetime import timedelta def subtract_dates(date1, date2): """ Takes two dates %Y-%m-%d format. Returns date1 - date2, measured in days. """ date_format = "%Y-%m-%d" a = datetime.strptime(date1, date_format) b = datetime.strptime(date2, date_format) delta = a - b #print(date1,"-",date2,"=",delta.days) return delta.days def steps_to_date(steps, start_date): date_format = "%Y-%m-%d" date_1 = datetime.strptime(start_date, "%Y-%m-%d") new_date = (date_1 + timedelta(days=steps)).date() return new_date def _processEntry(row, table, data_type, date_column, count_column, start_date): """ Code to process a population count from a CSV file. column <date_column> contains the corresponding date in %Y-%m-%d format. column <count_column> contains the population size on that date. """ if len(row) < 2: return table if row[0][0] == "#": return table if row[1]=="": return table # Make sure the date column becomes an integer, which contains the offset in days relative to the start date. row[date_column] = subtract_dates(row[date_column], start_date) if data_type == "int": table = np.vstack([table,[int(row[date_column]), int(row[count_column])]]) else: table = np.vstack([table,[float(row[date_column]), float(row[count_column])]]) return table def AddCSVTables(table1, table2): """ Add two time series tables. This version does not yet support interpolation between values. (The UNHCR data website also does not do this, by the way) """ table = np.zeros([0,2]) offset = 0 last_c2 = np.zeros(([1,2])) for c2 in table2: # If table 2 date value is higher, then keep adding entries from table 1 while c2[0] > table1[offset][0]: table = np.vstack([table,[table1[offset][0], last_c2[1]+table1[offset][1]]]) if(offset < len(table1)-1): offset += 1 else: break # If the two match, add a total. 
if c2[0] == table1[offset][0]: table = np.vstack([table,[c2[0], c2[1]+table1[offset][1]]]) if(offset < len(table1)-1): offset += 1 last_c2 = c2 continue # If table 1 value is higher, add an aggregate entry, and go to the next iteration without increasing the offset. if c2[0] < table1[offset][0]: table = np.vstack([table,[c2[0], c2[1]+table1[offset][1]]]) last_c2 = c2 continue return table def ConvertCsvFileToNumPyTable(csv_name, data_type="int", date_column=0, count_column=1, start_date="2012-02-29"): """ Converts a CSV file to a table with date offsets from 29 feb 2012. CSV format for each line is: yyyy-mm-dd,number Default settings: - subtract_dates is used on column 0. - Use # sign to comment out lines. (first line is NOT ignored by default) """ table = np.zeros([0,2]) with open(csv_name, newline='') as csvfile: values = csv.reader(csvfile) row = next(values) if(len(row)>1): if len(row[0])>0 and "DateTime" not in row[0]: table = _processEntry(row, table, data_type, date_column, count_column, start_date) for row in values: table = _processEntry(row, table, data_type, date_column, count_column, start_date) return table class DataTable: def __init__(self, data_directory="mali2012", data_layout="data_layout_refugee.csv", start_date="2012-02-29", csvformat="generic"): """ read in CSV data files containing refugee data. 
""" self.csvformat = csvformat self.total_refugee_column = 1 self.days_column = 0 self.header = [] self.data_table = [] self.start_date = start_date self.override_refugee_input = False # Use modified input data for FLEE simulations self.override_refugee_input_file = "" self.data_directory = data_directory if self.csvformat=="generic": with open("%s/%s" % (data_directory, data_layout), newline='') as csvfile: values = csv.reader(csvfile) for row in values: if(len(row)>1): if(row[0][0] == "#"): continue self.header.append(row[0]) #print("%s/%s" % (data_directory, row[1])) csv_total = ConvertCsvFileToNumPyTable("%s/%s" % (data_directory, row[1]), start_date=start_date) for added_csv in row[2:]: csv_total = AddCSVTables(csv_total, ConvertCsvFileToNumPyTable("%s/%s" % (data_directory, added_csv), start_date=start_date)) self.data_table.append(csv_total) #print(self.header, self.data_table) def override_input(self, data_file_name): """ Do not use the total refugee count data as the input value, but instead take values from a separate file. """ self.override_refugee_input_file = data_file_name self.override_refugee_input = True self.header.append("total (modified input)") self.data_table.append(ConvertCsvFileToNumPyTable("%s" % (data_file_name), start_date=self.start_date)) def get_daily_difference(self, day, day_column=0, count_column=1, Debug=False, FullInterpolation=True): """ Extrapolate count of new refugees at a given time point, based on input data. count_column = column which contains the relevant difference. FullInterpolation: when disabled, the function ignores any decreases in refugee count. when enabled, the function can return negative numbers when the new total is higher than the older one. """ self.total_refugee_column = count_column self.days_column = day_column ref_table = self.data_table[0] if self.override_refugee_input == True: ref_table = self.data_table[self._find_headerindex("total (modified input)")] # Refugees only come in *after* day 0. 
if int(day) == 0: ref_table = self.data_table[0] new_refugees = 0 for i in self.header[1:]: new_refugees += self.get_field(i, 0, FullInterpolation) #print("Day 0 data:",i,self.get_field(i, 0, FullInterpolation)) return int(new_refugees) else: new_refugees = 0 for i in self.header[1:]: new_refugees += self.get_field(i, day, FullInterpolation) - self.get_field(i, day-1, FullInterpolation) #print self.get_field("Mbera", day), self.get_field("Mbera", day-1) return int(new_refugees) # If the day exceeds the validation data table, then we return 0 return 0 def get_interpolated_data(self, column, day): """ Gets in a given column for a given day. Interpolates between days as needed. """ ref_table = self.data_table[column] old_val = ref_table[0,self.total_refugee_column] #print(ref_table[0][self.days_column]) old_day = ref_table[0,self.days_column] if day <= old_day: return old_val for i in range(1, len(ref_table)): #print(day, ref_table[i][self.days_column]) if day < ref_table[i,self.days_column]: old_val = ref_table[i-1,self.total_refugee_column] old_day = ref_table[i-1,self.days_column] fraction = float(day - old_day) / float(ref_table[i,self.days_column] - old_day) if fraction > 1.0: print("Error with days_column: ", ref_table[i,self.days_column]) return -1 #print(day, old_day, ref_table[i][self.total_refugee_column], old_val) return int(old_val + fraction * float(ref_table[i,self.total_refugee_column] - old_val)) #print("# warning: ref_table length exceeded for column: ",day, self.header[column], ", last ref_table values: ", ref_table[i-1][self.total_refugee_column], ref_table[i][self.days_column]) return int(ref_table[-1,self.total_refugee_column]) def get_raw_data(self, column, day): """ Gets in a given column for a given day. Does not Interpolate. 
""" ref_table = self.data_table[column] old_val = ref_table[0][self.total_refugee_column] old_day = 0 for i in range (0,len(ref_table)): if day >= ref_table[i][self.days_column]: old_val = ref_table[i][self.total_refugee_column] old_day = ref_table[i][self.days_column] else: break return int(old_val) def _find_headerindex(self, name): """ Finds matching index number for a particular name in the list of headers. """ for i in range(0,len(self.header)): if self.header[i] == name: return i print(self.header) sys.exit("Error: can't find the header %s in the header list" % (name)) def get_field(self, name, day, FullInterpolation=True): """ Gets in a given named column for a given day. Interpolates between days if needed. """ i = self._find_headerindex(name) if FullInterpolation: #print(name, i, day, self.get_interpolated_data(i, day)) return self.get_interpolated_data(i, day) else: return self.get_raw_data(i, day) def print_data_values_for_location(self, name, last_day): """ print all data values for selected location. """ for i in range(0,last_day): print(i, self.get_field(name,i)) def is_interpolated(self, name, day): """ Checks if data for a given day is inter/extrapolated or not. """ for i in range(0,len(self.header)): if self.header[i] == name: ref_table = self.data_table[i] for j in range(0, len(ref_table)): if int(day) == int(ref_table[j][self.days_column]): return False if int(day) < int(ref_table[j][self.days_column]): return True return True #def d.correctLevel1Registrations(name, date): # correct for start date.
bsd-3-clause
-6,825,362,137,081,100,000
31.535593
192
0.64055
false
3.294885
false
false
false
victor-rene/bisector
bone.py
1
2297
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Bone. Base component of the StickMan. """ import math from kivy.graphics.context_instructions import PopMatrix, PushMatrix, Rotate from kivy.properties import NumericProperty from kivy.uix.image import Image __author__ = "Victor RENÉ" __copyright__ = "Copyright 2015, bisector" __credits__ = ["Kivy Team"] __license__ = "MIT" __version__ = "0.1" __maintainer__ = "Victor RENÉ" __email__ = "victor-rene@outlook.com" __status__ = "Production" class Bone(Image): angle = NumericProperty() def __init__(self, **kw): super(Bone, self).__init__(**kw) self.name = kw['name'] if 'name' in kw else None self.allow_stretch = True self.keep_ratio = False self.source = 'img/bone.png' self.next = [] self.prev = None self.head = None self.tip = None self.bone_length = 0 self.radius = None with self.canvas.before: PushMatrix() self.rotation = Rotate() with self.canvas.after: PopMatrix() self.bind(pos=self.update, size=self.update, angle=self.rotate) def attach(self, bone): bone.prev = self self.next.append(bone) def attach_all(self, bones): for bone in bones: self.attach(bone) def rotate(self, *args): if self.prev: self.rotation.angle = self.prev.rotation.angle + self.angle else: self.rotation.angle = self.angle self.tip = self.get_tip_pos() for bone in self.next: self.coerce(bone) def update(self, *args): self.radius = self.width / 2 # approximate for head / tip radii self.bone_length = self.height - self.radius * 2 self.head = self.x + self.radius, self.top - self.radius self.tip = self.get_tip_pos() self.rotation.origin = self.head for bone in self.next: self.coerce(bone) def get_tip_pos(self): a = (self.rotation.angle - 90) * math.pi / 180 dx = math.cos(a) * self.bone_length dy = math.sin(a) * self.bone_length return self.x + self.radius + dx, self.top - self.radius + dy def set_head_pos(self, pos): radius = self.width / 2 head_x, head_y = pos self.pos = head_x - radius, head_y - radius - self.bone_length def coerce(self, bone): 
bone.set_head_pos(self.tip) bone.rotate()
mit
6,350,086,245,058,398,000
25.697674
76
0.624401
false
3.17867
false
false
false
sektioneins/sandbox_toolkit
sb2dot/outputdot.py
1
2858
# # sb2dot - a sandbox binary profile to dot convertor for iOS 9 and OS X 10.11 # Copyright (C) 2015 Stefan Esser / SektionEins GmbH <stefan@sektioneins.de> # uses and extends code from Dionysus Blazakis with his permission # # module: outputdot.py # task: cheap .dot file generator # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # import os def dump_node_to_dot(g, u, visited): if visited.has_key(u): return "" tag = g.getTag(u) tag = str(tag) tag = tag.replace("\\", "\\\\") tag = tag.replace("\"", "\\\"") tag = tag.replace("\0", "") edges = list(g.edges[u]) visited[u] = True; out = "n%u [label=\"%s\"];\n" % (u, tag) if len(edges) == 0: return out out+= "n%u -> n%u [color=\"green\"];\n" % (u, edges[0]); out+= "n%u -> n%u [color=\"red\"];\n" % (u, edges[1]); out+=dump_node_to_dot(g, edges[0], visited) out+=dump_node_to_dot(g, edges[1], visited) return out; def dump_to_dot(g, offset, name, cleanname, profile_name): u = offset * 8 visited = {} orig_name = name if len(name) > 128: name = name[0:128] name = name + ".dot" name = name.replace("*", "") name = name.replace(" ", "_") cleanname = cleanname.replace("\\", "\\\\") cleanname = cleanname.replace("\"", "\\\"") cleanname = cleanname.replace("\0", "") profile_name = os.path.basename(profile_name) profile_name = profile_name.replace("\\", "\\\\") profile_name = 
profile_name.replace("\"", "\\\"") profile_name = profile_name.replace("\0", "") f = open(profile_name + "_" + name, 'w') print "[+] generating " + profile_name + "_" + name f.write("digraph sandbox_decision { rankdir=HR; labelloc=\"t\";label=\"sandbox decision graph for\n\n%s\n\nextracted from %s\n\n\n\"; \n" % (cleanname, profile_name)) out = "n0 [label=\"%s\";shape=\"doubleoctagon\"];\n" % (cleanname) out+= "n0 -> n%u [color=\"black\"];\n" % (u); out = out + dump_node_to_dot(g, u, visited) f.write(out) f.write("} \n") f.close()
gpl-2.0
628,843,913,595,146,000
32.433735
170
0.582225
false
3.229379
false
false
false
googlecodelabs/iot-data-pipeline
checkWeather.py
1
3019
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #!/usr/bin/python import time import datetime import json from google.cloud import pubsub from oauth2client.client import GoogleCredentials from Adafruit_BME280 import * from tendo import singleton me = singleton.SingleInstance() # will sys.exit(-1) if other instance is running # constants - change to fit your project and location SEND_INTERVAL = 60 #seconds sensor = BME280(t_mode=BME280_OSAMPLE_8, p_mode=BME280_OSAMPLE_8, h_mode=BME280_OSAMPLE_8) credentials = GoogleCredentials.get_application_default() # change project to your Project ID project="weatherproject" # change topic to your PubSub topic name topic = "weatherdata" # set the following four constants to be indicative of where you are placing your weather sensor sensorID = "s-Googleplex" sensorZipCode = "94043" sensorLat = "37.421655" sensorLong = "-122.085637" def read_sensor(weathersensor): tempF = weathersensor.read_temperature_f() # pascals = sensor.read_pressure() # hectopascals = pascals / 100 pressureInches = weathersensor.read_pressure_inches() dewpoint = weathersensor.read_dewpoint_f() humidity = weathersensor.read_humidity() temp = '{0:0.2f}'.format(tempF) hum = '{0:0.2f}'.format(humidity) dew = '{0:0.2f}'.format(dewpoint) pres = '{0:0.2f}'.format(pressureInches) return (temp, hum, dew, pres) def createJSON(id, timestamp, zip, lat, long, temperature, humidity, dewpoint, pressure): data = { 'sensorID' : id, 'timecollected' : timestamp, 
'zipcode' : zip, 'latitude' : lat, 'longitude' : long, 'temperature' : temperature, 'humidity' : humidity, 'dewpoint' : dewpoint, 'pressure' : pressure } json_str = json.dumps(data) return json_str def main(): publisher = pubsub.PublisherClient() topicName = 'projects/' + project + '/topics/' + topic last_checked = 0 while True: if time.time() - last_checked > SEND_INTERVAL: last_checked = time.time() temp, hum, dew, pres = read_sensor(sensor) currentTime = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') s = ", " weatherJSON = createJSON(sensorID, currentTime, sensorZipCode, sensorLat, sensorLong, temp, hum, dew, pres) try: publisher.publish(topicName, weatherJSON, placeholder='') print weatherJSON except: print "There was an error publishing weather data." time.sleep(0.5) if __name__ == '__main__': main()
apache-2.0
-474,512,927,489,647,500
32.921348
113
0.699901
false
3.419026
false
false
false
GoogleCloudPlatform/declarative-resource-client-library
python/services/storage/object.py
1
13575
# Copyright 2021 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from connector import channel from google3.cloud.graphite.mmv2.services.google.storage import object_pb2 from google3.cloud.graphite.mmv2.services.google.storage import object_pb2_grpc from typing import List class Object(object): def __init__( self, name: str = None, bucket: str = None, generation: int = None, metageneration: int = None, id: str = None, self_link: str = None, content_type: str = None, time_created: str = None, updated: str = None, custom_time: str = None, time_deleted: str = None, temporary_hold: bool = None, event_based_hold: bool = None, retention_expiration_time: str = None, storage_class: str = None, time_storage_class_updated: str = None, size: int = None, md5_hash: str = None, media_link: str = None, metadata: dict = None, owner: dict = None, crc32c: str = None, component_count: int = None, etag: str = None, customer_encryption: dict = None, kms_key_name: str = None, content: str = None, service_account_file: str = "", ): channel.initialize() self.name = name self.bucket = bucket self.content_type = content_type self.custom_time = custom_time self.temporary_hold = temporary_hold self.event_based_hold = event_based_hold self.storage_class = storage_class self.md5_hash = md5_hash self.metadata = metadata self.crc32c = crc32c self.customer_encryption = customer_encryption self.kms_key_name = kms_key_name self.content = content self.service_account_file = 
service_account_file def apply(self): stub = object_pb2_grpc.StorageObjectServiceStub(channel.Channel()) request = object_pb2.ApplyStorageObjectRequest() if Primitive.to_proto(self.name): request.resource.name = Primitive.to_proto(self.name) if Primitive.to_proto(self.bucket): request.resource.bucket = Primitive.to_proto(self.bucket) if Primitive.to_proto(self.content_type): request.resource.content_type = Primitive.to_proto(self.content_type) if Primitive.to_proto(self.custom_time): request.resource.custom_time = Primitive.to_proto(self.custom_time) if Primitive.to_proto(self.temporary_hold): request.resource.temporary_hold = Primitive.to_proto(self.temporary_hold) if Primitive.to_proto(self.event_based_hold): request.resource.event_based_hold = Primitive.to_proto( self.event_based_hold ) if Primitive.to_proto(self.storage_class): request.resource.storage_class = Primitive.to_proto(self.storage_class) if Primitive.to_proto(self.md5_hash): request.resource.md5_hash = Primitive.to_proto(self.md5_hash) if Primitive.to_proto(self.metadata): request.resource.metadata = Primitive.to_proto(self.metadata) if Primitive.to_proto(self.crc32c): request.resource.crc32c = Primitive.to_proto(self.crc32c) if ObjectCustomerEncryption.to_proto(self.customer_encryption): request.resource.customer_encryption.CopyFrom( ObjectCustomerEncryption.to_proto(self.customer_encryption) ) else: request.resource.ClearField("customer_encryption") if Primitive.to_proto(self.kms_key_name): request.resource.kms_key_name = Primitive.to_proto(self.kms_key_name) if Primitive.to_proto(self.content): request.resource.content = Primitive.to_proto(self.content) request.service_account_file = self.service_account_file response = stub.ApplyStorageObject(request) self.name = Primitive.from_proto(response.name) self.bucket = Primitive.from_proto(response.bucket) self.generation = Primitive.from_proto(response.generation) self.metageneration = Primitive.from_proto(response.metageneration) self.id = 
Primitive.from_proto(response.id) self.self_link = Primitive.from_proto(response.self_link) self.content_type = Primitive.from_proto(response.content_type) self.time_created = Primitive.from_proto(response.time_created) self.updated = Primitive.from_proto(response.updated) self.custom_time = Primitive.from_proto(response.custom_time) self.time_deleted = Primitive.from_proto(response.time_deleted) self.temporary_hold = Primitive.from_proto(response.temporary_hold) self.event_based_hold = Primitive.from_proto(response.event_based_hold) self.retention_expiration_time = Primitive.from_proto( response.retention_expiration_time ) self.storage_class = Primitive.from_proto(response.storage_class) self.time_storage_class_updated = Primitive.from_proto( response.time_storage_class_updated ) self.size = Primitive.from_proto(response.size) self.md5_hash = Primitive.from_proto(response.md5_hash) self.media_link = Primitive.from_proto(response.media_link) self.metadata = Primitive.from_proto(response.metadata) self.owner = ObjectOwner.from_proto(response.owner) self.crc32c = Primitive.from_proto(response.crc32c) self.component_count = Primitive.from_proto(response.component_count) self.etag = Primitive.from_proto(response.etag) self.customer_encryption = ObjectCustomerEncryption.from_proto( response.customer_encryption ) self.kms_key_name = Primitive.from_proto(response.kms_key_name) self.content = Primitive.from_proto(response.content) def delete(self): stub = object_pb2_grpc.StorageObjectServiceStub(channel.Channel()) request = object_pb2.DeleteStorageObjectRequest() request.service_account_file = self.service_account_file if Primitive.to_proto(self.name): request.resource.name = Primitive.to_proto(self.name) if Primitive.to_proto(self.bucket): request.resource.bucket = Primitive.to_proto(self.bucket) if Primitive.to_proto(self.content_type): request.resource.content_type = Primitive.to_proto(self.content_type) if Primitive.to_proto(self.custom_time): 
request.resource.custom_time = Primitive.to_proto(self.custom_time) if Primitive.to_proto(self.temporary_hold): request.resource.temporary_hold = Primitive.to_proto(self.temporary_hold) if Primitive.to_proto(self.event_based_hold): request.resource.event_based_hold = Primitive.to_proto( self.event_based_hold ) if Primitive.to_proto(self.storage_class): request.resource.storage_class = Primitive.to_proto(self.storage_class) if Primitive.to_proto(self.md5_hash): request.resource.md5_hash = Primitive.to_proto(self.md5_hash) if Primitive.to_proto(self.metadata): request.resource.metadata = Primitive.to_proto(self.metadata) if Primitive.to_proto(self.crc32c): request.resource.crc32c = Primitive.to_proto(self.crc32c) if ObjectCustomerEncryption.to_proto(self.customer_encryption): request.resource.customer_encryption.CopyFrom( ObjectCustomerEncryption.to_proto(self.customer_encryption) ) else: request.resource.ClearField("customer_encryption") if Primitive.to_proto(self.kms_key_name): request.resource.kms_key_name = Primitive.to_proto(self.kms_key_name) if Primitive.to_proto(self.content): request.resource.content = Primitive.to_proto(self.content) response = stub.DeleteStorageObject(request) @classmethod def list(self, bucket, service_account_file=""): stub = object_pb2_grpc.StorageObjectServiceStub(channel.Channel()) request = object_pb2.ListStorageObjectRequest() request.service_account_file = service_account_file request.Bucket = bucket return stub.ListStorageObject(request).items def to_proto(self): resource = object_pb2.StorageObject() if Primitive.to_proto(self.name): resource.name = Primitive.to_proto(self.name) if Primitive.to_proto(self.bucket): resource.bucket = Primitive.to_proto(self.bucket) if Primitive.to_proto(self.content_type): resource.content_type = Primitive.to_proto(self.content_type) if Primitive.to_proto(self.custom_time): resource.custom_time = Primitive.to_proto(self.custom_time) if Primitive.to_proto(self.temporary_hold): resource.temporary_hold 
= Primitive.to_proto(self.temporary_hold) if Primitive.to_proto(self.event_based_hold): resource.event_based_hold = Primitive.to_proto(self.event_based_hold) if Primitive.to_proto(self.storage_class): resource.storage_class = Primitive.to_proto(self.storage_class) if Primitive.to_proto(self.md5_hash): resource.md5_hash = Primitive.to_proto(self.md5_hash) if Primitive.to_proto(self.metadata): resource.metadata = Primitive.to_proto(self.metadata) if Primitive.to_proto(self.crc32c): resource.crc32c = Primitive.to_proto(self.crc32c) if ObjectCustomerEncryption.to_proto(self.customer_encryption): resource.customer_encryption.CopyFrom( ObjectCustomerEncryption.to_proto(self.customer_encryption) ) else: resource.ClearField("customer_encryption") if Primitive.to_proto(self.kms_key_name): resource.kms_key_name = Primitive.to_proto(self.kms_key_name) if Primitive.to_proto(self.content): resource.content = Primitive.to_proto(self.content) return resource class ObjectOwner(object): def __init__(self, entity: str = None, entity_id: str = None): self.entity = entity self.entity_id = entity_id @classmethod def to_proto(self, resource): if not resource: return None res = object_pb2.StorageObjectOwner() if Primitive.to_proto(resource.entity): res.entity = Primitive.to_proto(resource.entity) if Primitive.to_proto(resource.entity_id): res.entity_id = Primitive.to_proto(resource.entity_id) return res @classmethod def from_proto(self, resource): if not resource: return None return ObjectOwner( entity=Primitive.from_proto(resource.entity), entity_id=Primitive.from_proto(resource.entity_id), ) class ObjectOwnerArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [ObjectOwner.to_proto(i) for i in resources] @classmethod def from_proto(self, resources): return [ObjectOwner.from_proto(i) for i in resources] class ObjectCustomerEncryption(object): def __init__( self, encryption_algorithm: str = None, key_sha256: str = None, key: str = None ): 
self.encryption_algorithm = encryption_algorithm self.key_sha256 = key_sha256 self.key = key @classmethod def to_proto(self, resource): if not resource: return None res = object_pb2.StorageObjectCustomerEncryption() if Primitive.to_proto(resource.encryption_algorithm): res.encryption_algorithm = Primitive.to_proto(resource.encryption_algorithm) if Primitive.to_proto(resource.key_sha256): res.key_sha256 = Primitive.to_proto(resource.key_sha256) if Primitive.to_proto(resource.key): res.key = Primitive.to_proto(resource.key) return res @classmethod def from_proto(self, resource): if not resource: return None return ObjectCustomerEncryption( encryption_algorithm=Primitive.from_proto(resource.encryption_algorithm), key_sha256=Primitive.from_proto(resource.key_sha256), key=Primitive.from_proto(resource.key), ) class ObjectCustomerEncryptionArray(object): @classmethod def to_proto(self, resources): if not resources: return resources return [ObjectCustomerEncryption.to_proto(i) for i in resources] @classmethod def from_proto(self, resources): return [ObjectCustomerEncryption.from_proto(i) for i in resources] class Primitive(object): @classmethod def to_proto(self, s): if not s: return "" return s @classmethod def from_proto(self, s): return s
apache-2.0
-7,514,142,920,335,237,000
38.577259
88
0.654733
false
3.835829
false
false
false
vespero89/Snoring_Challenge
feat_extraction/spectrogram_extraction.py
1
5018
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Fri Feb 3 16:00:29 2017 @author: daniele """ #import matplotlib.pyplot as plt import numpy as np from scipy import signal import librosa from os import walk, path, makedirs def wav_file_list(source): # list all file in source directory filenames = [] for (dirpath, dirnames, filenames) in walk(source): break # drop all non wav file wav_filenames = [f for f in filenames if f.lower().endswith('.wav')] return wav_filenames # calcola uno spettrogramma def spectrogram(filepath, fs, N, overlap, win_type='hamming'): # Load an audio file as a floating point time series x, fs = librosa.core.load(filepath,sr=fs) # Returns: np.ndarray [shape=(1 + n_fft/2, t), dtype=dtype], dtype=64-bit complex X = librosa.core.stft(x, n_fft=N, window=signal.get_window(win_type,N), hop_length=N-overlap, center=False) #Sxx = np.abs(X)**2 Sxx = librosa.logamplitude(np.abs(X)**2,ref_power=np.max) return Sxx # estrae gli spettrogrammi dai file contenuti in source e li salva in dest def extract_spectrograms(source, dest, fs, N, overlap, win_type='hamming'): wav_filenames = wav_file_list(source) for w in wav_filenames: Sxx=spectrogram(path.join(source,w), fs, N, overlap, win_type) np.save(path.join(dest,w[0:-4]),Sxx) # calcola i mel, i delta e i delta-deltas def log_mel(filepath, fs, N, overlap, win_type='hamming', n_mels=128, fmin=0.0, fmax=None, htk=True): coefficients = [] # Load an audio file as a floating point time series x, fs = librosa.core.load(filepath,sr=fs) # Power spectrum S = np.abs(librosa.core.stft(x, n_fft=N, window=signal.get_window(win_type,N), hop_length=N-overlap, center=False))**2 # Build a Mel filter mel_basis = librosa.filters.mel(fs, N, n_mels, fmin, fmax, htk) # Filtering mel_filtered = np.dot(mel_basis, S) mel_filtered = librosa.logamplitude(mel_filtered) coefficients.append(mel_filtered) # add delta e delta-deltas #coefficients.append(librosa.feature.delta(mel_filtered, delta_width*2+1, order=1, axis=-1)) 
#coefficients.append(librosa.feature.delta(mel_filtered, delta_width*2+1, order=2, axis=-1)) return coefficients def mfcc_e(filepath, fs, N, overlap, n_mels=26, fmin=0.0, fmax=None, htk=True, delta_width=None): x, fs = librosa.core.load(filepath, sr=fs) mfcc = librosa.feature.mfcc(y=x, sr=fs, n_mfcc=n_mels, hop_length=N - overlap, fmin=fmin, fmax=fmax, htk=htk) delta = librosa.feature.delta(mfcc, delta_width * 2 + 1, order=1, axis=-1) acc = librosa.feature.delta(mfcc, delta_width * 2 + 1, order=2, axis=-1) coefficients = np.vstack((mfcc,delta,acc)) coefficients = mfcc return coefficients # estrae i mel e ci calcola i delta e i delta-deltas dai file contenuti in source e li salva in dest # per la versione log è sufficiente eseguire librosa.logamplitude(.) alla singola sottomatrice o all'intera matrice def extract_log_mel(source, dest, fs, N, overlap, win_type='hamming', n_mels=128, fmin=0.0, fmax=None, htk=True): wav_filenames = wav_file_list(source) for w in wav_filenames: mels=log_mel(path.join(source,w), fs, N, overlap, win_type, n_mels, fmin, fmax, htk) np.save(path.join(dest,w[0:-4]),mels) # calcola i m, i delta e i delta-deltas def extract_MFCC(source, dest, fs, n_mels, N, overlap, fmin, fmax, htk, delta_width): wav_filenames = wav_file_list(source) for w in wav_filenames: mfcc=mfcc_e(path.join(source,w), fs, N, overlap, n_mels, fmin, fmax, htk, delta_width) x, fs = librosa.core.load(path.join(source,w), sr=fs) centr=librosa.feature.spectral_centroid(y=x, sr=fs, n_fft=2048, hop_length=N-overlap, freq=None) mfcc = np.vstack((mfcc,centr)) #zcr=librosa.feature.zero_crossing_rate(y=x, frame_length=N, hop_length=N-overlap, center=True) #mfcc = np.vstack((mfcc, zcr)) np.save(path.join(dest, w[0:-4]), mfcc) if __name__ == "__main__": root_dir = path.realpath('../../') wav_dir_path = path.join(root_dir,'wav') dest_path_spec=path.join(root_dir,'dataset','spectrograms') dest_path_log_mel=path.join(root_dir,'dataset','logmel_NEW') dest_path_mfcc = path.join(root_dir, 
'dataset', 'MFCC_D_A') dest_path = path.join(root_dir, 'dataset', 'MFCC_CENTR') if (not path.exists(dest_path)): makedirs(dest_path) window_type = 'hamming' fft_length = 256 window_length = 480 overlap = 160 Fs = 16000 n_mels = 26 fmin=0.0 fmax=Fs/2 htk=True delta_width=2 # extract_spectrograms(wav_dir_path, dest_path_spec, Fs, fft_length, overlap, window_type) # extract_log_mel(wav_dir_path, dest_path_log_mel, Fs, window_length, overlap, window_type, n_mels, fmin, fmax, htk) extract_MFCC(wav_dir_path, dest_path, Fs, n_mels, window_length, overlap, fmin, fmax, htk, delta_width) import os print(os.path.realpath('.'))
gpl-3.0
-6,006,183,952,996,391,000
36.162963
122
0.664142
false
2.752057
false
false
false
Andre-Tan/Pincer
pincer/objects/Sequence.py
1
1818
from Bio import SeqIO from os import path from sys import exit from pincer.objects.Contig import Contig class FileNotInPathException(Exception): pass class NotFastaException(Exception): pass class Sequence(object): def __init__(self, filename): self.file, self.name = self.get_file_and_name(filename) self.contigs = self.iterateAndAppend_toContigs(filename) def __repr__(self): string = "Sequence {} with {} contigs totaling {}bps" return string.format(self.name, self.get_contig_number(), self.get_total_seq_length()) def __len__(self): return self.get_total_seq_length() def __iter__(self): return iter(self.contigs) def __getitem__(self, i): return self.contigs[i] def get_contig_number(self): return len(self.contigs) def get_total_seq_length(self): lengths = map(len, self.contigs) return sum(lengths) def get_file_and_name(self, filename): if not path.isfile(filename): raise FileNotInPathException("{} is not a file!".format(filename)) exit(1) file = filename.split("/")[-1] type = file[file.index(".")+1:] name = file[0:file.index(".")] if type not in ["fasta", "fa"]: raise NotFastaException("{} is not a fasta file!".format(filename)) exit(1) return file, name def iterateAndAppend_toContigs(self, filename): tmp_contigs = [] with open(filename, "r") as handle: for record in SeqIO.parse(handle, "fasta"): tmp_contigs.append(Contig(record.id, record.seq)) return tmp_contigs
gpl-3.0
626,050,070,194,473,500
27.836066
94
0.562156
false
3.892934
false
false
false
datamade/la-metro-councilmatic
lametro/management/commands/refresh_guid.py
1
5846
from collections import ChainMap from itertools import chain from django.core.management.base import BaseCommand from django.conf import settings from django.db.utils import IntegrityError from legistar.bills import LegistarAPIBillScraper from opencivicdata.legislative.models import Bill from lametro.models import LAMetroSubject from lametro.smartlogic import SmartLogic class ClassificationMixin: DEFAULT_FACET = 'topics_exact' FACET_CLASSES = { 'bill_type_exact': ( 'Board Report Type', ), 'lines_and_ways_exact': ( 'Transportation Method', 'Bus Line', 'Bus Way', 'Rail Line', ), 'phase_exact': ( 'Transportation Phase', ), 'project_exact': ( 'Project', 'Project Development', 'Project Finance', 'Capital Project', 'Construction Project', 'Grant Project', 'Other Working Project', ), 'metro_location_exact': ( 'All Transportation Locations', 'Alignment', 'Division', 'Employee Parking Lot', 'Pank ‘n’ Ride', 'Radio Station', 'Route', 'Station', 'Surplus, Temporary And Miscellaneous Property', 'Terminal', 'Transportation Location', ), 'geo_admin_location_exact': ( 'All Location', 'Administrative Division', 'Electoral Districts', 'Sector', 'Corridor', 'Geographic Location', 'City', 'Country', 'County', 'Neighborhood', 'State', 'Unincorporated Area', 'Point of Interest', 'Subregion', ), 'significant_date_exact': ( 'Dates', ), 'motion_by_exact': ( 'Board Member', ), 'plan_program_policy_exact': ( 'Plan', 'Program', 'Policy' ), } @property def smartlogic(self): if not hasattr(self, '_smartlogic'): self._smartlogic = SmartLogic(settings.SMART_LOGIC_KEY) return self._smartlogic @property def classifications(self): if not hasattr(self, '_classifications'): self._classifications = ChainMap(*[ {subject: facet for subject in list(self.get_subjects_from_classes(facet, classes))} for facet, classes in self.FACET_CLASSES.items() ]) return self._classifications def get_subjects_from_classes(self, facet_name, classes): self.stdout.write('Getting {}'.format(facet_name)) # Per Steve from 
SmartLogic, "multiple filters can be combined into an # OR type filter". So, string all classes together to query for terms # belonging to any of them. # # Use an array of tuples instead of a dictionary, because each param # uses the FILTER key (and dictionaries can't contain duplicate keys). params = [('FILTER', 'CL={}'.format(cls)) for cls in classes] params.append(('FILTER', 'AT=System: Legistar')) response = self.smartlogic.terms(params) yield from (t['term']['name'] for t in response['terms']) class Command(BaseCommand, ClassificationMixin): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.legistar = LegistarAPIBillScraper() self.legistar.BASE_URL = 'https://webapi.legistar.com/v1/metro' self.legistar.retry_attempts = 0 self.legistar.requests_per_minute = 0 def handle(self, *args, **options): current_topics = set(chain(*Bill.objects.values_list('subject', flat=True))) # Delete topics no longer associated with any bills. deleted, _ = LAMetroSubject.objects.exclude(name__in=current_topics).delete() self.stdout.write('Removed {0} stale topics'.format(deleted)) # Create LAMetroSubject instances for all existing topics. Subjects are # unique on name. Ignore conflicts so we can bulk create instances # without querying for or introducing duplicates. LAMetroSubject.objects.bulk_create([ LAMetroSubject(name=s, classification=self.DEFAULT_FACET) for s in current_topics ], ignore_conflicts=True) for_update = [] for topic in self.legistar.topics(): try: subject = LAMetroSubject.objects.get(name=topic['IndexName']) except LAMetroSubject.DoesNotExist: # The database only contains topics that are related to at least # one bill. By contrast, the API contains all topics, regardless # of whether they're currently in use. Skip unused topics. 
pass else: self.stdout.write('Updating {}'.format(subject)) subject.guid = topic['api_metadata'] subject.classification = self.classifications.get(subject.name, self.DEFAULT_FACET) self.stdout.write('Classification: {}'.format(subject.classification)) for_update.append(subject) LAMetroSubject.objects.bulk_update(for_update, ['guid', 'classification']) update_count = len(for_update) topic_count = LAMetroSubject.objects.count() try: assert update_count == topic_count except AssertionError: raise AssertionError('Updated only {0} of {1} total topics'.format(update_count, topic_count)) else: self.stdout.write('Updated all {0} topics'.format(topic_count))
mit
-3,789,554,340,112,683,500
32.965116
106
0.575145
false
4.366218
false
false
false
scrbrd/scoreboard
model/api/loader.py
1
6251
""" Module: loader Provide a Sqoreboard API for loading graph data into Sqoreboard objects. Provides: def load_node def load_node_by_unique_property def load_nodes_by_property def load_edge def load_edges def load_neighbors """ from model.graph import GraphOutputError from model.graph import reader import sqfactory # TODO make this a Singleton object and create a reference to SqFactory # so we don't grab it all over the place. def load_node(node_id): """ Return a SqNode subclass for the given id. Wrap a call to a Graph API that returns a GraphNode and call on sqfactory to parse it into a SqNode subclass. Required: id node_id id of node to fetch Returns: SqNode single instance of concrete SqNode subclass """ node = None try: graph_node = reader.get_node(node_id) if graph_node: factory = sqfactory.get_factory() node = factory.construct_node_and_edges(graph_node) except GraphOutputError as e: #logger.debug(e.reason) print e.reason return node def load_node_by_unique_property(key, value, node_type_return_filter=None): """ Return a single SqNode for a property and node type filter. Wrap a call to load_nodes_by_property(). pop and return the first and only node returned. The sqfactory module will parse it into a SqNode subclass. This should only be used for combinations of property key/value and return node type which the caller knows to be restricted to exactly one stored node. The canonical example is guaranteeing unique email address or third party ID in User storage. Ideally, this property would be indexed in the database. In fact, the underlying graph or data layer may impose this restriction on queries and throw an error if no index exists for this property. 
Required: str key property to look up mixed value property value to look up Optional: list node_type_return_filter node types to filter for Returns: SqNode single SqNode instance """ nodes = load_nodes_by_property(key, value, node_type_return_filter) return nodes.values()[0] if nodes else None def load_nodes_by_property(key, value, node_type_return_filter=None): """ Return a list of SqNodes for a given property and node type. Wrap a call to a Graph API that returns a GraphNode and call on sqfactory to parse it into a SqNode subclass. Ideally, this property would be indexed in the database. In fact, the underlying graph or data layer may impose this restriction on queries and throw an error if no index exists for this property. Required: str key property key to look up mixed value property value to look up Optional: list node_type_return_filter list of SqNode types to return Returns: dict SqNodes keyed on ID (or None) """ nodes = None try: graph_nodes = reader.get_nodes_by_index( key, value, node_type_return_filter) nodes = {} factory = sqfactory.get_factory() for id, graph_node in graph_nodes.items(): nodes[id] = factory.construct_node_and_edges(graph_node) except GraphOutputError as e: #logger.debug(e.reason) print e.reason return nodes def load_edge(edge_id): """ Return a SqEdge subclass for the given id. Wrap a call to a Graph API that returns a GraphEdge and call on sqfactory to parse it into a SqEdge subclass. Required: id edge_id id of edge to fetch Returns: SqEdge single instance of concrete SqEdge subclass """ edge = None try: factory = sqfactory.get_factory() edge = factory.construct_edge(reader.get_edge(edge_id)) except GraphOutputError as e: #logger.debug(e.reason) print e.reason return edge def load_edges(node_id): """ Return a dict of SqEdge subclasses for the given SqNode ID. Wrap a call to a Graph API that returns a GraphEdge and call on sqfactory to parse it into a SqEdge subclass. 
Required: id node_id id of edge to fetch Returns: dict concrete SqEdge subclasses keyed on ID """ edges = None try: graph_node = reader.get_node(node_id) edges = {} factory = sqfactory.get_factory() for id, graph_edge in graph_node.edges().items(): edges[id] = factory.construct_edge(graph_edge) except GraphOutputError as e: #logger.debug(e.reason) print e.reason return edges def load_neighbors( node_id, edge_type_pruner=None, node_type_return_filter=None): """ Load a SqNode and its specified SqEdges and neighbor SqNodes. Required: id node_id SqNode id Optional: list edge_type_pruner list of SqEdge types to traverse list node_type_return_filter list of SqNode types to return Returns: tuple (SqNode, dict) => (start, neighbors) """ node = None neighbor_nodes = None try: # get node, outgoing edges, neighbor nodes graph_path = reader.get_path_to_neighbor_nodes( node_id, edge_type_pruner, node_type_return_filter) # load nodes and edges into SqNodes and SqEdges factory = sqfactory.get_factory() node = factory.construct_node_and_edges(graph_path.get_start_node()) neighbor_nodes = {} for id, graph_node in graph_path.get_neighbor_nodes().items(): neighbor_nodes[id] = factory.construct_node_and_edges(graph_node) except GraphOutputError as e: #logger.debug(e.reason) print e.reason # TODO: this only works for depth-1 queries because of graph fan-out, so # we need something different for queries of depth-2 and up. return (node, neighbor_nodes)
mit
2,964,769,322,987,463,700
26.416667
77
0.629019
false
4.090969
false
false
false
nowls/gnuradio
grc/core/Param.py
4
28068
""" Copyright 2008-2015 Free Software Foundation, Inc. This file is part of GNU Radio GNU Radio Companion is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. GNU Radio Companion is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA """ import ast import weakref import re from . import Constants from .Constants import VECTOR_TYPES, COMPLEX_TYPES, REAL_TYPES, INT_TYPES from .Element import Element from .utils import odict # Blacklist certain ids, its not complete, but should help import __builtin__ ID_BLACKLIST = ['self', 'options', 'gr', 'blks2', 'wxgui', 'wx', 'math', 'forms', 'firdes'] + dir(__builtin__) try: from gnuradio import gr ID_BLACKLIST.extend(attr for attr in dir(gr.top_block()) if not attr.startswith('_')) except ImportError: pass _check_id_matcher = re.compile('^[a-z|A-Z]\w*$') _show_id_matcher = re.compile('^(variable\w*|parameter|options|notebook)$') def _get_keys(lst): return [elem.get_key() for elem in lst] def _get_elem(lst, key): try: return lst[_get_keys(lst).index(key)] except ValueError: raise ValueError('Key "{0}" not found in {1}.'.format(key, _get_keys(lst))) def num_to_str(num): """ Display logic for numbers """ def eng_notation(value, fmt='g'): """Convert a number to a string in engineering notation. 
E.g., 5e-9 -> 5n""" template = '{0:' + fmt + '}{1}' magnitude = abs(value) for exp, symbol in zip(range(9, -15-1, -3), 'GMk munpf'): factor = 10 ** exp if magnitude >= factor: return template.format(value / factor, symbol.strip()) return template.format(value, '') if isinstance(num, COMPLEX_TYPES): num = complex(num) # Cast to python complex if num == 0: return '0' output = eng_notation(num.real) if num.real else '' output += eng_notation(num.imag, '+g' if output else 'g') + 'j' if num.imag else '' return output else: return str(num) class Option(Element): def __init__(self, param, n): Element.__init__(self, param) self._name = n.find('name') self._key = n.find('key') self._opts = dict() opts = n.findall('opt') # Test against opts when non enum if not self.get_parent().is_enum() and opts: raise Exception('Options for non-enum types cannot have sub-options') # Extract opts for opt in opts: # Separate the key:value try: key, value = opt.split(':') except: raise Exception('Error separating "{0}" into key:value'.format(opt)) # Test against repeated keys if key in self._opts: raise Exception('Key "{0}" already exists in option'.format(key)) # Store the option self._opts[key] = value def __str__(self): return 'Option {0}({1})'.format(self.get_name(), self.get_key()) def get_name(self): return self._name def get_key(self): return self._key ############################################## # Access Opts ############################################## def get_opt_keys(self): return self._opts.keys() def get_opt(self, key): return self._opts[key] def get_opts(self): return self._opts.values() class TemplateArg(object): """ A cheetah template argument created from a param. The str of this class evaluates to the param's to code method. The use of this class as a dictionary (enum only) will reveal the enum opts. The __call__ or () method can return the param evaluated to a raw python data type. 
""" def __init__(self, param): self._param = weakref.proxy(param) def __getitem__(self, item): return str(self._param.get_opt(item)) if self._param.is_enum() else NotImplemented def __str__(self): return str(self._param.to_code()) def __call__(self): return self._param.get_evaluated() class Param(Element): is_param = True def __init__(self, block, n): """ Make a new param from nested data. Args: block: the parent element n: the nested odict """ # If the base key is a valid param key, copy its data and overlay this params data base_key = n.find('base_key') if base_key and base_key in block.get_param_keys(): n_expanded = block.get_param(base_key)._n.copy() n_expanded.update(n) n = n_expanded # Save odict in case this param will be base for another self._n = n # Parse the data self._name = n.find('name') self._key = n.find('key') value = n.find('value') or '' self._type = n.find('type') or 'raw' self._hide = n.find('hide') or '' self._tab_label = n.find('tab') or block.get_param_tab_labels()[0] if self._tab_label not in block.get_param_tab_labels(): block.get_param_tab_labels().append(self._tab_label) # Build the param Element.__init__(self, block) # Create the Option objects from the n data self._options = list() self._evaluated = None for option in map(lambda o: Option(param=self, n=o), n.findall('option')): key = option.get_key() # Test against repeated keys if key in self.get_option_keys(): raise Exception('Key "{0}" already exists in options'.format(key)) # Store the option self.get_options().append(option) # Test the enum options if self.is_enum(): # Test against options with identical keys if len(set(self.get_option_keys())) != len(self.get_options()): raise Exception('Options keys "{0}" are not unique.'.format(self.get_option_keys())) # Test against inconsistent keys in options opt_keys = self.get_options()[0].get_opt_keys() for option in self.get_options(): if set(opt_keys) != set(option.get_opt_keys()): raise Exception('Opt keys "{0}" are not identical 
across all options.'.format(opt_keys)) # If a value is specified, it must be in the options keys if value or value in self.get_option_keys(): self._value = value else: self._value = self.get_option_keys()[0] if self.get_value() not in self.get_option_keys(): raise Exception('The value "{0}" is not in the possible values of "{1}".'.format(self.get_value(), self.get_option_keys())) else: self._value = value or '' self._default = value self._init = False self._hostage_cells = list() self.template_arg = TemplateArg(self) def get_types(self): return ( 'raw', 'enum', 'complex', 'real', 'float', 'int', 'complex_vector', 'real_vector', 'float_vector', 'int_vector', 'hex', 'string', 'bool', 'file_open', 'file_save', '_multiline', '_multiline_python_external', 'id', 'stream_id', 'grid_pos', 'notebook', 'gui_hint', 'import', ) def __repr__(self): """ Get the repr (nice string format) for this param. Returns: the string representation """ ################################################## # Truncate helper method ################################################## def _truncate(string, style=0): max_len = max(27 - len(self.get_name()), 3) if len(string) > max_len: if style < 0: # Front truncate string = '...' + string[3-max_len:] elif style == 0: # Center truncate string = string[:max_len/2 - 3] + '...' + string[-max_len/2:] elif style > 0: # Rear truncate string = string[:max_len-3] + '...' 
return string ################################################## # Simple conditions ################################################## if not self.is_valid(): return _truncate(self.get_value()) if self.get_value() in self.get_option_keys(): return self.get_option(self.get_value()).get_name() ################################################## # Split up formatting by type ################################################## # Default center truncate truncate = 0 e = self.get_evaluated() t = self.get_type() if isinstance(e, bool): return str(e) elif isinstance(e, COMPLEX_TYPES): dt_str = num_to_str(e) elif isinstance(e, VECTOR_TYPES): # Vector types if len(e) > 8: # Large vectors use code dt_str = self.get_value() truncate = 1 else: # Small vectors use eval dt_str = ', '.join(map(num_to_str, e)) elif t in ('file_open', 'file_save'): dt_str = self.get_value() truncate = -1 else: # Other types dt_str = str(e) # Done return _truncate(dt_str, truncate) def __repr2__(self): """ Get the repr (nice string format) for this param. Returns: the string representation """ if self.is_enum(): return self.get_option(self.get_value()).get_name() return self.get_value() def __str__(self): return 'Param - {0}({1})'.format(self.get_name(), self.get_key()) def get_color(self): """ Get the color that represents this param's type. Returns: a hex color code. 
""" try: return { # Number types 'complex': Constants.COMPLEX_COLOR_SPEC, 'real': Constants.FLOAT_COLOR_SPEC, 'float': Constants.FLOAT_COLOR_SPEC, 'int': Constants.INT_COLOR_SPEC, # Vector types 'complex_vector': Constants.COMPLEX_VECTOR_COLOR_SPEC, 'real_vector': Constants.FLOAT_VECTOR_COLOR_SPEC, 'float_vector': Constants.FLOAT_VECTOR_COLOR_SPEC, 'int_vector': Constants.INT_VECTOR_COLOR_SPEC, # Special 'bool': Constants.INT_COLOR_SPEC, 'hex': Constants.INT_COLOR_SPEC, 'string': Constants.BYTE_VECTOR_COLOR_SPEC, 'id': Constants.ID_COLOR_SPEC, 'stream_id': Constants.ID_COLOR_SPEC, 'grid_pos': Constants.INT_VECTOR_COLOR_SPEC, 'notebook': Constants.INT_VECTOR_COLOR_SPEC, 'raw': Constants.WILDCARD_COLOR_SPEC, }[self.get_type()] except: return '#FFFFFF' def get_hide(self): """ Get the hide value from the base class. Hide the ID parameter for most blocks. Exceptions below. If the parameter controls a port type, vlen, or nports, return part. If the parameter is an empty grid position, return part. These parameters are redundant to display in the flow graph view. Returns: hide the hide property string """ hide = self.get_parent().resolve_dependencies(self._hide).strip() if hide: return hide # Hide ID in non variable blocks if self.get_key() == 'id' and not _show_id_matcher.match(self.get_parent().get_key()): return 'part' # Hide port controllers for type and nports if self.get_key() in ' '.join(map(lambda p: ' '.join([p._type, p._nports]), self.get_parent().get_ports())): return 'part' # Hide port controllers for vlen, when == 1 if self.get_key() in ' '.join(map( lambda p: p._vlen, self.get_parent().get_ports()) ): try: if int(self.get_evaluated()) == 1: return 'part' except: pass # Hide empty grid positions if self.get_key() in ('grid_pos', 'notebook') and not self.get_value(): return 'part' return hide def validate(self): """ Validate the param. The value must be evaluated and type must a possible type. 
""" Element.validate(self) if self.get_type() not in self.get_types(): self.add_error_message('Type "{0}" is not a possible type.'.format(self.get_type())) self._evaluated = None try: self._evaluated = self.evaluate() except Exception, e: self.add_error_message(str(e)) def get_evaluated(self): return self._evaluated def evaluate(self): """ Evaluate the value. Returns: evaluated type """ self._init = True self._lisitify_flag = False self._stringify_flag = False self._hostage_cells = list() t = self.get_type() v = self.get_value() ######################### # Enum Type ######################### if self.is_enum(): return v ######################### # Numeric Types ######################### elif t in ('raw', 'complex', 'real', 'float', 'int', 'hex', 'bool'): # Raise exception if python cannot evaluate this value try: e = self.get_parent().get_parent().evaluate(v) except Exception, e: raise Exception('Value "{0}" cannot be evaluated:\n{1}'.format(v, e)) # Raise an exception if the data is invalid if t == 'raw': return e elif t == 'complex': if not isinstance(e, COMPLEX_TYPES): raise Exception('Expression "{0}" is invalid for type complex.'.format(str(e))) return e elif t == 'real' or t == 'float': if not isinstance(e, REAL_TYPES): raise Exception('Expression "{0}" is invalid for type float.'.format(str(e))) return e elif t == 'int': if not isinstance(e, INT_TYPES): raise Exception('Expression "{0}" is invalid for type integer.'.format(str(e))) return e elif t == 'hex': return hex(e) elif t == 'bool': if not isinstance(e, bool): raise Exception('Expression "{0}" is invalid for type bool.'.format(str(e))) return e else: raise TypeError('Type "{0}" not handled'.format(t)) ######################### # Numeric Vector Types ######################### elif t in ('complex_vector', 'real_vector', 'float_vector', 'int_vector'): if not v: # Turn a blank string into an empty list, so it will eval v = '()' # Raise exception if python cannot evaluate this value try: e = 
self.get_parent().get_parent().evaluate(v) except Exception, e: raise Exception('Value "{0}" cannot be evaluated:\n{1}'.format(v, e)) # Raise an exception if the data is invalid if t == 'complex_vector': if not isinstance(e, VECTOR_TYPES): self._lisitify_flag = True e = [e] if not all([isinstance(ei, COMPLEX_TYPES) for ei in e]): raise Exception('Expression "{0}" is invalid for type complex vector.'.format(str(e))) return e elif t == 'real_vector' or t == 'float_vector': if not isinstance(e, VECTOR_TYPES): self._lisitify_flag = True e = [e] if not all([isinstance(ei, REAL_TYPES) for ei in e]): raise Exception('Expression "{0}" is invalid for type float vector.'.format(str(e))) return e elif t == 'int_vector': if not isinstance(e, VECTOR_TYPES): self._lisitify_flag = True e = [e] if not all([isinstance(ei, INT_TYPES) for ei in e]): raise Exception('Expression "{0}" is invalid for type integer vector.'.format(str(e))) return e ######################### # String Types ######################### elif t in ('string', 'file_open', 'file_save', '_multiline', '_multiline_python_external'): # Do not check if file/directory exists, that is a runtime issue try: e = self.get_parent().get_parent().evaluate(v) if not isinstance(e, str): raise Exception() except: self._stringify_flag = True e = str(v) if t == '_multiline_python_external': ast.parse(e) # Raises SyntaxError return e ######################### # Unique ID Type ######################### elif t == 'id': # Can python use this as a variable? 
if not _check_id_matcher.match(v): raise Exception('ID "{0}" must begin with a letter and may contain letters, numbers, and underscores.'.format(v)) ids = [param.get_value() for param in self.get_all_params(t, 'id')] if v in ID_BLACKLIST: raise Exception('ID "{0}" is blacklisted.'.format(v)) if self._key == 'id': # Id should only appear once, or zero times if block is disabled if ids.count(v) > 1: raise Exception('ID "{0}" is not unique.'.format(v)) else: # Id should exist to be a reference if ids.count(v) < 1: raise Exception('ID "{0}" does not exist.'.format(v)) return v ######################### # Stream ID Type ######################### elif t == 'stream_id': # Get a list of all stream ids used in the virtual sinks ids = [param.get_value() for param in filter( lambda p: p.get_parent().is_virtual_sink(), self.get_all_params(t), )] # Check that the virtual sink's stream id is unique if self.get_parent().is_virtual_sink(): # Id should only appear once, or zero times if block is disabled if ids.count(v) > 1: raise Exception('Stream ID "{0}" is not unique.'.format(v)) # Check that the virtual source's steam id is found if self.get_parent().is_virtual_source(): if v not in ids: raise Exception('Stream ID "{0}" is not found.'.format(v)) return v ######################### # GUI Position/Hint ######################### elif t == 'gui_hint': if ':' in v: tab, pos = v.split(':') elif '@' in v: tab, pos = v, '' else: tab, pos = '', v if '@' in tab: tab, index = tab.split('@') else: index = '?' # TODO: Problem with this code. Produces bad tabs widget_str = ({ (True, True): 'self.%(tab)s_grid_layout_%(index)s.addWidget(%(widget)s, %(pos)s)', (True, False): 'self.%(tab)s_layout_%(index)s.addWidget(%(widget)s)', (False, True): 'self.top_grid_layout.addWidget(%(widget)s, %(pos)s)', (False, False): 'self.top_layout.addWidget(%(widget)s)', }[bool(tab), bool(pos)]) % {'tab': tab, 'index': index, 'widget': '%s', 'pos': pos} # FIXME: Move replace(...) 
into the make template of the qtgui blocks # Return a string here class GuiHint(object): def __init__(self, ws): self._ws = ws def __call__(self, w): return (self._ws.replace('addWidget', 'addLayout') if 'layout' in w else self._ws) % w def __str__(self): return self._ws return GuiHint(widget_str) ######################### # Grid Position Type ######################### elif t == 'grid_pos': if not v: # Allow for empty grid pos return '' e = self.get_parent().get_parent().evaluate(v) if not isinstance(e, (list, tuple)) or len(e) != 4 or not all([isinstance(ei, int) for ei in e]): raise Exception('A grid position must be a list of 4 integers.') row, col, row_span, col_span = e # Check row, col if row < 0 or col < 0: raise Exception('Row and column must be non-negative.') # Check row span, col span if row_span <= 0 or col_span <= 0: raise Exception('Row and column span must be greater than zero.') # Get hostage cell parent try: my_parent = self.get_parent().get_param('notebook').evaluate() except: my_parent = '' # Calculate hostage cells for r in range(row_span): for c in range(col_span): self._hostage_cells.append((my_parent, (row+r, col+c))) # Avoid collisions params = filter(lambda p: p is not self, self.get_all_params('grid_pos')) for param in params: for parent, cell in param._hostage_cells: if (parent, cell) in self._hostage_cells: raise Exception('Another graphical element is using parent "{0}", cell "{1}".'.format(str(parent), str(cell))) return e ######################### # Notebook Page Type ######################### elif t == 'notebook': if not v: # Allow for empty notebook return '' # Get a list of all notebooks notebook_blocks = filter(lambda b: b.get_key() == 'notebook', self.get_parent().get_parent().get_enabled_blocks()) # Check for notebook param syntax try: notebook_id, page_index = map(str.strip, v.split(',')) except: raise Exception('Bad notebook page format.') # Check that the notebook id is valid try: notebook_block = filter(lambda b: b.get_id() 
== notebook_id, notebook_blocks)[0] except: raise Exception('Notebook id "{0}" is not an existing notebook id.'.format(notebook_id)) # Check that page index exists if int(page_index) not in range(len(notebook_block.get_param('labels').evaluate())): raise Exception('Page index "{0}" is not a valid index number.'.format(page_index)) return notebook_id, page_index ######################### # Import Type ######################### elif t == 'import': # New namespace n = dict() try: exec v in n except ImportError: raise Exception('Import "{0}" failed.'.format(v)) except Exception: raise Exception('Bad import syntax: "{0}".'.format(v)) return filter(lambda k: str(k) != '__builtins__', n.keys()) ######################### else: raise TypeError('Type "{0}" not handled'.format(t)) def to_code(self): """ Convert the value to code. For string and list types, check the init flag, call evaluate(). This ensures that evaluate() was called to set the xxxify_flags. Returns: a string representing the code """ v = self.get_value() t = self.get_type() # String types if t in ('string', 'file_open', 'file_save', '_multiline', '_multiline_python_external'): if not self._init: self.evaluate() return repr(v) if self._stringify_flag else v # Vector types elif t in ('complex_vector', 'real_vector', 'float_vector', 'int_vector'): if not self._init: self.evaluate() if self._lisitify_flag: return '(%s, )' % v else: return '(%s)' % v else: return v def get_all_params(self, type, key=None): """ Get all the params from the flowgraph that have the given type and optionally a given key Args: type: the specified type key: the key to match against Returns: a list of params """ return sum([filter(lambda p: ((p.get_type() == type) and ((key is None) or (p.get_key() == key))), block.get_params()) for block in self.get_parent().get_parent().get_enabled_blocks()], []) def is_enum(self): return self._type == 'enum' def get_value(self): value = self._value if self.is_enum() and value not in 
self.get_option_keys(): value = self.get_option_keys()[0] self.set_value(value) return value def set_value(self, value): # Must be a string self._value = str(value) def set_default(self, value): if self._default == self._value: self.set_value(value) self._default = str(value) def get_type(self): return self.get_parent().resolve_dependencies(self._type) def get_tab_label(self): return self._tab_label def get_name(self): return self.get_parent().resolve_dependencies(self._name).strip() def get_key(self): return self._key ############################################## # Access Options ############################################## def get_option_keys(self): return _get_keys(self.get_options()) def get_option(self, key): return _get_elem(self.get_options(), key) def get_options(self): return self._options ############################################## # Access Opts ############################################## def get_opt_keys(self): return self.get_option(self.get_value()).get_opt_keys() def get_opt(self, key): return self.get_option(self.get_value()).get_opt(key) def get_opts(self): return self.get_option(self.get_value()).get_opts() ############################################## # Import/Export Methods ############################################## def export_data(self): """ Export this param's key/value. Returns: a nested data odict """ n = odict() n['key'] = self.get_key() n['value'] = self.get_value() return n
gpl-3.0
-1,952,335,247,916,471,800
36.574297
197
0.500499
false
4.318818
false
false
false
joewie/PySyft
tests/test_integrations_locally.py
2
2952
import unittest import numpy as np import pickle from syft.nn.linear import LinearClassifier from syft.he.paillier import KeyPair, PaillierTensor from capsule.django_client import LocalDjangoCapsuleClient class PySonarNotebooks(unittest.TestCase): def model_training_demo_notebook(self): """If this test fails, you probably broke the demo notebook located at PySonar/notebooks/Sonar - Decentralized Model Training Simulation (local blockchain).ipynb """ c = LocalDjangoCapsuleClient() d = LinearClassifier(desc="DiabetesClassifier", n_inputs=10, n_labels=1, capsule_client=c) d.encrypt() self.assertTrue(True) class PySyftNotebooks(unittest.TestCase): def paillier_HE_example_notebook(self): """If this test fails, you probably broke the demo notebook located at PySyft/notebooks/Syft - Paillier Homomorphic Encryption Example.ipynb """ pubkey, prikey = KeyPair().generate() x = PaillierTensor(pubkey, np.array([1, 2, 3, 4, 5.])) out1 = x.decrypt(prikey) self.assertEqual(out1, np.array([1., 2., 3., 4., 5.])) out2 = (x + x[0]).decrypt(prikey) self.assertEqual(out2, np.array([2., 3., 4., 5., 6.])) out3 = (x * 5).decrypt(prikey) self.assertEqual(out3, np.array([5., 10., 15., 20., 25.])) out4 = (x + x / 5).decrypt(prikey) self.assertEqual(out4, np.array([1.2, 2.4, 3.6, 4.8, 6.])) pubkey_str = pubkey.serialize() prikey_str = prikey.serialize() pubkey2, prikey2 = KeyPair().deserialize(pubkey_str, prikey_str) out5 = prikey2.decrypt(x) self.assertEqual(out5, np.array([1., 2., 3., 4., 5.])) y = PaillierTensor(pubkey, (np.ones(5)) / 2) out6 = prikey.decrypt(y) self.assertEqual(out6, np.array([.5, .5, .5, .5, .5])) y_str = pickle.dumps(y) y2 = pickle.loads(y_str) out7 = prikey.decrypt(y2) self.assertEqual(out7, np.array([.5, .5, .5, .5, .5])) def test_paillier_linear_classifier_notebook(self): """If this test fails, you probably broke the demo notebook located at PySyft/notebooks/Syft - Paillier Homomorphic Encryption Example.ipynb """ capsule = LocalDjangoCapsuleClient() model = 
LinearClassifier(capsule_client=capsule) assert(model.capsule == capsule) try: model = model.encrypt() encrypted = True except Exception as e: encrypted = False print('[!]', e) input = np.array([[0, 0, 1, 1], [0, 0, 1, 0], [1, 0, 1, 1], [0, 0, 1, 0]]) target = np.array([[0, 1], [0, 0], [1, 1], [0, 0]]) for iter in range(3): model.learn(input, target, alpha=0.5) if encrypted: model = model.decrypt() for i in range(len(input)): model.forward(input[i])
apache-2.0
-4,713,357,466,685,288,000
31.8
98
0.590447
false
3.320585
true
false
false
RishiRamraj/interviews
solutions/algorithms/max_subarray.py
1
1728
''' Problem: Given [int], find a non-empty sub-array with the max sum. ''' def subarray(target): # Exit early. if all(item >= 0 for item in target): return target if all(item <= 0 for item in target): return [max(target)] # Build an incremental sum lookup. lookup = {} sum = 0 for index, item in enumerate(target): sum += item lookup[index] = sum # Find all points that cross 0. The start and end of target # should also be included. ups = [0] downs = [] last = None # Build the cross lookup. for index, item in enumerate(target): # Set last. if last is None: last = item continue # Check for a hit; we only care about positives. if last <= 0 and item > 0: ups.append(index) elif last > 0 and item <= 0: downs.append(index-1) # Update last. last = item # Add the end. downs.append(len(target)-1) # Permute the cross points of interest. max = None result = None for start in ups: for end in downs: if end <= start: continue # Find the integral. sum = lookup[end] - lookup[start] + target[start] # Set max. if not max: max = sum result = (start, end) continue # Check for a hit. if sum > max: max = sum result = (start, end) # Return result. start, end = result return target[start: end+1] test = [13, -3, -25, -20, -16, -23, 18, 20, -7, 12, -5, -22, 15, -4, 7] print(test) print(subarray(test))
mit
-1,594,383,956,390,017,800
21.153846
71
0.506944
false
3.848552
false
false
false
sam-m888/gramps
gramps/gui/views/navigationview.py
1
17849
# # Gramps - a GTK+/GNOME based genealogy program # # Copyright (C) 2001-2007 Donald N. Allingham # Copyright (C) 2009-2010 Nick Hall # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # """ Provide the base classes for GRAMPS' DataView classes """ #---------------------------------------------------------------- # # python modules # #---------------------------------------------------------------- from abc import abstractmethod import html import logging _LOG = logging.getLogger('.navigationview') #---------------------------------------------------------------- # # gtk # #---------------------------------------------------------------- from gi.repository import Gdk from gi.repository import Gtk #---------------------------------------------------------------- # # Gramps # #---------------------------------------------------------------- from gramps.gen.const import GRAMPS_LOCALE as glocale _ = glocale.translation.sgettext from .pageview import PageView from ..uimanager import ActionGroup from gramps.gen.utils.db import navigation_label from gramps.gen.constfunc import mod_key from ..utils import match_primary_mask DISABLED = -1 MRU_SIZE = 10 MRU_TOP = '<section id="CommonHistory">' MRU_BTM = '</section>' #------------------------------------------------------------------------------ # # NavigationView # 
#------------------------------------------------------------------------------ class NavigationView(PageView): """ The NavigationView class is the base class for all Data Views that require navigation functionalilty. Views that need bookmarks and forward/backward should derive from this class. """ def __init__(self, title, pdata, state, uistate, bm_type, nav_group): PageView.__init__(self, title, pdata, state, uistate) self.bookmarks = bm_type(self.dbstate, self.uistate, self.change_active) self.fwd_action = None self.back_action = None self.book_action = None self.other_action = None self.active_signal = None self.mru_signal = None self.nav_group = nav_group self.mru_active = DISABLED self.uimanager = uistate.uimanager self.uistate.register(state, self.navigation_type(), self.nav_group) def navigation_type(self): """ Indictates the navigation type. Navigation type can be the string name of any of the primary Objects. A History object will be created for it, see DisplayState.History """ return None def define_actions(self): """ Define menu actions. """ PageView.define_actions(self) self.bookmark_actions() self.navigation_actions() def disable_action_group(self): """ Normally, this would not be overridden from the base class. However, in this case, we have additional action groups that need to be handled correctly. """ PageView.disable_action_group(self) self.uimanager.set_actions_visible(self.fwd_action, False) self.uimanager.set_actions_visible(self.back_action, False) def enable_action_group(self, obj): """ Normally, this would not be overridden from the base class. However, in this case, we have additional action groups that need to be handled correctly. 
""" PageView.enable_action_group(self, obj) self.uimanager.set_actions_visible(self.fwd_action, True) self.uimanager.set_actions_visible(self.back_action, True) hobj = self.get_history() self.uimanager.set_actions_sensitive(self.fwd_action, not hobj.at_end()) self.uimanager.set_actions_sensitive(self.back_action, not hobj.at_front()) def change_page(self): """ Called when the page changes. """ hobj = self.get_history() self.uimanager.set_actions_sensitive(self.fwd_action, not hobj.at_end()) self.uimanager.set_actions_sensitive(self.back_action, not hobj.at_front()) self.uimanager.set_actions_sensitive(self.other_action, not self.dbstate.db.readonly) self.uistate.modify_statusbar(self.dbstate) def set_active(self): """ Called when the page becomes active (displayed). """ PageView.set_active(self) self.bookmarks.display() hobj = self.get_history() self.active_signal = hobj.connect('active-changed', self.goto_active) self.mru_signal = hobj.connect('mru-changed', self.update_mru_menu) self.update_mru_menu(hobj.mru, update_menu=False) self.goto_active(None) def set_inactive(self): """ Called when the page becomes inactive (not displayed). """ if self.active: PageView.set_inactive(self) self.bookmarks.undisplay() hobj = self.get_history() hobj.disconnect(self.active_signal) hobj.disconnect(self.mru_signal) self.mru_disable() def navigation_group(self): """ Return the navigation group. """ return self.nav_group def get_history(self): """ Return the history object. """ return self.uistate.get_history(self.navigation_type(), self.navigation_group()) def goto_active(self, active_handle): """ Callback (and usable function) that selects the active person in the display tree. 
""" active_handle = self.uistate.get_active(self.navigation_type(), self.navigation_group()) if active_handle: self.goto_handle(active_handle) hobj = self.get_history() self.uimanager.set_actions_sensitive(self.fwd_action, not hobj.at_end()) self.uimanager.set_actions_sensitive(self.back_action, not hobj.at_front()) def get_active(self): """ Return the handle of the active object. """ hobj = self.uistate.get_history(self.navigation_type(), self.navigation_group()) return hobj.present() def change_active(self, handle): """ Changes the active object. """ hobj = self.get_history() if handle and not hobj.lock and not (handle == hobj.present()): hobj.push(handle) @abstractmethod def goto_handle(self, handle): """ Needs to be implemented by classes derived from this. Used to move to the given handle. """ def selected_handles(self): """ Return the active person's handle in a list. Used for compatibility with those list views that can return multiply selected items. """ active_handle = self.uistate.get_active(self.navigation_type(), self.navigation_group()) return [active_handle] if active_handle else [] #################################################################### # BOOKMARKS #################################################################### def add_bookmark(self, *obj): """ Add a bookmark to the list. """ from gramps.gen.display.name import displayer as name_displayer active_handle = self.uistate.get_active('Person') active_person = self.dbstate.db.get_person_from_handle(active_handle) if active_person: self.bookmarks.add(active_handle) name = name_displayer.display(active_person) self.uistate.push_message(self.dbstate, _("%s has been bookmarked") % name) else: from ..dialog import WarningDialog WarningDialog( _("Could Not Set a Bookmark"), _("A bookmark could not be set because " "no one was selected."), parent=self.uistate.window) def edit_bookmarks(self, *obj): """ Call the bookmark editor. 
""" self.bookmarks.edit() def bookmark_actions(self): """ Define the bookmark menu actions. """ self.book_action = ActionGroup(name=self.title + '/Bookmark') self.book_action.add_actions([ ('AddBook', self.add_bookmark, '<PRIMARY>d'), ('EditBook', self.edit_bookmarks, '<shift><PRIMARY>D'), ]) self._add_action_group(self.book_action) #################################################################### # NAVIGATION #################################################################### def navigation_actions(self): """ Define the navigation menu actions. """ # add the Forward action group to handle the Forward button self.fwd_action = ActionGroup(name=self.title + '/Forward') self.fwd_action.add_actions([('Forward', self.fwd_clicked, "%sRight" % mod_key())]) # add the Backward action group to handle the Forward button self.back_action = ActionGroup(name=self.title + '/Backward') self.back_action.add_actions([('Back', self.back_clicked, "%sLeft" % mod_key())]) self._add_action('HomePerson', self.home, "%sHome" % mod_key()) self.other_action = ActionGroup(name=self.title + '/PersonOther') self.other_action.add_actions([ ('SetActive', self.set_default_person)]) self._add_action_group(self.back_action) self._add_action_group(self.fwd_action) self._add_action_group(self.other_action) def set_default_person(self, *obj): """ Set the default person. """ active = self.uistate.get_active('Person') if active: self.dbstate.db.set_default_person_handle(active) def home(self, *obj): """ Move to the default person. """ defperson = self.dbstate.db.get_default_person() if defperson: self.change_active(defperson.get_handle()) else: from ..dialog import WarningDialog WarningDialog(_("No Home Person"), _("You need to set a 'default person' to go to. 
" "Select the People View, select the person you want as " "'Home Person', then confirm your choice " "via the menu Edit -> Set Home Person."), parent=self.uistate.window) def jump(self, *obj): """ A dialog to move to a Gramps ID entered by the user. """ dialog = Gtk.Dialog(title=_('Jump to by Gramps ID'), transient_for=self.uistate.window) dialog.set_border_width(12) label = Gtk.Label(label='<span weight="bold" size="larger">%s</span>' % _('Jump to by Gramps ID')) label.set_use_markup(True) dialog.vbox.add(label) dialog.vbox.set_spacing(10) dialog.vbox.set_border_width(12) hbox = Gtk.Box() hbox.pack_start(Gtk.Label(label=_("%s: ") % _('ID')), True, True, 0) text = Gtk.Entry() text.set_activates_default(True) hbox.pack_start(text, False, True, 0) dialog.vbox.pack_start(hbox, False, True, 0) dialog.add_buttons(_('_Cancel'), Gtk.ResponseType.CANCEL, _('_Jump to'), Gtk.ResponseType.OK) dialog.set_default_response(Gtk.ResponseType.OK) dialog.vbox.show_all() if dialog.run() == Gtk.ResponseType.OK: gid = text.get_text() handle = self.get_handle_from_gramps_id(gid) if handle is not None: self.change_active(handle) else: self.uistate.push_message( self.dbstate, _("Error: %s is not a valid Gramps ID") % gid) dialog.destroy() def get_handle_from_gramps_id(self, gid): """ Get an object handle from its Gramps ID. Needs to be implemented by the inheriting class. """ pass def fwd_clicked(self, *obj): """ Move forward one object in the history. """ hobj = self.get_history() hobj.lock = True if not hobj.at_end(): hobj.forward() self.uistate.modify_statusbar(self.dbstate) self.uimanager.set_actions_sensitive(self.fwd_action, not hobj.at_end()) self.uimanager.set_actions_sensitive(self.back_action, True) hobj.lock = False def back_clicked(self, *obj): """ Move backward one object in the history. 
""" hobj = self.get_history() hobj.lock = True if not hobj.at_front(): hobj.back() self.uistate.modify_statusbar(self.dbstate) self.uimanager.set_actions_sensitive(self.back_action, not hobj.at_front()) self.uimanager.set_actions_sensitive(self.fwd_action, True) hobj.lock = False #################################################################### # MRU functions #################################################################### def mru_disable(self): """ Remove the UI and action groups for the MRU list. """ if self.mru_active != DISABLED: self.uimanager.remove_ui(self.mru_active) self.uimanager.remove_action_group(self.mru_action) self.mru_active = DISABLED def mru_enable(self, update_menu=False): """ Enables the UI and action groups for the MRU list. """ if self.mru_active == DISABLED: self.uimanager.insert_action_group(self.mru_action) self.mru_active = self.uimanager.add_ui_from_string(self.mru_ui) if update_menu: self.uimanager.update_menu() def update_mru_menu(self, items, update_menu=True): """ Builds the UI and action group for the MRU list. 
""" menuitem = ''' <item> <attribute name="action">win.%s%02d</attribute> <attribute name="label">%s</attribute> </item> ''' menus = '' self.mru_disable() nav_type = self.navigation_type() hobj = self.get_history() menu_len = min(len(items) - 1, MRU_SIZE) data = [] for index in range(menu_len - 1, -1, -1): name, _obj = navigation_label(self.dbstate.db, nav_type, items[index]) menus += menuitem % (nav_type, index, html.escape(name)) data.append(('%s%02d' % (nav_type, index), make_callback(hobj.push, items[index]), "%s%d" % (mod_key(), menu_len - 1 - index))) self.mru_ui = [MRU_TOP + menus + MRU_BTM] self.mru_action = ActionGroup(name=self.title + '/MRU') self.mru_action.add_actions(data) self.mru_enable(update_menu) #################################################################### # Template functions #################################################################### @abstractmethod def build_tree(self): """ Rebuilds the current display. This must be overridden by the derived class. """ @abstractmethod def build_widget(self): """ Builds the container widget for the interface. Must be overridden by the the base class. Returns a gtk container widget. """ def key_press_handler(self, widget, event): """ Handle the control+c (copy) and control+v (paste), or pass it on. """ if self.active: if event.type == Gdk.EventType.KEY_PRESS: if (event.keyval == Gdk.KEY_c and match_primary_mask(event.get_state())): self.call_copy() return True return super(NavigationView, self).key_press_handler(widget, event) def call_copy(self): """ Navigation specific copy (control+c) hander. If the copy can be handled, it returns true, otherwise false. The code brings up the Clipboard (if already exists) or creates it. The copy is handled through the drag and drop system. 
""" nav_type = self.navigation_type() handles = self.selected_handles() return self.copy_to_clipboard(nav_type, handles) def make_callback(func, handle): """ Generates a callback function based off the passed arguments """ return lambda x, y: func(handle)
gpl-2.0
998,957,034,830,344,600
34.841365
80
0.535884
false
4.234638
false
false
false