text
stringlengths 28
881k
|
---|
# (C) Copyright Artificial Brain 2021.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINENEWLINEfrom quantumcat.gates.custom_gates.braket.u_gate import UGateNEWLINEfrom quantumcat.gates.custom_gates.braket.u1_gate import U1GateNEWLINEfrom quantumcat.gates.custom_gates.braket.u2_gate import U2GateNEWLINEfrom quantumcat.gates.custom_gates.braket.u3_gate import U3GateNEWLINEfrom quantumcat.gates.custom_gates.braket.cu_gate import CUGateNEWLINEfrom quantumcat.gates.custom_gates.braket.ch_gate import CHGateNEWLINEfrom quantumcat.gates.custom_gates.braket.crx_gate import CRXGateNEWLINEfrom quantumcat.gates.custom_gates.braket.r_gate import RGateNEWLINEfrom quantumcat.gates.custom_gates.braket.cry_gate import CRYGateNEWLINEfrom quantumcat.gates.custom_gates.braket.crz_gate import CRZGateNEWLINEfrom quantumcat.gates.custom_gates.braket.csx_gate import CSXGateNEWLINEfrom quantumcat.gates.custom_gates.braket.cu1_gate import CU1GateNEWLINEfrom quantumcat.gates.custom_gates.braket.dcx_gate import DCXGateNEWLINEfrom quantumcat.gates.custom_gates.braket.rc3x_gate import RC3XGateNEWLINEfrom quantumcat.gates.custom_gates.braket.rccx_gate import RCCXGateNEWLINEfrom quantumcat.gates.custom_gates.braket.rzx_gate import RZXGateNEWLINEfrom quantumcat.gates.custom_gates.braket.cu3_gate import CU3GateNEWLINE |
"""NEWLINEUnionFind.pyNEWLINENEWLINESource: http://www.ics.uci.edu/~eppstein/PADS/UnionFind.pyNEWLINENEWLINEUnion-find data structure. Based on Josiah Carlson's code,NEWLINEhttp://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/215912NEWLINEwith significant additional changes by D. Eppstein.NEWLINE"""NEWLINENEWLINEfrom collections import defaultdictNEWLINENEWLINENEWLINEclass UnionFind(object):NEWLINE """Union-find data structure.NEWLINENEWLINE Each unionFind instance X maintains a family of disjoint sets ofNEWLINE hashable objects, supporting the following two methods:NEWLINENEWLINE - X[item] returns a name for the set containing the given item.NEWLINE Each set is named by an arbitrarily-chosen one of its members; asNEWLINE long as the set remains unchanged it will keep the same name. IfNEWLINE the item is not yet part of a set in X, a new singleton set isNEWLINE created for it.NEWLINENEWLINE - X.union(item1, item2, ...) merges the sets containing each itemNEWLINE into a single larger set. 
If any item is not yet part of a setNEWLINE in X, it is added to X as one of the members of the merged set.NEWLINE """NEWLINENEWLINE def __init__(self):NEWLINE """Create a new empty union-find structure."""NEWLINE self.weights = {}NEWLINE self.parents = {}NEWLINENEWLINE def __getitem__(self, object):NEWLINE """Find and return the name of the set containing the object."""NEWLINE # check for previously unknown objectNEWLINE if object not in self.parents:NEWLINE self.parents[object] = objectNEWLINE self.weights[object] = 1NEWLINE return objectNEWLINENEWLINE # find path of objects leading to the rootNEWLINE path = [object]NEWLINE root = self.parents[object]NEWLINE while root != path[-1]:NEWLINE path.append(root)NEWLINE root = self.parents[root]NEWLINENEWLINE # compress the path and returnNEWLINE for ancestor in path:NEWLINE self.parents[ancestor] = rootNEWLINE return rootNEWLINENEWLINE def __iter__(self):NEWLINE """Iterate through all items ever found or unioned by this structure."""NEWLINE return iter(self.parents)NEWLINENEWLINE def union(self, *objects):NEWLINE """Find the sets containing the objects and merge them all."""NEWLINE roots = [self[x] for x in objects]NEWLINE heaviest = max([(self.weights[r],r) for r in roots])[1]NEWLINE for r in roots:NEWLINE if r != heaviest:NEWLINE self.weights[heaviest] += self.weights[r]NEWLINE self.parents[r] = heaviestNEWLINENEWLINE def sets(self):NEWLINE """Return a list of each disjoint set"""NEWLINE ret = defaultdict(list)NEWLINE for k, _ in self.parents.iteritems():NEWLINE ret[self[k]].append(k)NEWLINE return ret.values()NEWLINENEWLINE NEWLINEif __name__ == '__main__':NEWLINENEWLINE # testNEWLINE uf = UnionFind()NEWLINE uf.union(0, 1)NEWLINE uf.union(2, 3)NEWLINE uf.union(3, 0)NEWLINE assert uf.sets() == [[0, 1, 2, 3]]NEWLINE |
"""This test creates two top level actors and one sub-actor andNEWLINE verifies that the actors can exchange sequences of messages."""NEWLINENEWLINEimport timeNEWLINEfrom thespian.actors import *NEWLINEfrom thespian.test import *NEWLINENEWLINEclass rosaline(Actor):NEWLINE name = 'Rosaline'NEWLINENEWLINEclass Romeo(Actor):NEWLINE def receiveMessage(self, msg, sender):NEWLINE if isinstance(msg, JulietAppears):NEWLINE self.send(msg.juliet, "But, soft! what light through yonder window breaks?")NEWLINE elif isinstance(msg, ActorExitRequest):NEWLINE pass # nothing special, just dieNEWLINE elif msg == 'Ay me!':NEWLINE self.send(sender, 'She speaks!')NEWLINE elif msg == 'O Romeo, Romeo! wherefore art thou Romeo?':NEWLINE self.send(sender, 'Shall I hear more, or shall I speak at this?')NEWLINE elif 'rose' in msg:NEWLINE pass # wait for itNEWLINE elif 'sweet' in msg:NEWLINE self.send(sender, 'Like softest music to attending ears!')NEWLINE elif 'hello' in msg:NEWLINE print('Hello from %s'%(str(self)))NEWLINE elif 'who_are_you' == msg:NEWLINE self.send(sender, self.myAddress)NEWLINE # otherwise sit and swoonNEWLINENEWLINENEWLINEclass Capulet(Actor):NEWLINE def receiveMessage(self, msg, sender):NEWLINE if msg == "has a daughter?":NEWLINE self.send(sender, self.createActor(Juliet))NEWLINENEWLINENEWLINEclass Juliet(Actor):NEWLINE def __init__(self, *args, **kw):NEWLINE self.nurse = NoneNEWLINE self.recalled = FalseNEWLINE super(Juliet, self).__init__(*args, **kw)NEWLINE def receiveMessage(self, msg, sender):NEWLINE if isinstance(msg, ActorExitRequest):NEWLINE pass # nothing special, just dieNEWLINE elif "what light" in msg:NEWLINE self.send(sender, 'Ay me!')NEWLINE elif msg == 'She speaks!':NEWLINE self.send(sender, 'O Romeo, Romeo! wherefore art thou Romeo?')NEWLINE elif msg == 'Shall I hear more, or shall I speak at this?':NEWLINE self.send(sender, "What's in a name? 
That which we call a rose")NEWLINE self.send(sender, "By any other name would smell as sweet")NEWLINE elif msg == 'Like softest music to attending ears!':NEWLINE if self.nurse:NEWLINE self.send(self.nurse, 'Anon, good nurse!')NEWLINE else:NEWLINE self.recalled = TrueNEWLINE elif msg == 'Mistress!':NEWLINE self.nurse = senderNEWLINE if self.recalled:NEWLINE self.send(self.nurse, 'Anon, good nurse!')NEWLINE elif 'who_are_you' == msg:NEWLINE self.send(sender, self.myAddress)NEWLINENEWLINENEWLINEclass Nurse(Actor):NEWLINE def __init__(self, *args, **kw):NEWLINE self.heardItAll = FalseNEWLINE super(Nurse, self).__init__(*args, **kw)NEWLINE def receiveMessage(self, msg, sender):NEWLINE if type(msg) == type((1,2)) and msg[0] == 'begin':NEWLINE self.send(msg[1], JulietAppears(msg[2]))NEWLINE self.send(msg[2], 'Mistress!')NEWLINE elif msg == 'Anon, good nurse!':NEWLINE self.heardItAll = TrueNEWLINE elif msg == 'done?':NEWLINE self.send(sender, 'Fini' if self.heardItAll else 'not yet')NEWLINENEWLINENEWLINEclass JulietAppears:NEWLINE stage = 'Right'NEWLINE def __init__(self, julietAddr):NEWLINE self.juliet = julietAddrNEWLINENEWLINENEWLINEclass TestFuncActors():NEWLINENEWLINENEWLINE def test01_ActorSystemStartupShutdown(self, asys):NEWLINE rosalineA = asys.createActor(rosaline)NEWLINE # just finish, make sure no exception is thrown.NEWLINENEWLINE def test01_1_ActorSystemMultipleShutdown(self, asys):NEWLINE rosalineA = asys.createActor(rosaline)NEWLINE asys.shutdown()NEWLINE asys.shutdown()NEWLINENEWLINE def test02_PrimaryActorCreation(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE assert romeo != julietNEWLINENEWLINE def test03_CreateActorUniqueAddress(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE assert romeo != julietNEWLINE romeo2 = asys.createActor(Romeo)NEWLINE assert romeo != romeo2NEWLINENEWLINE def NOtest04_PossibleActorSystemResourceExhaustion(self):NEWLINE 
try:NEWLINE addresses = [asys.createActor(Juliet) for n in range(10000)]NEWLINE except OSError as err:NEWLINE import errnoNEWLINE if err.errno == errno.EGAIN:NEWLINE passNEWLINE else:NEWLINE raiseNEWLINENEWLINENEWLINE def test05_ManyActorsUniqueAddress(self, asys):NEWLINE addresses = [asys.createActor(Juliet) for n in range(50)]NEWLINE uniqueAddresses = []NEWLINE duplicates = []NEWLINE for A in addresses:NEWLINE if A in uniqueAddresses:NEWLINE duplicates.append(A)NEWLINE else:NEWLINE uniqueAddresses.append(A)NEWLINE if len(addresses) != len(uniqueAddresses):NEWLINE print('Duplicates: %s'%map(str, duplicates))NEWLINE if duplicates:NEWLINE for each in duplicates:NEWLINE print('... %s at: %s'%(str(each), str([N for N,A in enumerate(addresses) if A == each])))NEWLINE print('Note: if this is a UDPTransport test, be advised that Linux occasionally does seem to assign the same UDP port multiple times. Linux bug?')NEWLINE assert len(addresses) == len(uniqueAddresses)NEWLINENEWLINE def test06_ManyActorsValidAddresses(self, asys):NEWLINE import stringNEWLINE addresses = [asys.createActor(Juliet) for n in range(100)]NEWLINE for addr in addresses:NEWLINE invchar = ''.join([c for c in str(addr)NEWLINE if c not in string.ascii_letters + string.digits + "-~/():., '|>"])NEWLINE assert str(addr) == str(addr) + invchar # invchar should be blankNEWLINE if asys.base_name.startswith('multiprocUDP'):NEWLINE # Normally the asys.shutdown() following this test willNEWLINE # shutdown all actors, but for the multiprocUDP base, theNEWLINE # ActorSystem (and logger) process are left behind becauseNEWLINE # UDP does not have guaranteed delivery and 100 processesNEWLINE # sending a UDP message to the ActorSystem nearlyNEWLINE # simultaneously overloads and drops packets. 
Use a moreNEWLINE # regulated shutdown here for UDP to avoid this overflowNEWLINE # (which does not hurt anything but leaves actor processesNEWLINE # behind).NEWLINE per_loop = 10NEWLINE for ii in range(0, len(addresses), per_loop):NEWLINE for jj in range(ii, ii + per_loop):NEWLINE asys.tell(addresses[jj], ActorExitRequest())NEWLINE time.sleep(0.25)NEWLINENEWLINE def test07_SingleNonListeningActorTell(self, asys):NEWLINE rosalineA = asys.createActor(rosaline)NEWLINE # rosaline does not override the receiveMessage method, so theNEWLINE # Actor default method will throw an exception. This willNEWLINE # Kill the rosaline Actor. It's a top level Actor, so it willNEWLINE # not be restarted. This will cause the 'hello' message to beNEWLINE # delivered to the DeadLetterBox. Verify that no exceptionNEWLINE # makes its way out of the ActorSystem here.NEWLINE asys.tell(rosalineA, 'hello')NEWLINE assert TrueNEWLINENEWLINE def test08_SingleActorTell(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE asys.tell(romeoA, 'hello')NEWLINE # Nothing much happens, Romeo is smitten and has no time for trivialities, butNEWLINE # he will try to generate str() of himself.NEWLINENEWLINE def test09_SingleActorAsk(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE resp = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert resp, 'Shall I hear more == or shall I speak at this?'NEWLINENEWLINE def test10_ActorAskWithNoResponse(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE # This test is possibly unique to the simpleSystemBase, whichNEWLINE # will run an process all messages on an ask (or tell) call.NEWLINE # Properly there is no way to determine if an answer isNEWLINE # forthcoming from an asynchronous system, so all this can doNEWLINE # is assert that there is no response within a particular timeNEWLINE # period. 
At this point, timing is not supported, so thisNEWLINE # test is underspecified and assumptive.NEWLINE resp = asys.ask(romeoA, "What's in a name? That which we call a rose", 1.5)NEWLINE assert resp is NoneNEWLINE # Now verify that the Actor and system are still alive and operating normally.NEWLINE resp = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert resp, 'Shall I hear more == or shall I speak at this?'NEWLINENEWLINE def test11_SingleActorAskMultipleTimes(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE r = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE r = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE r = asys.ask(romeoA, 'Ay me!', 1)NEWLINE assert r == 'She speaks!'NEWLINE r = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINENEWLINE def test12_MultipleActorsAskMultipleTimes(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE r = asys.ask(romeo, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE juliet = asys.createActor(Juliet)NEWLINE r = asys.ask(romeo, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE r = asys.ask(romeo, 'Ay me!', 1)NEWLINE assert r == 'She speaks!'NEWLINE r = asys.ask(juliet, 'She speaks!', 1)NEWLINE assert r == 'O Romeo, Romeo! 
wherefore art thou Romeo?'NEWLINE r = asys.ask(romeo, 'Ay me!', 1)NEWLINE assert r == 'She speaks!'NEWLINE r = asys.ask(juliet, "Do you know what light that is?", 1)NEWLINE assert r == 'Ay me!'NEWLINENEWLINE def test13_SubActorCreation(self, asys):NEWLINE capulet = asys.createActor(Capulet)NEWLINE juliet = asys.ask(capulet, 'has a daughter?', 2.5)NEWLINE print ('Juliet is: %s'%str(juliet))NEWLINE assert juliet is not NoneNEWLINE if juliet:NEWLINE r = asys.ask(juliet, 'what light?')NEWLINE assert r == 'Ay me!', 0.75NEWLINE juliet2 = asys.ask(capulet, 'has a daughter?', 1)NEWLINE assert juliet2 is not NoneNEWLINE if juliet2:NEWLINE r = asys.ask(juliet2, 'what light?', 0.5)NEWLINE assert r == 'Ay me!'NEWLINE r = asys.ask(juliet, 'what light?', 0.5)NEWLINE assert r == 'Ay me!'NEWLINENEWLINE def test14_EntireActWithActorStart(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE nurse = asys.createActor(Nurse)NEWLINE assert asys.ask(nurse, 'done?', 1) == 'not yet'NEWLINE asys.tell(nurse, ('begin', romeo, juliet))NEWLINENEWLINE for X in range(50):NEWLINE if asys.ask(nurse, 'done?', 1) == 'Fini':NEWLINE breakNEWLINE time.sleep(0.01) # Allow some time for the entire actNEWLINE r = asys.ask(nurse, 'done?', 1)NEWLINE assert r == 'Fini'NEWLINENEWLINE def test15_IncompleteActMissingActor(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE # no nurse actor createdNEWLINE asys.tell(romeo, JulietAppears(juliet))NEWLINE # No error should occur here when Juliet reaches the end andNEWLINE # doesn't have a nurse to tell.NEWLINENEWLINE time.sleep(0.05) # Allow some time for the entire actNEWLINENEWLINE # Now create the nurse and tell her to talk to romeo andNEWLINE # juliet, which should cause completionNEWLINE nurse = asys.createActor(Nurse)NEWLINE r = asys.ask(nurse, 'done?', 1)NEWLINE assert r == 'not yet'NEWLINE asys.tell(nurse, ('begin', romeo, juliet))NEWLINENEWLINE for X in 
range(50):NEWLINE if asys.ask(nurse, 'done?', 1) == 'Fini':NEWLINE breakNEWLINE time.sleep(0.01) # Allow some time for the entire actNEWLINE r = asys.ask(nurse, 'done?', 1)NEWLINE assert r == 'Fini'NEWLINENEWLINE def test16_ActorProperties(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINENEWLINE r = asys.ask(romeo, 'who_are_you', 0.25)NEWLINE assert r is not NoneNEWLINE r = asys.ask(juliet, 'who_are_you', 0.25)NEWLINE assert r is not NoneNEWLINE r1 = asys.ask(romeo, 'who_are_you', 0.25)NEWLINE r2 = asys.ask(juliet, 'who_are_you', 0.25)NEWLINE assert r1 != r2NEWLINE |
"""This file and its contents are licensed under the Apache License 2.0. Please see the included NOTICE for copyright information and LICENSE for a copy of the license.NEWLINE"""NEWLINE"""NEWLINEDjango Base settings for Label Studio.NEWLINENEWLINEFor more information on this file, seeNEWLINEhttps://docs.djangoproject.com/en/3.1/topics/settings/NEWLINENEWLINEFor the full list of settings and their values, seeNEWLINEhttps://docs.djangoproject.com/en/3.1/ref/settings/NEWLINE"""NEWLINEimport osNEWLINEimport reNEWLINEimport loggingNEWLINENEWLINE# for printing messages before main logging config appliedNEWLINEif not logging.getLogger().hasHandlers():NEWLINE logging.basicConfig(level=logging.DEBUG, format='%(message)s')NEWLINENEWLINEfrom label_studio.core.utils.io import get_data_dirNEWLINEfrom label_studio.core.utils.params import get_bool_env, get_envNEWLINENEWLINElogger = logging.getLogger(__name__)NEWLINENEWLINE# Hostname is used for proper path generation to the resources, pages, etcNEWLINEHOSTNAME = get_env('HOST', '')NEWLINEif HOSTNAME:NEWLINE if not HOSTNAME.startswith('http://') and not HOSTNAME.startswith('https://'):NEWLINE logger.info("! 
HOST variable found in environment, but it must start with http:// or https://, ignore it: %s", HOSTNAME)NEWLINE HOSTNAME = ''NEWLINE else:NEWLINE logger.info("=> Hostname correctly is set to: %s", HOSTNAME)NEWLINE if HOSTNAME.endswith('/'):NEWLINE HOSTNAME = HOSTNAME[0:-1]NEWLINENEWLINE # for django url resolverNEWLINE if HOSTNAME:NEWLINE # http[s]://domain.com:8080/script_name => /script_nameNEWLINE pattern = re.compile(r'^http[s]?:\/\/([^:\/\s]+(:\d*)?)(.*)?')NEWLINE match = pattern.match(HOSTNAME)NEWLINE FORCE_SCRIPT_NAME = match.group(3)NEWLINE if FORCE_SCRIPT_NAME:NEWLINE logger.info("=> Django URL prefix is set to: %s", FORCE_SCRIPT_NAME)NEWLINENEWLINEINTERNAL_PORT = '8080'NEWLINENEWLINE# SECURITY WARNING: keep the secret key used in production secret!NEWLINESECRET_KEY = '$(fefwefwef13;LFK{P!)@#*!)kdsjfWF2l+i5e3t(8a1n'NEWLINENEWLINE# SECURITY WARNING: don't run with debug turned on in production!NEWLINEDEBUG = get_bool_env('DEBUG', True)NEWLINEDEBUG_MODAL_EXCEPTIONS = get_bool_env('DEBUG_MODAL_EXCEPTIONS', True)NEWLINENEWLINENEWLINE# Build paths inside the project like this: os.path.join(BASE_DIR, ...)NEWLINEBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))NEWLINENEWLINE# Base path for media root and other uploaded filesNEWLINEBASE_DATA_DIR = get_env('BASE_DATA_DIR', get_data_dir())NEWLINEos.makedirs(BASE_DATA_DIR, exist_ok=True)NEWLINElogger.info('=> Database and media directory: %s', BASE_DATA_DIR)NEWLINENEWLINE# DatabasesNEWLINE# https://docs.djangoproject.com/en/2.1/ref/settings/#databasesNEWLINEDJANGO_DB_MYSQL = 'mysql'NEWLINEDJANGO_DB_SQLITE = 'sqlite'NEWLINEDJANGO_DB = 'default'NEWLINEDATABASE_NAME_DEFAULT = os.path.join(BASE_DATA_DIR, 'label_studio.sqlite3')NEWLINEDATABASE_NAME = get_env('DATABASE_NAME', DATABASE_NAME_DEFAULT)NEWLINEDATABASES_ALL = {NEWLINE 'default': {NEWLINE 'ENGINE': 'django.db.backends.postgresql',NEWLINE 'USER': get_env('POSTGRE_USER', 'postgres'),NEWLINE 'PASSWORD': get_env('POSTGRE_PASSWORD', 
'postgres'),NEWLINE 'NAME': get_env('POSTGRE_NAME', 'postgres'),NEWLINE 'HOST': get_env('POSTGRE_HOST', 'localhost'),NEWLINE 'PORT': int(get_env('POSTGRE_PORT', '5432')),NEWLINE },NEWLINE DJANGO_DB_MYSQL: {NEWLINE 'ENGINE': 'django.db.backends.mysql',NEWLINE 'USER': get_env('MYSQL_USER', 'root'),NEWLINE 'PASSWORD': get_env('MYSQL_PASSWORD', ''),NEWLINE 'NAME': get_env('MYSQL_NAME', 'labelstudio'),NEWLINE 'HOST': get_env('MYSQL_HOST', 'localhost'),NEWLINE 'PORT': int(get_env('MYSQL_PORT', '3306')),NEWLINE },NEWLINE DJANGO_DB_SQLITE: {NEWLINE 'ENGINE': 'django.db.backends.sqlite3',NEWLINE 'NAME': DATABASE_NAME,NEWLINE 'OPTIONS': {NEWLINE # 'timeout': 20,NEWLINE }NEWLINE }NEWLINE}NEWLINEDATABASES = {'default': DATABASES_ALL.get(get_env('DJANGO_DB', 'default'))}NEWLINENEWLINELOGGING = {NEWLINE 'version': 1,NEWLINE 'disable_existing_loggers': False,NEWLINE 'formatters': {NEWLINE 'standard': {NEWLINE 'format': '[%(asctime)s] [%(name)s::%(funcName)s::%(lineno)d] [%(levelname)s] %(message)s',NEWLINE },NEWLINE 'message_only': {NEWLINE 'format': '%(message)s',NEWLINE },NEWLINE 'rq_console': {NEWLINE 'format': '%(asctime)s %(message)s',NEWLINE 'datefmt': '%H:%M:%S',NEWLINE },NEWLINE },NEWLINE 'handlers': {NEWLINE 'console_raw': {NEWLINE 'level': get_env('LOG_LEVEL', 'WARNING'),NEWLINE 'class': 'logging.StreamHandler',NEWLINE },NEWLINE 'console': {NEWLINE 'level': get_env('LOG_LEVEL', 'WARNING'),NEWLINE 'class': 'logging.StreamHandler',NEWLINE 'formatter': 'standard'NEWLINE },NEWLINE 'rq_console': {NEWLINE 'level': 'WARNING',NEWLINE 'class': 'rq.utils.ColorizingStreamHandler',NEWLINE 'formatter': 'rq_console',NEWLINE 'exclude': ['%(asctime)s'],NEWLINE }NEWLINE },NEWLINE 'root': {NEWLINE 'handlers': ['console'],NEWLINE 'level': get_env('LOG_LEVEL', 'WARNING'),NEWLINE }NEWLINE}NEWLINENEWLINEif get_bool_env('GOOGLE_LOGGING_ENABLED', False):NEWLINE logging.info('Google Cloud Logging handler is enabled.')NEWLINE try:NEWLINE import google.cloud.loggingNEWLINE from 
google.auth.exceptions import GoogleAuthErrorNEWLINENEWLINE client = google.cloud.logging.Client()NEWLINE client.setup_logging()NEWLINENEWLINE LOGGING['handlers']['google_cloud_logging'] = {NEWLINE 'level': get_env('LOG_LEVEL', 'WARNING'),NEWLINE 'class': 'google.cloud.logging.handlers.CloudLoggingHandler',NEWLINE 'client': clientNEWLINE }NEWLINE LOGGING['root']['handlers'].append('google_cloud_logging')NEWLINE except GoogleAuthError as e:NEWLINE logger.exception('Google Cloud Logging handler could not be setup.')NEWLINENEWLINEINSTALLED_APPS = [NEWLINE 'django.contrib.admin',NEWLINE 'django.contrib.auth',NEWLINE 'django.contrib.contenttypes',NEWLINE 'django.contrib.sessions',NEWLINE 'django.contrib.messages',NEWLINE 'django.contrib.staticfiles',NEWLINE 'django.contrib.humanize',NEWLINENEWLINE 'drf_yasg',NEWLINE 'corsheaders',NEWLINE 'django_extensions',NEWLINE 'django_rq',NEWLINE 'django_filters',NEWLINE 'rules',NEWLINE 'annoying',NEWLINENEWLINE 'rest_framework',NEWLINE 'rest_framework_swagger',NEWLINE 'rest_framework.authtoken',NEWLINE 'drf_generators',NEWLINENEWLINE 'core',NEWLINE 'users',NEWLINE 'organizations',NEWLINE 'data_import',NEWLINE 'data_export',NEWLINENEWLINE 'projects',NEWLINE 'tasks',NEWLINE 'data_manager',NEWLINE 'io_storages',NEWLINE 'ml',NEWLINE 'webhooks',NEWLINE]NEWLINENEWLINEMIDDLEWARE = [NEWLINE 'corsheaders.middleware.CorsMiddleware',NEWLINE 'django.middleware.security.SecurityMiddleware',NEWLINE 'django.contrib.sessions.middleware.SessionMiddleware',NEWLINE 'django.middleware.locale.LocaleMiddleware',NEWLINE 'django.middleware.csrf.CsrfViewMiddleware',NEWLINE 'core.middleware.DisableCSRF',NEWLINE 'django.contrib.auth.middleware.AuthenticationMiddleware',NEWLINE 'django.contrib.messages.middleware.MessageMiddleware',NEWLINE 'django.middleware.clickjacking.XFrameOptionsMiddleware',NEWLINE 'core.middleware.CommonMiddlewareAppendSlashWithoutRedirect', # instead of 'CommonMiddleware'NEWLINE 'core.middleware.CommonMiddleware',NEWLINE 
'django_user_agents.middleware.UserAgentMiddleware',NEWLINE 'core.middleware.SetSessionUIDMiddleware',NEWLINE 'core.middleware.ContextLogMiddleware',NEWLINE 'core.middleware.DatabaseIsLockedRetryMiddleware',NEWLINE]NEWLINENEWLINEREST_FRAMEWORK = {NEWLINE 'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],NEWLINE 'DEFAULT_AUTHENTICATION_CLASSES': (NEWLINE 'rest_framework.authentication.TokenAuthentication',NEWLINE 'rest_framework.authentication.SessionAuthentication',NEWLINE ),NEWLINE 'DEFAULT_PERMISSION_CLASSES': [NEWLINE 'core.api_permissions.HasObjectPermission',NEWLINE 'rest_framework.permissions.IsAuthenticated',NEWLINENEWLINE ],NEWLINE 'EXCEPTION_HANDLER': 'core.utils.common.custom_exception_handler',NEWLINE 'DEFAULT_RENDERER_CLASSES': (NEWLINE 'rest_framework.renderers.JSONRenderer',NEWLINE ),NEWLINE 'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.NamespaceVersioning'NEWLINE}NEWLINENEWLINE# CORS & Host settingsNEWLINEINTERNAL_IPS = [ # django debug toolbar for django==2.2 requirementNEWLINE '127.0.0.1',NEWLINE 'localhost',NEWLINE]NEWLINECORS_ORIGIN_ALLOW_ALL = TrueNEWLINECORS_ALLOW_METHODS = [NEWLINE 'DELETE',NEWLINE 'GET',NEWLINE 'OPTIONS',NEWLINE 'PATCH',NEWLINE 'POST',NEWLINE 'PUT',NEWLINE]NEWLINEALLOWED_HOSTS = ['*']NEWLINENEWLINE# Auth modulesNEWLINEAUTH_USER_MODEL = 'users.User'NEWLINEAUTHENTICATION_BACKENDS = [NEWLINE 'rules.permissions.ObjectPermissionBackend',NEWLINE 'django.contrib.auth.backends.ModelBackend'NEWLINE]NEWLINEUSE_USERNAME_FOR_LOGIN = FalseNEWLINENEWLINEDISABLE_SIGNUP_WITHOUT_LINK = get_bool_env('DISABLE_SIGNUP_WITHOUT_LINK', False)NEWLINENEWLINE# Password validation:NEWLINE# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validatorsNEWLINEAUTH_PASSWORD_VALIDATORS = [NEWLINE {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},NEWLINE {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator'},NEWLINE {'NAME': 
'django.contrib.auth.password_validation.CommonPasswordValidator'},NEWLINE {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},NEWLINE]NEWLINENEWLINE# Django templatesNEWLINETEMPLATES_DIR = os.path.join(os.path.dirname(BASE_DIR), 'templates') # ../../from_this = 'web' dirNEWLINETEMPLATES = [NEWLINE {NEWLINE 'BACKEND': 'django.template.backends.django.DjangoTemplates',NEWLINE 'DIRS': [TEMPLATES_DIR],NEWLINE 'APP_DIRS': True,NEWLINE 'OPTIONS': {NEWLINE 'context_processors': [NEWLINE 'django.template.context_processors.debug',NEWLINE 'django.template.context_processors.request',NEWLINE 'django.contrib.auth.context_processors.auth',NEWLINE 'django.contrib.messages.context_processors.messages',NEWLINE 'core.context_processors.settings'NEWLINE ],NEWLINE 'builtins': ['django.templatetags.i18n'],NEWLINE },NEWLINE }NEWLINE]NEWLINENEWLINE# RQNEWLINERQ_QUEUES = {NEWLINE 'default': {NEWLINE 'HOST': 'localhost',NEWLINE 'PORT': 6379,NEWLINE 'DB': 0,NEWLINE 'DEFAULT_TIMEOUT': 180NEWLINE }NEWLINE}NEWLINENEWLINE# Swagger: automatic API documentationNEWLINESWAGGER_SETTINGS = {NEWLINE 'SECURITY_DEFINITIONS': {NEWLINE 'token': {NEWLINE 'type': 'token',NEWLINE 'name': 'Token',NEWLINE 'in': 'header',NEWLINE 'url': '/user/account'NEWLINE }NEWLINE },NEWLINE 'APIS_SORTER': 'alpha',NEWLINE 'SUPPORTED_SUBMIT_METHODS': ['get', 'post', 'put', 'delete', 'patch'],NEWLINE # "DEFAULT_AUTO_SCHEMA_CLASS": "core.utils.CustomAutoSchema",NEWLINE 'OPERATIONS_SORTER': 'alpha'NEWLINE}NEWLINENEWLINESENTRY_DSN = get_env('SENTRY_DSN', None)NEWLINESENTRY_RATE = float(get_env('SENTRY_RATE', 0.25))NEWLINESENTRY_ENVIRONMENT = get_env('SENTRY_ENVIRONMENT', 'stage.opensource')NEWLINESENTRY_REDIS_ENABLED = FalseNEWLINEFRONTEND_SENTRY_DSN = get_env('FRONTEND_SENTRY_DSN', None)NEWLINEFRONTEND_SENTRY_RATE = get_env('FRONTEND_SENTRY_RATE', 0.1)NEWLINEFRONTEND_SENTRY_ENVIRONMENT = get_env('FRONTEND_SENTRY_ENVIRONMENT', 'stage.opensource')NEWLINENEWLINEROOT_URLCONF = 
'core.urls'NEWLINEWSGI_APPLICATION = 'core.wsgi.application'NEWLINEGRAPHIQL = TrueNEWLINENEWLINE# InternationalizationNEWLINE# https://docs.djangoproject.com/en/2.1/topics/i18n/NEWLINELANGUAGE_CODE = 'en-us'NEWLINETIME_ZONE = 'UTC'NEWLINEUSE_I18N = FalseNEWLINEUSE_L10N = TrueNEWLINEUSE_TZ = TrueNEWLINENEWLINE# Static files (CSS, JavaScript, Images)NEWLINE# https://docs.djangoproject.com/en/2.1/howto/static-files/NEWLINESTATIC_URL = '/static/'NEWLINE# if FORCE_SCRIPT_NAME:NEWLINE# STATIC_URL = FORCE_SCRIPT_NAME + STATIC_URLNEWLINElogger.info(f'=> Static URL is set to: {STATIC_URL}')NEWLINENEWLINESTATIC_ROOT = os.path.join(BASE_DIR, 'static_build')NEWLINESTATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]NEWLINESTATICFILES_FINDERS = (NEWLINE 'django.contrib.staticfiles.finders.FileSystemFinder',NEWLINE 'django.contrib.staticfiles.finders.AppDirectoriesFinder'NEWLINE)NEWLINESTATICFILES_STORAGE = 'core.storage.SkipMissedManifestStaticFilesStorage'NEWLINENEWLINE# Sessions and CSRFNEWLINESESSION_COOKIE_SECURE = bool(int(get_env('SESSION_COOKIE_SECURE', True)))NEWLINECSRF_COOKIE_SECURE = bool(int(get_env('CSRF_COOKIE_SECURE', SESSION_COOKIE_SECURE)))NEWLINECSRF_COOKIE_HTTPONLY = bool(int(get_env('CSRF_COOKIE_HTTPONLY', SESSION_COOKIE_SECURE)))NEWLINENEWLINE# user media filesNEWLINEMEDIA_ROOT = os.path.join(BASE_DATA_DIR, 'media')NEWLINEos.makedirs(MEDIA_ROOT, exist_ok=True)NEWLINEMEDIA_URL = '/data/'NEWLINEUPLOAD_DIR = 'upload'NEWLINEAVATAR_PATH = 'avatars'NEWLINENEWLINE# project exportsNEWLINEEXPORT_DIR = os.path.join(BASE_DATA_DIR, 'export')NEWLINEEXPORT_URL_ROOT = '/export/'NEWLINE# old export dirNEWLINEos.makedirs(EXPORT_DIR, exist_ok=True)NEWLINE# dir for delayed exportNEWLINEDELAYED_EXPORT_DIR = 'export'NEWLINEos.makedirs(os.path.join(BASE_DATA_DIR, MEDIA_ROOT, DELAYED_EXPORT_DIR), exist_ok=True)NEWLINENEWLINE# file / task size limitsNEWLINEDATA_UPLOAD_MAX_MEMORY_SIZE = int(get_env('DATA_UPLOAD_MAX_MEMORY_SIZE', 250 * 1024 * 1024))NEWLINETASKS_MAX_NUMBER = 
1000000NEWLINETASKS_MAX_FILE_SIZE = DATA_UPLOAD_MAX_MEMORY_SIZENEWLINENEWLINETASK_LOCK_TTL = int(get_env('TASK_LOCK_TTL')) if get_env('TASK_LOCK_TTL') else NoneNEWLINETASK_LOCK_DEFAULT_TTL = int(get_env('TASK_LOCK_DEFAULT_TTL', 3600))NEWLINETASK_LOCK_MIN_TTL = int(get_env('TASK_LOCK_MIN_TTL', 120))NEWLINENEWLINE# Email backendNEWLINEFROM_EMAIL = get_env('FROM_EMAIL', 'Label Studio <hello@labelstud.io>')NEWLINEEMAIL_BACKEND = get_env('EMAIL_BACKEND', 'django.core.mail.backends.dummy.EmailBackend')NEWLINENEWLINEENABLE_LOCAL_FILES_STORAGE = get_bool_env('ENABLE_LOCAL_FILES_STORAGE', default=True)NEWLINELOCAL_FILES_SERVING_ENABLED = get_bool_env('LOCAL_FILES_SERVING_ENABLED', default=False)NEWLINENEWLINE""" React Libraries: do not forget to change this dir in /etc/nginx/nginx.conf """NEWLINE# EDITOR = label-studio-frontend repositoryNEWLINEEDITOR_ROOT = os.path.join(BASE_DIR, '../frontend/dist/lsf')NEWLINE# DM = data manager (included into FRONTEND due npm building, we need only version.json file from there)NEWLINEDM_ROOT = os.path.join(BASE_DIR, '../frontend/dist/dm')NEWLINE# FRONTEND = GUI for django backendNEWLINEREACT_APP_ROOT = os.path.join(BASE_DIR, '../frontend/dist/react-app')NEWLINENEWLINE# per project settingsNEWLINEBATCH_SIZE = 1000NEWLINEPROJECT_TITLE_MIN_LEN = 3NEWLINEPROJECT_TITLE_MAX_LEN = 50NEWLINELOGIN_REDIRECT_URL = '/'NEWLINELOGIN_URL = '/'NEWLINEMIN_GROUND_TRUTH = 10NEWLINEDATA_UNDEFINED_NAME = '$undefined$'NEWLINELICENSE = {}NEWLINEVERSIONS = {}NEWLINEVERSION_EDITION = 'Community Edition'NEWLINELATEST_VERSION_CHECK = TrueNEWLINEVERSIONS_CHECK_TIME = 0NEWLINEALLOW_ORGANIZATION_WEBHOOKS = get_bool_env('ALLOW_ORGANIZATION_WEBHOOKS', False)NEWLINECONVERTER_DOWNLOAD_RESOURCES = get_bool_env('CONVERTER_DOWNLOAD_RESOURCES', True)NEWLINEEXPERIMENTAL_FEATURES = get_bool_env('EXPERIMENTAL_FEATURES', False)NEWLINENEWLINECREATE_ORGANIZATION = 'organizations.functions.create_organization'NEWLINEGET_OBJECT_WITH_CHECK_AND_LOG = 
'core.utils.get_object.get_object_with_check_and_log'NEWLINESAVE_USER = 'users.functions.save_user'NEWLINEUSER_SERIALIZER = 'users.serializers.BaseUserSerializer'NEWLINEDATA_MANAGER_GET_ALL_COLUMNS = 'data_manager.functions.get_all_columns'NEWLINEDATA_MANAGER_ANNOTATIONS_MAP = {}NEWLINEDATA_MANAGER_ACTIONS = {}NEWLINEDATA_MANAGER_CUSTOM_FILTER_EXPRESSIONS = ''NEWLINEUSER_LOGIN_FORM = 'users.forms.LoginForm'NEWLINEPROJECT_MIXIN = 'core.mixins.DummyModelMixin'NEWLINETASK_MIXIN = 'core.mixins.DummyModelMixin'NEWLINEANNOTATION_MIXIN = 'core.mixins.DummyModelMixin'NEWLINEORGANIZATION_MIXIN = 'core.mixins.DummyModelMixin'NEWLINEUSER_MIXIN = 'users.mixins.UserMixin'NEWLINEGET_STORAGE_LIST = 'io_storages.functions.get_storage_list'NEWLINENEWLINESTORAGE_ANNOTATION_SERIALIZER = 'io_storages.serializers.StorageAnnotationSerializer'NEWLINENEWLINENEWLINEdef project_delete(project):NEWLINE project.delete()NEWLINENEWLINENEWLINEdef user_auth(user_model, email, password):NEWLINE return NoneNEWLINENEWLINENEWLINEdef collect_versions_dummy(**kwargs):NEWLINE return {}NEWLINENEWLINENEWLINEPROJECT_DELETE = project_deleteNEWLINEUSER_AUTH = user_authNEWLINECOLLECT_VERSIONS = collect_versions_dummyNEWLINENEWLINEWEBHOOK_TIMEOUT = float(get_env('WEBHOOK_TIMEOUT', 1.0))NEWLINENEWLINE# fix a problem with Windows mimetypes for JS and PNGNEWLINEimport mimetypesNEWLINEmimetypes.add_type("application/javascript", ".js", True)NEWLINEmimetypes.add_type("image/png", ".png", True)NEWLINE |
from enum import Enum
import logging
import random
import re
import requests
from r2d7.core import DroidCore

logger = logging.getLogger(__name__)

class Talkback(DroidCore):
    """
    Chat responses unrelated to other commands
    """
    # All patterns are raw strings so regex escapes such as \[ and \] reach
    # the regex engine instead of being treated as (invalid) string escape
    # sequences, which is a DeprecationWarning today and a SyntaxError in
    # future CPython versions.
    pattern_fix = re.compile(r'^!((fix)|(typo))', re.I)
    pattern_data = re.compile(r'^!(data)', re.I)
    pattern_help = re.compile(r'^!(help)', re.I)
    pattern_stitchCrew = re.compile(r'^!(stitch ?crew)', re.I)
    pattern_stitchCard = re.compile(r'\[\[(stitch ?crew)\]\]', re.I)
    pattern_egg = re.compile(r'^!((egg)|(sooga))', re.I)

    # Upstream repositories referenced in the canned answers.
    _data_url = 'https://github.com/guidokessels/xwing-data2'
    _r2d7_url = 'https://github.com/FreakyDug/r2-d7'

    def __init__(self):
        super().__init__()
        self.register_handler(Talkback.pattern_fix, self.fixHandler)
        self.register_handler(Talkback.pattern_data, self.dataHandler)
        self.register_handler(Talkback.pattern_help, self.helpHandler)
        self.register_handler(Talkback.pattern_stitchCrew, self.stitchCrewHandler)
        self.register_handler(Talkback.pattern_stitchCard, self.stitchCardHandler)
        self.register_handler(Talkback.pattern_egg, self.eggHandler)

    def fixHandler(self, message):
        """Tell users where to report card-data vs. squad-list problems."""
        dataErrorText = 'For issues with card data, raise an issue or pull request at '
        dataErrorText += self.link(self._data_url, self._data_url)
        squadErrorText = 'For issues with squad lists, raise an issue at '
        squadErrorText += self.link(self._r2d7_url, self._r2d7_url)
        return [[dataErrorText, squadErrorText]]

    def dataHandler(self, message):
        """Credit the source of the X-Wing card data."""
        text = 'X-Wing card data taken from '
        text += self.link(self._data_url, self._data_url)
        return [[text]]

    def helpHandler(self, message):
        """Return the bot's standard help message."""
        return [[self.helpMessage()]]

    def stitchCrewHandler(self, message):
        """Respond to !stitchcrew with one random canned response."""
        lines = [
            ['Stitch who?'],
            ['STITCH CREW!'],
            [':sewing_needle::crew:'],
            [
                self.bold('Stitch Crew'),
                '4 players, 200pts, 2 ships per player, 2 obstacles per player. First player is random and player order proceeds clockwise.',
                f'{self.bold("Setup:")} Players place obstacles in player order until 6 obstacles have been placed. Players deploy ships within range 3 of their assigned table corner and range 1 of the table edge.',
                f'{self.bold("Rules:")} The last surviving player wins the game. Alliances are forbidden, but table talk is encouraged. When a ship engages, if it has one or more valid enemy targets, it must shoot.'
            ]
        ]
        return [random.choice(lines)]

    def stitchCardHandler(self, message):
        """Respond to [[stitch crew]] card lookups with a joke card."""
        lines = [
            [
                f':crew::crew::crew::crew:• {self.bold("Stitch Crew")} [0]',
                self.italics('Restrictions: Stitch Crew Only'),
                'Pew Pew Pew'
            ],
        ]
        return [random.choice(lines)]

    def eggHandler(self, message):
        """Easter-egg handler: one random canned response."""
        lines = [
            ['Sooga! Sooga! Sooga!'],
            ['Utinni!'],
            [':egg:'],
            ['Maclunkey!'],
        ]
        return [random.choice(lines)]
import numpy as np
import unittest

import chainer
from chainer import optimizers
from chainer import testing
from chainer.testing import attr

from chainercv.links.model.ssd import GradientScaling


class SimpleLink(chainer.Link):
    """Minimal link exposing a single parameter with a preassigned gradient."""

    def __init__(self, w, g):
        super(SimpleLink, self).__init__()
        with self.init_scope():
            self.param = chainer.Parameter(w)
            self.param.grad = g


class TestGradientScaling(unittest.TestCase):
    """The GradientScaling hook should scale gradients by `rate` before SGD."""

    def setUp(self):
        initial_w = np.arange(6, dtype=np.float32).reshape((2, 3))
        initial_g = np.arange(3, -3, -1, dtype=np.float32).reshape((2, 3))
        self.target = SimpleLink(initial_w, initial_g)

    def check_gradient_scaling(self):
        rate = 0.2
        weight = self.target.param.array
        grad = self.target.param.grad
        # With lr=1, one SGD step on rate-scaled gradients gives w - rate * g.
        expected = weight - grad * rate

        optimizer = optimizers.SGD(lr=1)
        optimizer.setup(self.target)
        optimizer.add_hook(GradientScaling(rate))
        optimizer.update()

        # `weight` aliases the parameter array captured before the update;
        # the assertion relies on SGD updating it in place.
        testing.assert_allclose(expected, weight)

    def test_gradient_scaling_cpu(self):
        self.check_gradient_scaling()

    @attr.gpu
    def test_gradient_scaling_gpu(self):
        self.target.to_gpu()
        self.check_gradient_scaling()


testing.run_module(__name__, __file__)
# exported from PySB model 'model'
#
# NOTE: auto-generated PySB export of an (extrinsic) apoptosis signalling
# model. Regenerate from the source model instead of hand-editing the
# generated statements below.

from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD

# Instantiate the implicit global model that the calls below populate.
Model()

# --- Monomers (species and their binding sites) ---
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('Bcl2', ['BidM', 'BaxA'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'Bcl2', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM', 'Bcl2'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6A', ['C8pro'])
Monomer('C6pro', ['C3A'])

# --- Rate constants (all exported with placeholder value 1.0) ---
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
# --- Initial amounts ('<species>_0') ---
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 75000.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('Bcl2_0', 130000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6A_0', 0.0)
Parameter('C6pro_0', 100.0)

# --- Observables (total amount of each monomer, any binding state) ---
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('Bcl2_obs', Bcl2())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6A_obs', C6A())
Observable('C6pro_obs', C6pro())

# --- Reaction rules ('|' reversible, '>>' irreversible, '%' bound complex) ---
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None, Bcl2=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None, Bcl2=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None, Bcl2=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('inhibition_0_Bcl2_inhibitor_BidM_inh_target', Bcl2(BidM=None, BaxA=None) + BidM(BaxM=None, Bcl2=None) | Bcl2(BidM=1, BaxA=None) % BidM(BaxM=None, Bcl2=1), inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr)
Rule('inhibition_0_Bcl2_inhibitor_BaxA_inh_target', Bcl2(BidM=None, BaxA=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | Bcl2(BidM=None, BaxA=1) % BaxA(BaxM=None, Bcl2=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr)
# BaxA oligomerisation: dimer, then trimer, then the tetramer pore.
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
# Transport of Smac and cytochrome c through the BaxA tetramer pore.
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)

# --- Initial conditions (unbound species, amounts from the '*_0' parameters) ---
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(Bcl2(BidM=None, BaxA=None), Bcl2_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None, Bcl2=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C6pro(C3A=None), C6pro_0)
import torch
import torch.nn as nn

from . import EmbeddingFeedForward
from .. import util
from ..distributions import TruncatedNormal, Mixture


class ProposalUniformTruncatedNormalMixture(nn.Module):
    """Proposal network for variables with a Uniform prior.

    Maps an observation embedding to a mixture of truncated normals whose
    support is clamped to the prior's [low, high] interval.
    """

    def __init__(self, input_shape, num_layers=2, mixture_components=10):
        super().__init__()
        # Currently only supports event_shape=torch.Size([]) for the mixture components
        self._mixture_components = mixture_components
        input_shape = util.to_size(input_shape)
        # One (mean, stddev, coefficient) triple per mixture component.
        self._ff = EmbeddingFeedForward(input_shape=input_shape, output_shape=torch.Size([3 * self._mixture_components]), num_layers=num_layers, activation=torch.relu, activation_last=None)
        self._total_train_iterations = 0

    def forward(self, x, prior_variables):
        """Return a Mixture of TruncatedNormals, one batch entry per row of x.

        x: observation embedding with batch size x.size(0).
        prior_variables: one variable per batch item whose .distribution
        carries scalar low/high bounds (assumed Uniform — TODO confirm).
        """
        batch_size = x.size(0)
        x = self._ff(x)
        # Split the network output into per-component mean/stddev/coeff blocks.
        means = x[:, :self._mixture_components].view(batch_size, -1)
        stddevs = x[:, self._mixture_components:2*self._mixture_components].view(batch_size, -1)
        coeffs = x[:, 2*self._mixture_components:].view(batch_size, -1)
        # Squash means/stddevs into (0, 1); normalise coefficients to a simplex.
        means = torch.sigmoid(means)
        stddevs = torch.sigmoid(stddevs)
        coeffs = torch.softmax(coeffs, dim=1)
        means = means.view(batch_size, -1)
        stddevs = stddevs.view(batch_size, -1)
        prior_lows = torch.stack([util.to_tensor(v.distribution.low) for v in prior_variables]).view(batch_size)
        prior_highs = torch.stack([util.to_tensor(v.distribution.high) for v in prior_variables]).view(batch_size)
        prior_range = (prior_highs - prior_lows).view(batch_size, -1)
        # Affinely map means from (0, 1) onto the prior's [low, high] interval.
        means = prior_lows.view(batch_size, -1) + (means * prior_range)
        # stddevs = stddevs * prior_stddevs
        # Stddev lies in (range/1000, range/1000 + 10*range): the additive term
        # keeps it strictly positive — magic constants, presumably tuned.
        stddevs = (prior_range / 1000) + (stddevs * prior_range * 10)
        distributions = [TruncatedNormal(means[:, i:i+1].view(batch_size), stddevs[:, i:i+1].view(batch_size), low=prior_lows, high=prior_highs) for i in range(self._mixture_components)]
        return Mixture(distributions, coeffs)
# -*- coding: utf-8 -*-NEWLINE# Generated by Django 1.10.4 on 2018-03-13 00:29NEWLINEfrom __future__ import unicode_literalsNEWLINENEWLINEfrom django.db import migrationsNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('board', '0006_merge_20180310_2200'),NEWLINE ('board', '0007_auto_20180311_1704'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE ]NEWLINE |
"""This test creates two top level actors and one sub-actor andNEWLINE verifies that the actors can exchange sequences of messages."""NEWLINENEWLINEimport timeNEWLINEfrom thespian.actors import *NEWLINEfrom thespian.test import *NEWLINENEWLINEclass rosaline(Actor):NEWLINE name = 'Rosaline'NEWLINENEWLINEclass Romeo(Actor):NEWLINE def receiveMessage(self, msg, sender):NEWLINE if isinstance(msg, JulietAppears):NEWLINE self.send(msg.juliet, "But, soft! what light through yonder window breaks?")NEWLINE elif isinstance(msg, ActorExitRequest):NEWLINE pass # nothing special, just dieNEWLINE elif msg == 'Ay me!':NEWLINE self.send(sender, 'She speaks!')NEWLINE elif msg == 'O Romeo, Romeo! wherefore art thou Romeo?':NEWLINE self.send(sender, 'Shall I hear more, or shall I speak at this?')NEWLINE elif 'rose' in msg:NEWLINE pass # wait for itNEWLINE elif 'sweet' in msg:NEWLINE self.send(sender, 'Like softest music to attending ears!')NEWLINE elif 'hello' in msg:NEWLINE print('Hello from %s'%(str(self)))NEWLINE elif 'who_are_you' == msg:NEWLINE self.send(sender, self.myAddress)NEWLINE # otherwise sit and swoonNEWLINENEWLINENEWLINEclass Capulet(Actor):NEWLINE def receiveMessage(self, msg, sender):NEWLINE if msg == "has a daughter?":NEWLINE self.send(sender, self.createActor(Juliet))NEWLINENEWLINENEWLINEclass Juliet(Actor):NEWLINE def __init__(self, *args, **kw):NEWLINE self.nurse = NoneNEWLINE self.recalled = FalseNEWLINE super(Juliet, self).__init__(*args, **kw)NEWLINE def receiveMessage(self, msg, sender):NEWLINE if isinstance(msg, ActorExitRequest):NEWLINE pass # nothing special, just dieNEWLINE elif "what light" in msg:NEWLINE self.send(sender, 'Ay me!')NEWLINE elif msg == 'She speaks!':NEWLINE self.send(sender, 'O Romeo, Romeo! wherefore art thou Romeo?')NEWLINE elif msg == 'Shall I hear more, or shall I speak at this?':NEWLINE self.send(sender, "What's in a name? 
That which we call a rose")NEWLINE self.send(sender, "By any other name would smell as sweet")NEWLINE elif msg == 'Like softest music to attending ears!':NEWLINE if self.nurse:NEWLINE self.send(self.nurse, 'Anon, good nurse!')NEWLINE else:NEWLINE self.recalled = TrueNEWLINE elif msg == 'Mistress!':NEWLINE self.nurse = senderNEWLINE if self.recalled:NEWLINE self.send(self.nurse, 'Anon, good nurse!')NEWLINE elif 'who_are_you' == msg:NEWLINE self.send(sender, self.myAddress)NEWLINENEWLINENEWLINEclass Nurse(Actor):NEWLINE def __init__(self, *args, **kw):NEWLINE self.heardItAll = FalseNEWLINE super(Nurse, self).__init__(*args, **kw)NEWLINE def receiveMessage(self, msg, sender):NEWLINE if type(msg) == type((1,2)) and msg[0] == 'begin':NEWLINE self.send(msg[1], JulietAppears(msg[2]))NEWLINE self.send(msg[2], 'Mistress!')NEWLINE elif msg == 'Anon, good nurse!':NEWLINE self.heardItAll = TrueNEWLINE elif msg == 'done?':NEWLINE self.send(sender, 'Fini' if self.heardItAll else 'not yet')NEWLINENEWLINENEWLINEclass JulietAppears:NEWLINE stage = 'Right'NEWLINE def __init__(self, julietAddr):NEWLINE self.juliet = julietAddrNEWLINENEWLINENEWLINEclass TestFuncActors():NEWLINENEWLINENEWLINE def test01_ActorSystemStartupShutdown(self, asys):NEWLINE rosalineA = asys.createActor(rosaline)NEWLINE # just finish, make sure no exception is thrown.NEWLINENEWLINE def test01_1_ActorSystemMultipleShutdown(self, asys):NEWLINE rosalineA = asys.createActor(rosaline)NEWLINE asys.shutdown()NEWLINE asys.shutdown()NEWLINENEWLINE def test02_PrimaryActorCreation(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE assert romeo != julietNEWLINENEWLINE def test03_CreateActorUniqueAddress(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE assert romeo != julietNEWLINE romeo2 = asys.createActor(Romeo)NEWLINE assert romeo != romeo2NEWLINENEWLINE def NOtest04_PossibleActorSystemResourceExhaustion(self):NEWLINE 
try:NEWLINE addresses = [asys.createActor(Juliet) for n in range(10000)]NEWLINE except OSError as err:NEWLINE import errnoNEWLINE if err.errno == errno.EGAIN:NEWLINE passNEWLINE else:NEWLINE raiseNEWLINENEWLINENEWLINE def test05_ManyActorsUniqueAddress(self, asys):NEWLINE addresses = [asys.createActor(Juliet) for n in range(50)]NEWLINE uniqueAddresses = []NEWLINE duplicates = []NEWLINE for A in addresses:NEWLINE if A in uniqueAddresses:NEWLINE duplicates.append(A)NEWLINE else:NEWLINE uniqueAddresses.append(A)NEWLINE if len(addresses) != len(uniqueAddresses):NEWLINE print('Duplicates: %s'%map(str, duplicates))NEWLINE if duplicates:NEWLINE for each in duplicates:NEWLINE print('... %s at: %s'%(str(each), str([N for N,A in enumerate(addresses) if A == each])))NEWLINE print('Note: if this is a UDPTransport test, be advised that Linux occasionally does seem to assign the same UDP port multiple times. Linux bug?')NEWLINE assert len(addresses) == len(uniqueAddresses)NEWLINENEWLINE def test06_ManyActorsValidAddresses(self, asys):NEWLINE import stringNEWLINE addresses = [asys.createActor(Juliet) for n in range(100)]NEWLINE for addr in addresses:NEWLINE invchar = ''.join([c for c in str(addr)NEWLINE if c not in string.ascii_letters + string.digits + "-~/():., '|>"])NEWLINE assert str(addr) == str(addr) + invchar # invchar should be blankNEWLINE if asys.base_name.startswith('multiprocUDP'):NEWLINE # Normally the asys.shutdown() following this test willNEWLINE # shutdown all actors, but for the multiprocUDP base, theNEWLINE # ActorSystem (and logger) process are left behind becauseNEWLINE # UDP does not have guaranteed delivery and 100 processesNEWLINE # sending a UDP message to the ActorSystem nearlyNEWLINE # simultaneously overloads and drops packets. 
Use a moreNEWLINE # regulated shutdown here for UDP to avoid this overflowNEWLINE # (which does not hurt anything but leaves actor processesNEWLINE # behind).NEWLINE per_loop = 10NEWLINE for ii in range(0, len(addresses), per_loop):NEWLINE for jj in range(ii, ii + per_loop):NEWLINE asys.tell(addresses[jj], ActorExitRequest())NEWLINE time.sleep(0.25)NEWLINENEWLINE def test07_SingleNonListeningActorTell(self, asys):NEWLINE rosalineA = asys.createActor(rosaline)NEWLINE # rosaline does not override the receiveMessage method, so theNEWLINE # Actor default method will throw an exception. This willNEWLINE # Kill the rosaline Actor. It's a top level Actor, so it willNEWLINE # not be restarted. This will cause the 'hello' message to beNEWLINE # delivered to the DeadLetterBox. Verify that no exceptionNEWLINE # makes its way out of the ActorSystem here.NEWLINE asys.tell(rosalineA, 'hello')NEWLINE assert TrueNEWLINENEWLINE def test08_SingleActorTell(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE asys.tell(romeoA, 'hello')NEWLINE # Nothing much happens, Romeo is smitten and has no time for trivialities, butNEWLINE # he will try to generate str() of himself.NEWLINENEWLINE def test09_SingleActorAsk(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE resp = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert resp, 'Shall I hear more == or shall I speak at this?'NEWLINENEWLINE def test10_ActorAskWithNoResponse(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE # This test is possibly unique to the simpleSystemBase, whichNEWLINE # will run an process all messages on an ask (or tell) call.NEWLINE # Properly there is no way to determine if an answer isNEWLINE # forthcoming from an asynchronous system, so all this can doNEWLINE # is assert that there is no response within a particular timeNEWLINE # period. 
At this point, timing is not supported, so thisNEWLINE # test is underspecified and assumptive.NEWLINE resp = asys.ask(romeoA, "What's in a name? That which we call a rose", 1.5)NEWLINE assert resp is NoneNEWLINE # Now verify that the Actor and system are still alive and operating normally.NEWLINE resp = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert resp, 'Shall I hear more == or shall I speak at this?'NEWLINENEWLINE def test11_SingleActorAskMultipleTimes(self, asys):NEWLINE romeoA = asys.createActor(Romeo)NEWLINE r = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE r = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE r = asys.ask(romeoA, 'Ay me!', 1)NEWLINE assert r == 'She speaks!'NEWLINE r = asys.ask(romeoA, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINENEWLINE def test12_MultipleActorsAskMultipleTimes(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE r = asys.ask(romeo, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE juliet = asys.createActor(Juliet)NEWLINE r = asys.ask(romeo, 'O Romeo, Romeo! wherefore art thou Romeo?', 1)NEWLINE assert r == 'Shall I hear more, or shall I speak at this?'NEWLINE r = asys.ask(romeo, 'Ay me!', 1)NEWLINE assert r == 'She speaks!'NEWLINE r = asys.ask(juliet, 'She speaks!', 1)NEWLINE assert r == 'O Romeo, Romeo! 
wherefore art thou Romeo?'NEWLINE r = asys.ask(romeo, 'Ay me!', 1)NEWLINE assert r == 'She speaks!'NEWLINE r = asys.ask(juliet, "Do you know what light that is?", 1)NEWLINE assert r == 'Ay me!'NEWLINENEWLINE def test13_SubActorCreation(self, asys):NEWLINE capulet = asys.createActor(Capulet)NEWLINE juliet = asys.ask(capulet, 'has a daughter?', 2.5)NEWLINE print ('Juliet is: %s'%str(juliet))NEWLINE assert juliet is not NoneNEWLINE if juliet:NEWLINE r = asys.ask(juliet, 'what light?')NEWLINE assert r == 'Ay me!', 0.75NEWLINE juliet2 = asys.ask(capulet, 'has a daughter?', 1)NEWLINE assert juliet2 is not NoneNEWLINE if juliet2:NEWLINE r = asys.ask(juliet2, 'what light?', 0.5)NEWLINE assert r == 'Ay me!'NEWLINE r = asys.ask(juliet, 'what light?', 0.5)NEWLINE assert r == 'Ay me!'NEWLINENEWLINE def test14_EntireActWithActorStart(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE nurse = asys.createActor(Nurse)NEWLINE assert asys.ask(nurse, 'done?', 1) == 'not yet'NEWLINE asys.tell(nurse, ('begin', romeo, juliet))NEWLINENEWLINE for X in range(50):NEWLINE if asys.ask(nurse, 'done?', 1) == 'Fini':NEWLINE breakNEWLINE time.sleep(0.01) # Allow some time for the entire actNEWLINE r = asys.ask(nurse, 'done?', 1)NEWLINE assert r == 'Fini'NEWLINENEWLINE def test15_IncompleteActMissingActor(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINE # no nurse actor createdNEWLINE asys.tell(romeo, JulietAppears(juliet))NEWLINE # No error should occur here when Juliet reaches the end andNEWLINE # doesn't have a nurse to tell.NEWLINENEWLINE time.sleep(0.05) # Allow some time for the entire actNEWLINENEWLINE # Now create the nurse and tell her to talk to romeo andNEWLINE # juliet, which should cause completionNEWLINE nurse = asys.createActor(Nurse)NEWLINE r = asys.ask(nurse, 'done?', 1)NEWLINE assert r == 'not yet'NEWLINE asys.tell(nurse, ('begin', romeo, juliet))NEWLINENEWLINE for X in 
range(50):NEWLINE if asys.ask(nurse, 'done?', 1) == 'Fini':NEWLINE breakNEWLINE time.sleep(0.01) # Allow some time for the entire actNEWLINE r = asys.ask(nurse, 'done?', 1)NEWLINE assert r == 'Fini'NEWLINENEWLINE def test16_ActorProperties(self, asys):NEWLINE romeo = asys.createActor(Romeo)NEWLINE juliet = asys.createActor(Juliet)NEWLINENEWLINE r = asys.ask(romeo, 'who_are_you', 0.25)NEWLINE assert r is not NoneNEWLINE r = asys.ask(juliet, 'who_are_you', 0.25)NEWLINE assert r is not NoneNEWLINE r1 = asys.ask(romeo, 'who_are_you', 0.25)NEWLINE r2 = asys.ask(juliet, 'who_are_you', 0.25)NEWLINE assert r1 != r2NEWLINE |
import yaml
import types
import pandas as pd
from Handler.mongo_handler import MongoHandler
from Utils.utils import Log
yaml.warnings({'YAMLLoadWarning': False})
# safe_load avoids the unsafe default Loader (and its deprecation warning);
# a plain config file needs no arbitrary-object construction.
with open("config.yaml", "rt", encoding="utf-8") as stream:
    CONFIG = yaml.safe_load(stream)['StockCrawler']


class DataHandler:
    """Resolves the target-company list (from MongoDB or a file, chosen by
    config) and persists crawled stock data back to MongoDB."""

    def __init__(self):
        self.log = Log(DataHandler)
        self.mongo = MongoHandler()
        self.company_info = None   # full company DataFrame, filled by get_target_company
        self.company_list = None   # subset with 'company' and 'code' columns

        # Bind the configured lookup strategy.  The original used
        # types.MethodType(self._get_company_by_mongo, self), which wraps an
        # ALREADY-bound method and so passed the instance a second time as the
        # spurious 'obj' argument; assigning the bound method directly is the
        # correct equivalent.
        check_target_location = CONFIG['company_name_location']
        if check_target_location == 'DB':
            self.get_target_company = self._get_company_by_mongo
        elif check_target_location == 'File':
            self.get_target_company = self._get_company_by_file

    def get_target_company(self):
        # Placeholder; replaced per-instance in __init__ according to config.
        pass

    def save_stock_data(self, stock_df):
        """Persist a crawled stock DataFrame via the Mongo handler."""
        self.mongo.update_stock_data(stock_df)

    def _get_company_by_mongo(self):
        """Load the company list from MongoDB into DataFrames."""
        self.log.debug("Get company information by database(MongoDB)")
        self.company_info = pd.DataFrame(self.mongo.get_company())
        self.company_list = self.company_info[['company', 'code']]

    def _get_company_by_file(self):
        # File-based lookup not implemented yet.
        pass
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.portable.cache_utils."""
import os
import tensorflow as tf

from tfx.dsl.io import fileio
from tfx.orchestration import metadata
from tfx.orchestration.portable import cache_utils
from tfx.orchestration.portable import execution_publish_utils
from tfx.orchestration.portable.mlmd import context_lib
from tfx.proto.orchestration import pipeline_pb2
from tfx.types import standard_artifacts
from tfx.utils import test_case_utils
from google.protobuf import text_format
from ml_metadata.proto import metadata_store_pb2


class CacheUtilsTest(test_case_utils.TfxTest):
  """Exercises cache-context creation and cached-output lookup.

  Each test opens an in-memory sqlite-backed MLMD store, builds cache
  contexts from varying inputs, and asserts whether a new context is
  created (cache miss) or the existing one is reused (cache hit).
  """

  def setUp(self):
    super().setUp()
    # In-memory sqlite MLMD store; SetInParent selects the sqlite backend
    # with default (empty) options.
    self._connection_config = metadata_store_pb2.ConnectionConfig()
    self._connection_config.sqlite.SetInParent()
    self._module_file_path = os.path.join(self.tmp_dir, 'module_file')
    self._input_artifacts = {'input_examples': [standard_artifacts.Examples()]}
    self._output_artifacts = {'output_models': [standard_artifacts.Model()]}
    self._parameters = {'module_file': self._module_file_path}
    self._module_file_content = 'module content'
    self._pipeline_node = text_format.Parse(
        """
        executor {
          python_class_executor_spec {class_path: 'a.b.c'}
        }
        """, pipeline_pb2.PipelineNode())
    self._executor_class_path = 'a.b.c'
    self._pipeline_info = pipeline_pb2.PipelineInfo(id='pipeline_id')

  def _get_cache_context(self,
                         metadata_handler,
                         custom_pipeline_node=None,
                         custom_pipeline_info=None,
                         custom_input_artifacts=None,
                         custom_output_artifacts=None,
                         custom_parameters=None,
                         custom_module_content=None):
    """Builds a cache context, overriding any setUp() default via custom_*.

    Also (re)writes the module file, since its content participates in the
    cache key.
    """
    with fileio.open(self._module_file_path, 'w+') as f:
      f.write(custom_module_content or self._module_file_content)
    return cache_utils.get_cache_context(
        metadata_handler,
        custom_pipeline_node or self._pipeline_node,
        custom_pipeline_info or self._pipeline_info,
        input_artifacts=(custom_input_artifacts or self._input_artifacts),
        output_artifacts=(custom_output_artifacts or self._output_artifacts),
        parameters=(custom_parameters or self._parameters))

  def testGetCacheContext(self):
    # The returned context must match what was actually registered in MLMD.
    with metadata.Metadata(connection_config=self._connection_config) as m:
      cache_context = self._get_cache_context(m)
      [context_from_mlmd] = m.store.get_contexts()
      self.assertProtoPartiallyEquals(
          cache_context,
          context_from_mlmd,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])

  def testGetCacheContextTwiceSameArgs(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(m)
      # Same args should not create a new cache context.
      self.assertLen(m.store.get_contexts(), 1)

  def testGetCacheContextTwiceDifferentOutputUri(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      output_model_different_uri = standard_artifacts.Model()
      output_model_different_uri.uri = 'diff_uri'
      self._get_cache_context(
          m,
          custom_output_artifacts={
              'output_models': [output_model_different_uri]
          })
      # Only different output uri should not create a new cache context.
      self.assertLen(m.store.get_contexts(), 1)

  def testGetCacheContextTwiceDifferentOutputs(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m, custom_output_artifacts={'k': [standard_artifacts.Model()]})
      # Different output skeleton will result in a new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentInputs(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m, custom_input_artifacts={'k': [standard_artifacts.Examples(),]})
      # Different input artifacts will result in new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentParameters(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(m, custom_parameters={'new_prop': 'value'})
      # Different parameters will result in new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentModuleContent(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(m, custom_module_content='new module content')
      # Different module file content will result in new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentPipelineInfo(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m, custom_pipeline_info=pipeline_pb2.PipelineInfo(id='new_id'))
      # Different pipeline info will result in new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentExecutorSpec(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m,
          custom_pipeline_node=text_format.Parse(
              """
              executor {
                python_class_executor_spec {class_path: 'n.e.w'}
              }
              """, pipeline_pb2.PipelineNode()))
      # Different executor spec will result in new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCachedOutputArtifacts(self):
    # Output artifacts that will be used by the first execution with the same
    # cache key.
    output_model_one = standard_artifacts.Model()
    output_model_one.uri = 'model_one'
    output_model_two = standard_artifacts.Model()
    output_model_two.uri = 'model_two'
    output_example_one = standard_artifacts.Examples()
    output_example_one.uri = 'example_one'
    # Output artifacts that will be used by the second execution with the same
    # cache key.
    output_model_three = standard_artifacts.Model()
    output_model_three.uri = 'model_three'
    output_model_four = standard_artifacts.Model()
    output_model_four.uri = 'model_four'
    output_example_two = standard_artifacts.Examples()
    output_example_two.uri = 'example_two'
    output_models_key = 'output_models'
    output_examples_key = 'output_examples'
    with metadata.Metadata(connection_config=self._connection_config) as m:
      cache_context = context_lib.register_context_if_not_exists(
          m, context_lib.CONTEXT_TYPE_EXECUTION_CACHE, 'cache_key')
      execution_one = execution_publish_utils.register_execution(
          m, metadata_store_pb2.ExecutionType(name='my_type'), [cache_context])
      execution_publish_utils.publish_succeeded_execution(
          m,
          execution_one.id, [cache_context],
          output_artifacts={
              output_models_key: [output_model_one, output_model_two],
              output_examples_key: [output_example_one]
          })
      execution_two = execution_publish_utils.register_execution(
          m, metadata_store_pb2.ExecutionType(name='my_type'), [cache_context])
      output_artifacts = execution_publish_utils.publish_succeeded_execution(
          m,
          execution_two.id, [cache_context],
          output_artifacts={
              output_models_key: [output_model_three, output_model_four],
              output_examples_key: [output_example_two]
          })
      # The cached output got should be the artifacts produced by the most
      # recent execution under the given cache context.
      cached_output = cache_utils.get_cached_outputs(m, cache_context)
      self.assertLen(cached_output, 2)
      self.assertLen(cached_output[output_models_key], 2)
      self.assertLen(cached_output[output_examples_key], 1)
      self.assertProtoPartiallyEquals(
          cached_output[output_models_key][0].mlmd_artifact,
          output_artifacts[output_models_key][0].mlmd_artifact,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])
      self.assertProtoPartiallyEquals(
          cached_output[output_models_key][1].mlmd_artifact,
          output_artifacts[output_models_key][1].mlmd_artifact,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])
      self.assertProtoPartiallyEquals(
          cached_output[output_examples_key][0].mlmd_artifact,
          output_artifacts[output_examples_key][0].mlmd_artifact,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])


if __name__ == '__main__':
  tf.test.main()
"""NEWLINEThe script is for creating a new graphml including nodes and edges NEWLINEbased on the subset geopackage created by "sv_createSubsetData.py".NEWLINEThis can reduce the volume of graphml, which can reduce the usage of memory in pc and NEWLINEimprove performace.NEWLINENEWLINE"""NEWLINEimport osmnx as oxNEWLINEimport networkx as nxNEWLINEimport osNEWLINEimport pandas as pdNEWLINEimport geopandas as gpdNEWLINEimport timeNEWLINENEWLINENEWLINEdef creatSubGraph(graphPath, gpkg_path, graphOutput):NEWLINE print('read original grapml, and save nodes and edges to geopackage.')NEWLINE G = ox.load_graphml(graphPath)NEWLINE nodes, edges = ox.graph_to_gdfs(G)NEWLINE # nodes = nodes.astype(str)NEWLINE columns = edges.columns.tolist()NEWLINE columns.remove('geometry')NEWLINE edges[columns] = edges[columns].astype(str)NEWLINE nodes.to_file(gpkg_path, layer='nodes_original', driver='GPKG')NEWLINE edges.to_file(gpkg_path, layer='edges_original', driver='GPKG')NEWLINE # sp = gpd.read_file(gpkg_path, layer='urban_sample_points')NEWLINE # nodesIds = pd.concat([sp.n1, sp.n2])NEWLINE # node_drop = nodesIds.drop_duplicates()NEWLINE # nodes = node_drop.tolist()NEWLINE # nodes_int = list(map(int, nodes))NEWLINE print('select nodes within study region buffer.')NEWLINE region_buffer = gpd.read_file(gpkg_path,NEWLINE layer='urban_study_region_buffered')NEWLINE nodes_withinbuffer = gpd.sjoin(nodes,NEWLINE region_buffer,NEWLINE how='inner',NEWLINE op='within')NEWLINE nodes_withinbuffer = nodes_withinbuffer.drop_duplicates(subset='osmid')NEWLINE nodesIds = nodes_withinbuffer.osmid.tolist()NEWLINE nodes_int = list(map(int, nodesIds))NEWLINE print('create sub grapml.')NEWLINE G_sub = G.subgraph(nodes_int).copy()NEWLINE # print(G_sub.nodes)NEWLINE print('save sub nodes and edges to geopackage.')NEWLINE nodes_sub, edges_sub = ox.graph_to_gdfs(G_sub)NEWLINE # nodes_sub = nodes_sub.astype(str)NEWLINE cols = edges_sub.columns.tolist()NEWLINE cols.remove('geometry')NEWLINE edges_sub[cols] = 
edges_sub[cols].astype(str)NEWLINE nodes_sub.to_file(gpkg_path, layer='nodes_subset', driver='GPKG')NEWLINE edges_sub.to_file(gpkg_path, layer='edges_subset', driver='GPKG')NEWLINE del nodes, edgesNEWLINE del edges_sub, nodes_subNEWLINE ox.save_graphml(G_sub,NEWLINE filename=graphOutput,NEWLINE folder=os.path.join(dirname, 'data'))NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE startTime = time.time()NEWLINE print('begin to process')NEWLINE dirname = os.path.abspath('')NEWLINE graph_path = os.path.join(NEWLINE dirname,NEWLINE 'data/phoenix_us_2019_10000m_pedestrian_osm_20190902_proj.graphml')NEWLINE gpkg_path = os.path.join(dirname,NEWLINE 'data/phoenix_us_2019_subset.gpkg')NEWLINE graph_output = 'phoenix_us_2019_10000m_pedestrian_osm_20190902_proj_subset.graphml'NEWLINE creatSubGraph(graph_path, gpkg_path, graph_output)NEWLINE print("finished, time is {}".format(time.time() - startTime)) |
#!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: onyx_vlan
author: "Samer Deeb (@samerd) Alex Tabachnik (@atabachnik)"
short_description: Manage VLANs on Mellanox ONYX network devices
description:
  - This module provides declarative management of VLANs
    on Mellanox ONYX network devices.
options:
  name:
    description:
      - Name of the VLAN.
  vlan_id:
    description:
      - ID of the VLAN.
  aggregate:
    description: List of VLANs definitions.
  purge:
    description:
      - Purge VLANs not defined in the I(aggregate) parameter.
    default: no
    type: bool
  state:
    description:
      - State of the VLAN configuration.
    default: present
    choices: ['present', 'absent']
'''

EXAMPLES = """
- name: configure VLAN ID and name
  onyx_vlan:
    vlan_id: 20
    name: test-vlan

- name: remove configuration
  onyx_vlan:
    state: absent
"""

RETURN = """
commands:
  description: The list of configuration mode commands to send to the device
  returned: always.
  type: list
  sample:
    - vlan 20
    - name test-vlan
    - exit
"""

from copy import deepcopy

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import remove_default_spec

from ansible_collections.community.general.plugins.module_utils.network.onyx.onyx import BaseOnyxModule
from ansible_collections.community.general.plugins.module_utils.network.onyx.onyx import show_cmd


class OnyxVlanModule(BaseOnyxModule):
    """Declarative VLAN management: diffs required config against the
    device's current VLAN table and emits the CLI commands to reconcile."""

    # Set from the 'purge' module parameter; when True, VLANs present on the
    # device but absent from the requested config are removed.
    _purge = False

    @classmethod
    def _get_element_spec(cls):
        """Argument spec for a single VLAN entry."""
        return dict(
            vlan_id=dict(type='int'),
            name=dict(type='str'),
            state=dict(default='present', choices=['present', 'absent']),
        )

    @classmethod
    def _get_aggregate_spec(cls, element_spec):
        """Argument spec for items inside the 'aggregate' list."""
        aggregate_spec = deepcopy(element_spec)
        aggregate_spec['vlan_id'] = dict(required=True)

        # remove default in aggregate spec, to handle common arguments
        remove_default_spec(aggregate_spec)
        return aggregate_spec

    def init_module(self):
        """ module initialization
        """
        element_spec = self._get_element_spec()
        aggregate_spec = self._get_aggregate_spec(element_spec)
        argument_spec = dict(
            aggregate=dict(type='list', elements='dict',
                           options=aggregate_spec),
            purge=dict(default=False, type='bool'),
        )
        argument_spec.update(element_spec)
        # vlan_id and aggregate are alternative ways to specify VLANs:
        # exactly one of them must be given.
        required_one_of = [['vlan_id', 'aggregate']]
        mutually_exclusive = [['vlan_id', 'aggregate']]
        self._module = AnsibleModule(
            argument_spec=argument_spec,
            required_one_of=required_one_of,
            mutually_exclusive=mutually_exclusive,
            supports_check_mode=True)

    def validate_vlan_id(self, value):
        """Fail the module run unless value is a valid 802.1Q VLAN id."""
        if value and not 1 <= int(value) <= 4094:
            self._module.fail_json(msg='vlan id must be between 1 and 4094')

    def get_required_config(self):
        """Normalize module params (single VLAN or aggregate list) into
        self._required_config as a list of per-VLAN dicts."""
        self._required_config = list()
        module_params = self._module.params
        aggregate = module_params.get('aggregate')
        self._purge = module_params.get('purge', False)
        if aggregate:
            for item in aggregate:
                # fill unset per-item keys from the top-level parameters
                for key in item:
                    if item.get(key) is None:
                        item[key] = module_params[key]
                self.validate_param_values(item, item)
                req_item = item.copy()
                req_item['vlan_id'] = int(req_item['vlan_id'])
                self._required_config.append(req_item)
        else:
            params = {
                'vlan_id': module_params['vlan_id'],
                'name': module_params['name'],
                'state': module_params['state'],
            }
            self.validate_param_values(params)
            self._required_config.append(params)

    def _create_vlan_data(self, vlan_id, vlan_data):
        """Convert one 'show vlan' entry into the internal dict shape."""
        # Newer ONYX API versions wrap each entry in a single-element list.
        if self._os_version >= self.ONYX_API_VERSION:
            vlan_data = vlan_data[0]
        return {
            'vlan_id': vlan_id,
            'name': self.get_config_attr(vlan_data, 'Name')
        }

    def _get_vlan_config(self):
        return show_cmd(self._module, "show vlan")

    def load_current_config(self):
        # called in base class in run function
        self._os_version = self._get_os_version()
        self._current_config = dict()
        vlan_config = self._get_vlan_config()
        if not vlan_config:
            return
        for vlan_id, vlan_data in iteritems(vlan_config):
            # skip non-numeric keys in the device reply (e.g. headers)
            try:
                vlan_id = int(vlan_id)
            except ValueError:
                continue
            self._current_config[vlan_id] = \
                self._create_vlan_data(vlan_id, vlan_data)

    def generate_commands(self):
        """Diff required vs. current config and append CLI commands."""
        req_vlans = set()
        for req_conf in self._required_config:
            state = req_conf['state']
            vlan_id = req_conf['vlan_id']
            if state == 'absent':
                if vlan_id in self._current_config:
                    self._commands.append('no vlan %s' % vlan_id)
            else:
                req_vlans.add(vlan_id)
                self._generate_vlan_commands(vlan_id, req_conf)
        if self._purge:
            # remove device VLANs that were not requested
            for vlan_id in self._current_config:
                if vlan_id not in req_vlans:
                    self._commands.append('no vlan %s' % vlan_id)

    def _generate_vlan_commands(self, vlan_id, req_conf):
        """Emit create/rename commands for a single present-state VLAN."""
        curr_vlan = self._current_config.get(vlan_id, {})
        if not curr_vlan:
            self._commands.append("vlan %s" % vlan_id)
            self._commands.append("exit")
        req_name = req_conf['name']
        curr_name = curr_vlan.get('name')
        if req_name:
            if req_name != curr_name:
                self._commands.append("vlan %s name %s" % (vlan_id, req_name))
        elif req_name is not None:
            # req_name is an empty string: explicitly clear the name
            if curr_name:
                self._commands.append("vlan %s no name" % vlan_id)


def main():
    """ main entry point for module execution
    """
    OnyxVlanModule.main()


if __name__ == '__main__':
    main()
def isMatch(s: str, p: str) -> bool:
    """Wildcard matching: '?' matches any single character, '*' matches any
    (possibly empty) sequence. Returns True iff pattern p covers all of s."""
    rows, cols = len(s), len(p)
    # table[i][j]: does p[:j] match s[:i]?
    table = [[False] * (cols + 1) for _ in range(rows + 1)]
    table[0][0] = True
    # Only a leading run of '*' can match the empty string.
    for j, ch in enumerate(p, start=1):
        if ch != "*":
            break
        table[0][j] = True

    for i in range(1, rows + 1):
        for j in range(1, cols + 1):
            pat = p[j - 1]
            if pat == "*":
                # '*' absorbs one more char of s, or matches empty.
                table[i][j] = table[i][j - 1] or table[i - 1][j]
            elif pat == "?" or pat == s[i - 1]:
                table[i][j] = table[i - 1][j - 1]
    return table[rows][cols]


if __name__ == "__main__":
    print(isMatch("adceb", "*a*b"))
# -*- coding: utf-8 -*-NEWLINE# Generated by Django 1.10.5 on 2017-11-03 13:12NEWLINEfrom __future__ import unicode_literalsNEWLINENEWLINEfrom django.db import migrations, modelsNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('main', '0003_profile'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.AlterField(NEWLINE model_name='medication',NEWLINE name='img_path',NEWLINE field=models.CharField(max_length=200, null=True),NEWLINE ),NEWLINE migrations.AlterField(NEWLINE model_name='medication',NEWLINE name='title',NEWLINE field=models.CharField(max_length=200, unique=True),NEWLINE ),NEWLINE ]NEWLINE |
"""This is the one place the version number is stored."""NEWLINENEWLINE__version__ = '0.5.1'NEWLINE |
import torchNEWLINENEWLINEfrom .resnet import NormalizationNEWLINEfrom .preact_resnet import preact_resnetNEWLINEfrom .resnet import resnetNEWLINEfrom .wideresnet import wideresnetNEWLINENEWLINEfrom .preact_resnetwithswish import preact_resnetwithswishNEWLINEfrom .wideresnetwithswish import wideresnetwithswishNEWLINENEWLINEfrom core.data import DATASETSNEWLINENEWLINENEWLINEMODELS = ['resnet18', 'resnet34', 'resnet50', 'resnet101', NEWLINE 'preact-resnet18', 'preact-resnet34', 'preact-resnet50', 'preact-resnet101', NEWLINE 'wrn-28-10', 'wrn-32-10', 'wrn-34-10', 'wrn-34-20', NEWLINE 'preact-resnet18-swish', 'preact-resnet34-swish',NEWLINE 'wrn-28-10-swish', 'wrn-34-20-swish', 'wrn-70-16-swish']NEWLINENEWLINENEWLINEdef create_model(name, normalize, info, device):NEWLINE """NEWLINE Returns suitable model from its name.NEWLINE Arguments:NEWLINE name (str): name of resnet architecture.NEWLINE normalize (bool): normalize input.NEWLINE info (dict): dataset information.NEWLINE device (str or torch.device): device to work on.NEWLINE Returns:NEWLINE torch.nn.Module.NEWLINE """NEWLINE if info['data'] in ['tiny-imagenet']:NEWLINE assert 'preact-resnet' in name, 'Only preact-resnets are supported for this dataset!'NEWLINE from .ti_preact_resnet import ti_preact_resnetNEWLINE backbone = ti_preact_resnet(name, num_classes=info['num_classes'], device=device)NEWLINE NEWLINE elif info['data'] in DATASETS and info['data'] not in ['tiny-imagenet']:NEWLINE if 'preact-resnet' in name and 'swish' not in name:NEWLINE backbone = preact_resnet(name, num_classes=info['num_classes'], pretrained=False, device=device)NEWLINE elif 'preact-resnet' in name and 'swish' in name:NEWLINE backbone = preact_resnetwithswish(name, dataset=info['data'], num_classes=info['num_classes'])NEWLINE elif 'resnet' in name and 'preact' not in name:NEWLINE backbone = resnet(name, num_classes=info['num_classes'], pretrained=False, device=device)NEWLINE elif 'wrn' in name and 'swish' not in name:NEWLINE backbone = 
wideresnet(name, num_classes=info['num_classes'], device=device)NEWLINE elif 'wrn' in name and 'swish' in name:NEWLINE backbone = wideresnetwithswish(name, dataset=info['data'], num_classes=info['num_classes'], device=device)NEWLINE else:NEWLINE raise ValueError('Invalid model name {}!'.format(name))NEWLINE NEWLINE else:NEWLINE raise ValueError('Models for {} not yet supported!'.format(info['data']))NEWLINE NEWLINE if normalize:NEWLINE model = torch.nn.Sequential(Normalization(info['mean'], info['std']), backbone)NEWLINE else:NEWLINE model = torch.nn.Sequential(backbone)NEWLINE NEWLINE model = torch.nn.DataParallel(model)NEWLINE model = model.to(device)NEWLINE return modelNEWLINE |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and ContributorsNEWLINE# MIT License. See license.txtNEWLINEfrom __future__ import unicode_literalsNEWLINENEWLINEimport frappe, unittestNEWLINENEWLINEfrom frappe.model.db_query import DatabaseQueryNEWLINEfrom frappe.desk.reportview import get_filters_condNEWLINENEWLINEclass TestReportview(unittest.TestCase):NEWLINE def test_basic(self):NEWLINE self.assertTrue({"name":"DocType"} in DatabaseQuery("DocType").execute(limit_page_length=None))NEWLINENEWLINE def test_fields(self):NEWLINE self.assertTrue({"name":"DocType", "issingle":0} \NEWLINE in DatabaseQuery("DocType").execute(fields=["name", "issingle"], limit_page_length=None))NEWLINENEWLINE def test_filters_1(self):NEWLINE self.assertFalse({"name":"DocType"} \NEWLINE in DatabaseQuery("DocType").execute(filters=[["DocType", "name", "like", "J%"]]))NEWLINENEWLINE def test_filters_2(self):NEWLINE self.assertFalse({"name":"DocType"} \NEWLINE in DatabaseQuery("DocType").execute(filters=[{"name": ["like", "J%"]}]))NEWLINENEWLINE def test_filters_3(self):NEWLINE self.assertFalse({"name":"DocType"} \NEWLINE in DatabaseQuery("DocType").execute(filters={"name": ["like", "J%"]}))NEWLINENEWLINE def test_filters_4(self):NEWLINE self.assertTrue({"name":"DocField"} \NEWLINE in DatabaseQuery("DocType").execute(filters={"name": "DocField"}))NEWLINENEWLINE def test_in_not_in_filters(self):NEWLINE self.assertFalse(DatabaseQuery("DocType").execute(filters={"name": ["in", None]}))NEWLINE self.assertTrue({"name":"DocType"} \NEWLINE in DatabaseQuery("DocType").execute(filters={"name": ["not in", None]}))NEWLINENEWLINE for result in [{"name":"DocType"}, {"name":"DocField"}]:NEWLINE self.assertTrue(resultNEWLINE in DatabaseQuery("DocType").execute(filters={"name": ["in", 'DocType,DocField']}))NEWLINENEWLINE for result in [{"name":"DocType"}, {"name":"DocField"}]:NEWLINE self.assertFalse(resultNEWLINE in DatabaseQuery("DocType").execute(filters={"name": ["not in", 
'DocType,DocField']}))NEWLINENEWLINE def test_or_filters(self):NEWLINE data = DatabaseQuery("DocField").execute(NEWLINE filters={"parent": "DocType"}, fields=["fieldname", "fieldtype"],NEWLINE or_filters=[{"fieldtype":"Table"}, {"fieldtype":"Select"}])NEWLINENEWLINE self.assertTrue({"fieldtype":"Table", "fieldname":"fields"} in data)NEWLINE self.assertTrue({"fieldtype":"Select", "fieldname":"document_type"} in data)NEWLINE self.assertFalse({"fieldtype":"Check", "fieldname":"issingle"} in data)NEWLINENEWLINE def test_between_filters(self):NEWLINE """ test case to check between filter for date fields """NEWLINE frappe.db.sql("delete from tabEvent")NEWLINENEWLINE # create events to test the between operator filterNEWLINE todays_event = create_event()NEWLINE event1 = create_event(starts_on="2016-07-05 23:59:59")NEWLINE event2 = create_event(starts_on="2016-07-06 00:00:00")NEWLINE event3 = create_event(starts_on="2016-07-07 23:59:59")NEWLINE event4 = create_event(starts_on="2016-07-08 00:00:01")NEWLINENEWLINE # if the values are not passed in filters then event should be filter as current datetimeNEWLINE data = DatabaseQuery("Event").execute(NEWLINE filters={"starts_on": ["between", None]}, fields=["name"])NEWLINENEWLINE self.assertTrue({ "name": event1.name } not in data)NEWLINENEWLINE # if both from and to_date values are passedNEWLINE data = DatabaseQuery("Event").execute(NEWLINE filters={"starts_on": ["between", ["2016-07-06", "2016-07-07"]]},NEWLINE fields=["name"])NEWLINENEWLINE self.assertTrue({ "name": event2.name } in data)NEWLINE self.assertTrue({ "name": event3.name } in data)NEWLINE self.assertTrue({ "name": event1.name } not in data)NEWLINE self.assertTrue({ "name": event4.name } not in data)NEWLINENEWLINE # if only one value is passed in the filterNEWLINE data = DatabaseQuery("Event").execute(NEWLINE filters={"starts_on": ["between", ["2016-07-07"]]},NEWLINE fields=["name"])NEWLINENEWLINE self.assertTrue({ "name": event3.name } in data)NEWLINE 
self.assertTrue({ "name": event4.name } in data)NEWLINE self.assertTrue({ "name": todays_event.name } in data)NEWLINE self.assertTrue({ "name": event1.name } not in data)NEWLINE self.assertTrue({ "name": event2.name } not in data)NEWLINENEWLINE def test_ignore_permissions_for_get_filters_cond(self):NEWLINE frappe.set_user('test1@example.com')NEWLINE self.assertRaises(frappe.PermissionError, get_filters_cond, 'DocType', dict(istable=1), [])NEWLINE self.assertTrue(get_filters_cond('DocType', dict(istable=1), [], ignore_permissions=True))NEWLINE frappe.set_user('Administrator')NEWLINENEWLINE def test_query_fields_sanitizer(self):NEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle, version()"], limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle, IF(issingle=1, (select name from tabUser), count(name))"],NEWLINE limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle, (select count(*) from tabSessions)"],NEWLINE limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle, SELECT LOCATE('', `tabUser`.`user`) AS user;"],NEWLINE limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle, IF(issingle=1, (SELECT name from tabUser), count(*))"],NEWLINE limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle ''"],limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle,'"],limit_start=0, limit_page_length=1)NEWLINENEWLINE 
self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "select * from tabSessions"],limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle from --"],limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "issingle from tabDocType order by 2 --"],limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name", "1' UNION SELECT * FROM __Auth --"],limit_start=0, limit_page_length=1)NEWLINENEWLINE data = DatabaseQuery("DocType").execute(fields=["name", "issingle", "count(name)"],NEWLINE limit_start=0, limit_page_length=1)NEWLINE self.assertTrue('count(name)' in data[0])NEWLINENEWLINE data = DatabaseQuery("DocType").execute(fields=["name", "issingle", "locate('', name) as _relevance"],NEWLINE limit_start=0, limit_page_length=1)NEWLINE self.assertTrue('_relevance' in data[0])NEWLINENEWLINE data = DatabaseQuery("DocType").execute(fields=["name", "issingle", "date(creation) as creation"],NEWLINE limit_start=0, limit_page_length=1)NEWLINE self.assertTrue('creation' in data[0])NEWLINENEWLINE data = DatabaseQuery("DocType").execute(fields=["name", "issingle",NEWLINE "datediff(modified, creation) as date_diff"], limit_start=0, limit_page_length=1)NEWLINE self.assertTrue('date_diff' in data[0])NEWLINENEWLINE def test_filter_sanitizer(self):NEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name"], filters={'istable,': 1}, limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name"], filters={'editable_grid,': 1}, or_filters={'istable,': 1},NEWLINE limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, 
DatabaseQuery("DocType").execute,NEWLINE fields=["name"], filters={'editable_grid,': 1},NEWLINE or_filters=[['DocType', 'istable,', '=', 1]],NEWLINE limit_start=0, limit_page_length=1)NEWLINENEWLINE self.assertRaises(frappe.DataError, DatabaseQuery("DocType").execute,NEWLINE fields=["name"], filters={'editable_grid,': 1},NEWLINE or_filters=[['DocType', 'istable', '=', 1], ['DocType', 'beta and 1=1', '=', 0]],NEWLINE limit_start=0, limit_page_length=1)NEWLINENEWLINE out = DatabaseQuery("DocType").execute(fields=["name"],NEWLINE filters={'editable_grid': 1, 'module': 'Core'},NEWLINE or_filters=[['DocType', 'istable', '=', 1]], order_by='creation')NEWLINE self.assertTrue('DocField' in [d['name'] for d in out])NEWLINENEWLINE out = DatabaseQuery("DocType").execute(fields=["name"],NEWLINE filters={'issingle': 1}, or_filters=[['DocType', 'module', '=', 'Core']],NEWLINE order_by='creation')NEWLINE self.assertTrue('User Permission for Page and Report' in [d['name'] for d in out])NEWLINENEWLINE out = DatabaseQuery("DocType").execute(fields=["name"],NEWLINE filters={'track_changes': 1, 'module': 'Core'},NEWLINE order_by='creation')NEWLINE self.assertTrue('File' in [d['name'] for d in out])NEWLINENEWLINE out = DatabaseQuery("DocType").execute(fields=["name"],NEWLINE filters=[NEWLINE ['DocType', 'ifnull(track_changes, 0)', '=', 0],NEWLINE ['DocType', 'module', '=', 'Core']NEWLINE ], order_by='creation')NEWLINE self.assertTrue('DefaultValue' in [d['name'] for d in out])NEWLINENEWLINEdef create_event(subject="_Test Event", starts_on=None):NEWLINE """ create a test event """NEWLINENEWLINE from frappe.utils import get_datetimeNEWLINENEWLINE event = frappe.get_doc({NEWLINE "doctype": "Event",NEWLINE "subject": subject,NEWLINE "event_type": "Public",NEWLINE "starts_on": get_datetime(starts_on),NEWLINE }).insert(ignore_permissions=True)NEWLINENEWLINE return eventNEWLINE |
# Copyright 2022 AI SingaporeNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# https://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE"""NEWLINEAbstract Node class for all nodes.NEWLINE"""NEWLINENEWLINENEWLINE# pylint: disable=unused-importNEWLINEfrom peekingduck.pipeline.nodes.abstract_node import AbstractNodeNEWLINEfrom peekingduck.utils.deprecation import deprecateNEWLINENEWLINEdeprecate(NEWLINE "importing AbstractNode from peekingduck.pipeline.nodes.node is deprecated "NEWLINE "and will be removed in a future version. Please import from "NEWLINE "peekingduck.pipeline.nodes.abstract_node instead.",NEWLINE 3,NEWLINE)NEWLINE |
from sklearn.model_selection import StratifiedKFoldNEWLINEfrom scipy import sparseNEWLINEfrom skml.datasets import sample_down_label_spaceNEWLINE# liac-arffNEWLINEimport arffNEWLINEimport randomNEWLINENEWLINErandom.seed(2018)NEWLINENEWLINENEWLINEdef load_from_arff(filename, labelcount, endian="big",NEWLINE input_feature_type='float', encode_nominal=True, load_sparse=False,NEWLINE return_attribute_definitions=False):NEWLINE """Method for loading ARFF files as numpy arrayNEWLINE ParametersNEWLINE ----------NEWLINE filename : strNEWLINE path to ARFF fileNEWLINE labelcount: integerNEWLINE number of labels in the ARFF fileNEWLINE endian: str {"big", "little"} (default is "big")NEWLINE whether the ARFF file contains labels at the beginning of theNEWLINE attributes list ("big" endianness, MEKA format) or at the endNEWLINE ("little" endianness, MULAN format)NEWLINE input_feature_type: numpy.type as string (default is "float")NEWLINE the desire type of the contents of the return 'X' array-likes,NEWLINE default 'i8', should be a numpy type,NEWLINE see http://docs.scipy.org/doc/numpy/user/basics.types.htmlNEWLINE encode_nominal: bool (default is True)NEWLINE whether convert categorical data into numeric factors - requiredNEWLINE for some scikit classifiers that can't handle non-numericNEWLINE input features.NEWLINE load_sparse: boolean (default is False)NEWLINE whether to read arff file as a sparse file format, liac-arffNEWLINE breaks if sparse reading is enabled for non-sparse ARFFs.NEWLINE return_attribute_definitions: boolean (default is False)NEWLINE whether to return the definitions for each attribute in theNEWLINE datasetNEWLINE ReturnsNEWLINE -------NEWLINE X : scipy.sparseNEWLINE matrix with :code:`input_feature_type` elementsNEWLINE y: scipy.sparseNEWLINE matrix of binary label indicator matrixNEWLINE """NEWLINE matrix = NoneNEWLINENEWLINE if not load_sparse:NEWLINE arff_frame = arff.load(open(filename, 'r'),NEWLINE encode_nominal=encode_nominal,NEWLINE 
return_type=arff.DENSE)NEWLINE try:NEWLINE matrix = sparse.csr_matrix(NEWLINE arff_frame['data'], dtype=input_feature_type)NEWLINE except:NEWLINE print(arff_frame['data'])NEWLINE else:NEWLINE arff_frame = arff.load(open(filename, 'r'),NEWLINE encode_nominal=encode_nominal,NEWLINE return_type=arff.COO)NEWLINE data = arff_frame['data'][0]NEWLINE row = arff_frame['data'][1]NEWLINE col = arff_frame['data'][2]NEWLINE matrix = sparse.coo_matrix((data, (row, col)),NEWLINE shape=(max(row) + 1, max(col) + 1))NEWLINENEWLINE X, y = None, NoneNEWLINENEWLINE if endian == "big":NEWLINE X, y = matrix.tocsc()[:, labelcount:].tolil(), matrix.tocsc()[NEWLINE :, :labelcount].astype(int).tolil()NEWLINE elif endian == "little":NEWLINE X, y = matrix.tocsc()[NEWLINE :, :-labelcount].tolil(), matrix.tocsc()[:, -labelcount:].astype(int).tolil()NEWLINE else:NEWLINE # unknown endianNEWLINE return NoneNEWLINENEWLINE if return_attribute_definitions:NEWLINE return X, y, arff_frame['attributes']NEWLINE else:NEWLINE return X, yNEWLINENEWLINENEWLINEdef load_data(name):NEWLINE if name == 'scene':NEWLINE # src: MULANNEWLINE return load_from_arff('../data/scene/scene.arff',NEWLINE labelcount=6, endian="little")NEWLINE elif name == 'emotions':NEWLINE return load_from_arff('../data/emotions/emotions.arff',NEWLINE labelcount=6, endian="little")NEWLINE elif name == 'yeast-10':NEWLINE return load_from_arff('../data/yeast/yeast.arff',NEWLINE labelcount=14, endian="little")NEWLINE elif name == 'mediamill-10':NEWLINE return load_from_arff('../data/mediamill/mediamill.arff',NEWLINE labelcount=101, endian="little")NEWLINE elif name == 'enron-10':NEWLINE return load_from_arff('../data/enron/enron.arff',NEWLINE labelcount=53, endian="little")NEWLINE elif name == 'medical-10':NEWLINE return load_from_arff('../data/medical/medical.arff',NEWLINE labelcount=44, endian="little")NEWLINE elif name == 'slashdot-10':NEWLINE return load_from_arff('../data/slashdot/SLASHDOT-F.arff',NEWLINE labelcount=22)NEWLINE elif name 
== 'ohsumed-10':NEWLINE return load_from_arff('../data/ohsumed/OHSUMED-F.arff',NEWLINE labelcount=23),NEWLINE elif name == 'tmc2007-500-10':NEWLINE return load_from_arff('../data/tmc2007-500/tmc2007-500.arff',NEWLINE labelcount=22, endian="little")NEWLINE elif name == 'imdb-10':NEWLINE # head ../data/imdb/IMDB-F.arff -n 40 | grep "{0,1}" | uniq | wc -lNEWLINE return load_from_arff('../data/imdb/IMDB-F.arff',NEWLINE labelcount=28)NEWLINE else:NEWLINE raise ValueError("No such ../data set: {}".format(name))NEWLINENEWLINE |
"""Test whether all elements of cls.args are instances of Basic. """NEWLINENEWLINE# NOTE: keep tests sorted by (module, class name) key. If a class can'tNEWLINE# be instantiated, add it here anyway with @SKIP("abstract class) (seeNEWLINE# e.g. Function).NEWLINENEWLINEimport osNEWLINEimport reNEWLINEimport warningsNEWLINEimport ioNEWLINENEWLINEfrom sympy import (Basic, S, symbols, sqrt, sin, oo, Interval, exp, Lambda, pi,NEWLINE Eq, log)NEWLINENEWLINEfrom sympy.core.compatibility import rangeNEWLINEfrom sympy.utilities.pytest import XFAIL, SKIPNEWLINEfrom sympy.utilities.exceptions import SymPyDeprecationWarningNEWLINENEWLINEx, y, z = symbols('x,y,z')NEWLINENEWLINENEWLINEdef test_all_classes_are_tested():NEWLINE this = os.path.split(__file__)[0]NEWLINE path = os.path.join(this, os.pardir, os.pardir)NEWLINE sympy_path = os.path.abspath(path)NEWLINE prefix = os.path.split(sympy_path)[0] + os.sepNEWLINENEWLINE re_cls = re.compile(r"^class ([A-Za-z][A-Za-z0-9_]*)\s*\(", re.MULTILINE)NEWLINENEWLINE modules = {}NEWLINENEWLINE for root, dirs, files in os.walk(sympy_path):NEWLINE module = root.replace(prefix, "").replace(os.sep, ".")NEWLINENEWLINE for file in files:NEWLINE if file.startswith(("_", "test_", "bench_")):NEWLINE continueNEWLINE if not file.endswith(".py"):NEWLINE continueNEWLINENEWLINE with io.open(os.path.join(root, file), "r", encoding='utf-8') as f:NEWLINE text = f.read()NEWLINENEWLINE submodule = module + '.' 
+ file[:-3]NEWLINE names = re_cls.findall(text)NEWLINENEWLINE if not names:NEWLINE continueNEWLINENEWLINE try:NEWLINE mod = __import__(submodule, fromlist=names)NEWLINE except ImportError:NEWLINE continueNEWLINENEWLINE def is_Basic(name):NEWLINE cls = getattr(mod, name)NEWLINE if hasattr(cls, '_sympy_deprecated_func'):NEWLINE cls = cls._sympy_deprecated_funcNEWLINE return issubclass(cls, Basic)NEWLINENEWLINE names = list(filter(is_Basic, names))NEWLINENEWLINE if names:NEWLINE modules[submodule] = namesNEWLINENEWLINE ns = globals()NEWLINE failed = []NEWLINENEWLINE for module, names in modules.items():NEWLINE mod = module.replace('.', '__')NEWLINENEWLINE for name in names:NEWLINE test = 'test_' + mod + '__' + nameNEWLINENEWLINE if test not in ns:NEWLINE failed.append(module + '.' + name)NEWLINENEWLINE # reset all SymPyDeprecationWarning into errorsNEWLINE warnings.simplefilter("error", category=SymPyDeprecationWarning)NEWLINENEWLINE assert not failed, "Missing classes: %s. Please add tests for these to sympy/core/tests/test_args.py." 
% ", ".join(failed)NEWLINENEWLINENEWLINEdef _test_args(obj):NEWLINE return all(isinstance(arg, Basic) for arg in obj.args)NEWLINENEWLINENEWLINEdef test_sympy__assumptions__assume__AppliedPredicate():NEWLINE from sympy.assumptions.assume import AppliedPredicate, PredicateNEWLINE assert _test_args(AppliedPredicate(Predicate("test"), 2))NEWLINENEWLINEdef test_sympy__assumptions__assume__Predicate():NEWLINE from sympy.assumptions.assume import PredicateNEWLINE assert _test_args(Predicate("test"))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__UnevaluatedOnFree():NEWLINE from sympy.assumptions.sathandlers import UnevaluatedOnFreeNEWLINE from sympy import QNEWLINE assert _test_args(UnevaluatedOnFree(Q.positive))NEWLINE assert _test_args(UnevaluatedOnFree(Q.positive(x)))NEWLINE assert _test_args(UnevaluatedOnFree(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__AllArgs():NEWLINE from sympy.assumptions.sathandlers import AllArgsNEWLINE from sympy import QNEWLINE assert _test_args(AllArgs(Q.positive))NEWLINE assert _test_args(AllArgs(Q.positive(x)))NEWLINE assert _test_args(AllArgs(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__AnyArgs():NEWLINE from sympy.assumptions.sathandlers import AnyArgsNEWLINE from sympy import QNEWLINE assert _test_args(AnyArgs(Q.positive))NEWLINE assert _test_args(AnyArgs(Q.positive(x)))NEWLINE assert _test_args(AnyArgs(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__ExactlyOneArg():NEWLINE from sympy.assumptions.sathandlers import ExactlyOneArgNEWLINE from sympy import QNEWLINE assert _test_args(ExactlyOneArg(Q.positive))NEWLINE assert _test_args(ExactlyOneArg(Q.positive(x)))NEWLINE assert _test_args(ExactlyOneArg(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__CheckOldAssump():NEWLINE from sympy.assumptions.sathandlers import CheckOldAssumpNEWLINE from sympy import QNEWLINE assert _test_args(CheckOldAssump(Q.positive))NEWLINE assert 
_test_args(CheckOldAssump(Q.positive(x)))NEWLINE assert _test_args(CheckOldAssump(Q.positive(x*y)))NEWLINENEWLINEdef test_sympy__assumptions__sathandlers__CheckIsPrime():NEWLINE from sympy.assumptions.sathandlers import CheckIsPrimeNEWLINE from sympy import QNEWLINE # Input must be a numberNEWLINE assert _test_args(CheckIsPrime(Q.positive))NEWLINE assert _test_args(CheckIsPrime(Q.positive(5)))NEWLINENEWLINE@SKIP("abstract Class")NEWLINEdef test_sympy__codegen__ast__AugmentedAssignment():NEWLINE from sympy.codegen.ast import AugmentedAssignmentNEWLINE assert _test_args(AugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__AddAugmentedAssignment():NEWLINE from sympy.codegen.ast import AddAugmentedAssignmentNEWLINE assert _test_args(AddAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__SubAugmentedAssignment():NEWLINE from sympy.codegen.ast import SubAugmentedAssignmentNEWLINE assert _test_args(SubAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__MulAugmentedAssignment():NEWLINE from sympy.codegen.ast import MulAugmentedAssignmentNEWLINE assert _test_args(MulAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__DivAugmentedAssignment():NEWLINE from sympy.codegen.ast import DivAugmentedAssignmentNEWLINE assert _test_args(DivAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__ModAugmentedAssignment():NEWLINE from sympy.codegen.ast import ModAugmentedAssignmentNEWLINE assert _test_args(ModAugmentedAssignment(x, 1))NEWLINENEWLINEdef test_sympy__codegen__ast__CodeBlock():NEWLINE from sympy.codegen.ast import CodeBlock, AssignmentNEWLINE assert _test_args(CodeBlock(Assignment(x, 1), Assignment(y, 2)))NEWLINENEWLINEdef test_sympy__codegen__ast__For():NEWLINE from sympy.codegen.ast import For, CodeBlock, AddAugmentedAssignmentNEWLINE from sympy import RangeNEWLINE assert _test_args(For(x, Range(10), CodeBlock(AddAugmentedAssignment(y, 1))))NEWLINENEWLINENEWLINEdef 
test_sympy__codegen__ast__Token():NEWLINE from sympy.codegen.ast import TokenNEWLINE assert _test_args(Token())NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Type():NEWLINE from sympy.codegen.ast import TypeNEWLINE assert _test_args(Type('float128'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__IntBaseType():NEWLINE from sympy.codegen.ast import IntBaseTypeNEWLINE assert _test_args(IntBaseType('bigint'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast___SizedIntType():NEWLINE from sympy.codegen.ast import _SizedIntTypeNEWLINE assert _test_args(_SizedIntType('int128', 128))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__SignedIntType():NEWLINE from sympy.codegen.ast import SignedIntTypeNEWLINE assert _test_args(SignedIntType('int128_with_sign', 128))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__UnsignedIntType():NEWLINE from sympy.codegen.ast import UnsignedIntTypeNEWLINE assert _test_args(UnsignedIntType('unt128', 128))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__FloatType():NEWLINE from sympy.codegen.ast import FloatTypeNEWLINE assert _test_args(FloatType('float242', 242, nmant=142, nexp=99))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__ComplexType():NEWLINE from sympy.codegen.ast import ComplexTypeNEWLINE assert _test_args(ComplexType('complex42', 42, nmant=15, nexp=5))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Attribute():NEWLINE from sympy.codegen.ast import AttributeNEWLINE assert _test_args(Attribute('noexcept'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Variable():NEWLINE from sympy.codegen.ast import Variable, Type, value_constNEWLINE assert _test_args(Variable(x))NEWLINE assert _test_args(Variable(y, {value_const}, Type('float32')))NEWLINE assert _test_args(Variable(z, type_=Type('float64')))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Pointer():NEWLINE from sympy.codegen.ast import Pointer, Type, pointer_constNEWLINE assert _test_args(Pointer(x))NEWLINE assert _test_args(Pointer(y, 
type_=Type('float32')))NEWLINE assert _test_args(Pointer(z, {pointer_const}, Type('float64')))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Declaration():NEWLINE from sympy.codegen.ast import Declaration, Variable, TypeNEWLINE vx = Variable(x, type_=Type('float'))NEWLINE assert _test_args(Declaration(vx))NEWLINE assert _test_args(Declaration(vx, 3.0))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__combinatorics__graycode__GrayCode():NEWLINE from sympy.combinatorics.graycode import GrayCodeNEWLINE # an integer is given and returned from GrayCode as the argNEWLINE assert _test_args(GrayCode(3, start='100'))NEWLINE assert _test_args(GrayCode(3, rank=1))NEWLINENEWLINENEWLINEdef test_sympy__combinatorics__subsets__Subset():NEWLINE from sympy.combinatorics.subsets import SubsetNEWLINE assert _test_args(Subset([0, 1], [0, 1, 2, 3]))NEWLINE assert _test_args(Subset(['c', 'd'], ['a', 'b', 'c', 'd']))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__combinatorics__permutations__Permutation():NEWLINE from sympy.combinatorics.permutations import PermutationNEWLINE assert _test_args(Permutation([0, 1, 2, 3]))NEWLINENEWLINENEWLINEdef test_sympy__combinatorics__perm_groups__PermutationGroup():NEWLINE from sympy.combinatorics.permutations import PermutationNEWLINE from sympy.combinatorics.perm_groups import PermutationGroupNEWLINE assert _test_args(PermutationGroup([Permutation([0, 1])]))NEWLINENEWLINENEWLINEdef test_sympy__combinatorics__polyhedron__Polyhedron():NEWLINE from sympy.combinatorics.permutations import PermutationNEWLINE from sympy.combinatorics.polyhedron import PolyhedronNEWLINE from sympy.abc import w, x, y, zNEWLINE pgroup = [Permutation([[0, 1, 2], [3]]),NEWLINE Permutation([[0, 1, 3], [2]]),NEWLINE Permutation([[0, 2, 3], [1]]),NEWLINE Permutation([[1, 2, 3], [0]]),NEWLINE Permutation([[0, 1], [2, 3]]),NEWLINE Permutation([[0, 2], [1, 3]]),NEWLINE Permutation([[0, 3], [1, 2]]),NEWLINE Permutation([[0, 1, 2, 3]])]NEWLINE corners = [w, x, y, z]NEWLINE faces 
= [(w, x, y), (w, y, z), (w, z, x), (x, y, z)]NEWLINE assert _test_args(Polyhedron(corners, faces, pgroup))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__combinatorics__prufer__Prufer():NEWLINE from sympy.combinatorics.prufer import PruferNEWLINE assert _test_args(Prufer([[0, 1], [0, 2], [0, 3]], 4))NEWLINENEWLINENEWLINEdef test_sympy__combinatorics__partitions__Partition():NEWLINE from sympy.combinatorics.partitions import PartitionNEWLINE assert _test_args(Partition([1]))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__combinatorics__partitions__IntegerPartition():NEWLINE from sympy.combinatorics.partitions import IntegerPartitionNEWLINE assert _test_args(IntegerPartition([1]))NEWLINENEWLINENEWLINEdef test_sympy__concrete__products__Product():NEWLINE from sympy.concrete.products import ProductNEWLINE assert _test_args(Product(x, (x, 0, 10)))NEWLINE assert _test_args(Product(x, (x, 0, y), (y, 0, 10)))NEWLINENEWLINENEWLINE@SKIP("abstract Class")NEWLINEdef test_sympy__concrete__expr_with_limits__ExprWithLimits():NEWLINE from sympy.concrete.expr_with_limits import ExprWithLimitsNEWLINE assert _test_args(ExprWithLimits(x, (x, 0, 10)))NEWLINE assert _test_args(ExprWithLimits(x*y, (x, 0, 10.),(y,1.,3)))NEWLINENEWLINENEWLINE@SKIP("abstract Class")NEWLINEdef test_sympy__concrete__expr_with_limits__AddWithLimits():NEWLINE from sympy.concrete.expr_with_limits import AddWithLimitsNEWLINE assert _test_args(AddWithLimits(x, (x, 0, 10)))NEWLINE assert _test_args(AddWithLimits(x*y, (x, 0, 10),(y,1,3)))NEWLINENEWLINENEWLINE@SKIP("abstract Class")NEWLINEdef test_sympy__concrete__expr_with_intlimits__ExprWithIntLimits():NEWLINE from sympy.concrete.expr_with_intlimits import ExprWithIntLimitsNEWLINE assert _test_args(ExprWithIntLimits(x, (x, 0, 10)))NEWLINE assert _test_args(ExprWithIntLimits(x*y, (x, 0, 10),(y,1,3)))NEWLINENEWLINENEWLINEdef test_sympy__concrete__summations__Sum():NEWLINE from sympy.concrete.summations import SumNEWLINE assert _test_args(Sum(x, (x, 0, 
10)))NEWLINE assert _test_args(Sum(x, (x, 0, y), (y, 0, 10)))NEWLINENEWLINENEWLINEdef test_sympy__core__add__Add():NEWLINE from sympy.core.add import AddNEWLINE assert _test_args(Add(x, y, z, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__basic__Atom():NEWLINE from sympy.core.basic import AtomNEWLINE assert _test_args(Atom())NEWLINENEWLINENEWLINEdef test_sympy__core__basic__Basic():NEWLINE from sympy.core.basic import BasicNEWLINE assert _test_args(Basic())NEWLINENEWLINENEWLINEdef test_sympy__core__containers__Dict():NEWLINE from sympy.core.containers import DictNEWLINE assert _test_args(Dict({x: y, y: z}))NEWLINENEWLINENEWLINEdef test_sympy__core__containers__Tuple():NEWLINE from sympy.core.containers import TupleNEWLINE assert _test_args(Tuple(x, y, z, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__expr__AtomicExpr():NEWLINE from sympy.core.expr import AtomicExprNEWLINE assert _test_args(AtomicExpr())NEWLINENEWLINENEWLINEdef test_sympy__core__expr__Expr():NEWLINE from sympy.core.expr import ExprNEWLINE assert _test_args(Expr())NEWLINENEWLINENEWLINEdef test_sympy__core__expr__UnevaluatedExpr():NEWLINE from sympy.core.expr import UnevaluatedExprNEWLINE from sympy.abc import xNEWLINE assert _test_args(UnevaluatedExpr(x))NEWLINENEWLINENEWLINEdef test_sympy__core__function__Application():NEWLINE from sympy.core.function import ApplicationNEWLINE assert _test_args(Application(1, 2, 3))NEWLINENEWLINENEWLINEdef test_sympy__core__function__AppliedUndef():NEWLINE from sympy.core.function import AppliedUndefNEWLINE assert _test_args(AppliedUndef(1, 2, 3))NEWLINENEWLINENEWLINEdef test_sympy__core__function__Derivative():NEWLINE from sympy.core.function import DerivativeNEWLINE assert _test_args(Derivative(2, x, y, 3))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__core__function__Function():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__core__function__Lambda():NEWLINE assert _test_args(Lambda((x, y), x + y + z))NEWLINENEWLINENEWLINEdef 
test_sympy__core__function__Subs():NEWLINE from sympy.core.function import SubsNEWLINE assert _test_args(Subs(x + y, x, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__function__WildFunction():NEWLINE from sympy.core.function import WildFunctionNEWLINE assert _test_args(WildFunction('f'))NEWLINENEWLINENEWLINEdef test_sympy__core__mod__Mod():NEWLINE from sympy.core.mod import ModNEWLINE assert _test_args(Mod(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__mul__Mul():NEWLINE from sympy.core.mul import MulNEWLINE assert _test_args(Mul(2, x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Catalan():NEWLINE from sympy.core.numbers import CatalanNEWLINE assert _test_args(Catalan())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__ComplexInfinity():NEWLINE from sympy.core.numbers import ComplexInfinityNEWLINE assert _test_args(ComplexInfinity())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__EulerGamma():NEWLINE from sympy.core.numbers import EulerGammaNEWLINE assert _test_args(EulerGamma())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Exp1():NEWLINE from sympy.core.numbers import Exp1NEWLINE assert _test_args(Exp1())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Float():NEWLINE from sympy.core.numbers import FloatNEWLINE assert _test_args(Float(1.23))NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__GoldenRatio():NEWLINE from sympy.core.numbers import GoldenRatioNEWLINE assert _test_args(GoldenRatio())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Half():NEWLINE from sympy.core.numbers import HalfNEWLINE assert _test_args(Half())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__ImaginaryUnit():NEWLINE from sympy.core.numbers import ImaginaryUnitNEWLINE assert _test_args(ImaginaryUnit())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Infinity():NEWLINE from sympy.core.numbers import InfinityNEWLINE assert _test_args(Infinity())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Integer():NEWLINE from sympy.core.numbers import 
IntegerNEWLINE assert _test_args(Integer(7))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__core__numbers__IntegerConstant():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__core__numbers__NaN():NEWLINE from sympy.core.numbers import NaNNEWLINE assert _test_args(NaN())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__NegativeInfinity():NEWLINE from sympy.core.numbers import NegativeInfinityNEWLINE assert _test_args(NegativeInfinity())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__NegativeOne():NEWLINE from sympy.core.numbers import NegativeOneNEWLINE assert _test_args(NegativeOne())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Number():NEWLINE from sympy.core.numbers import NumberNEWLINE assert _test_args(Number(1, 7))NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__NumberSymbol():NEWLINE from sympy.core.numbers import NumberSymbolNEWLINE assert _test_args(NumberSymbol())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__One():NEWLINE from sympy.core.numbers import OneNEWLINE assert _test_args(One())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Pi():NEWLINE from sympy.core.numbers import PiNEWLINE assert _test_args(Pi())NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Rational():NEWLINE from sympy.core.numbers import RationalNEWLINE assert _test_args(Rational(1, 7))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__core__numbers__RationalConstant():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__core__numbers__Zero():NEWLINE from sympy.core.numbers import ZeroNEWLINE assert _test_args(Zero())NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__core__operations__AssocOp():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__core__operations__LatticeOp():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__core__power__Pow():NEWLINE from sympy.core.power import PowNEWLINE assert _test_args(Pow(x, 2))NEWLINENEWLINENEWLINEdef 
test_sympy__algebras__quaternion__Quaternion():NEWLINE from sympy.algebras.quaternion import QuaternionNEWLINE assert _test_args(Quaternion(x, 1, 2, 3))NEWLINENEWLINENEWLINEdef test_sympy__core__relational__Equality():NEWLINE from sympy.core.relational import EqualityNEWLINE assert _test_args(Equality(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__relational__GreaterThan():NEWLINE from sympy.core.relational import GreaterThanNEWLINE assert _test_args(GreaterThan(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__relational__LessThan():NEWLINE from sympy.core.relational import LessThanNEWLINE assert _test_args(LessThan(x, 2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__core__relational__Relational():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__core__relational__StrictGreaterThan():NEWLINE from sympy.core.relational import StrictGreaterThanNEWLINE assert _test_args(StrictGreaterThan(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__relational__StrictLessThan():NEWLINE from sympy.core.relational import StrictLessThanNEWLINE assert _test_args(StrictLessThan(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__core__relational__Unequality():NEWLINE from sympy.core.relational import UnequalityNEWLINE assert _test_args(Unequality(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__sandbox__indexed_integrals__IndexedIntegral():NEWLINE from sympy.tensor import IndexedBase, IdxNEWLINE from sympy.sandbox.indexed_integrals import IndexedIntegralNEWLINE A = IndexedBase('A')NEWLINE i, j = symbols('i j', integer=True)NEWLINE a1, a2 = symbols('a1:3', cls=Idx)NEWLINE assert _test_args(IndexedIntegral(A[a1], A[a2]))NEWLINE assert _test_args(IndexedIntegral(A[i], A[j]))NEWLINENEWLINENEWLINEdef test_sympy__calculus__util__AccumulationBounds():NEWLINE from sympy.calculus.util import AccumulationBoundsNEWLINE assert _test_args(AccumulationBounds(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__EmptySet():NEWLINE from sympy.sets.sets import EmptySetNEWLINE assert 
_test_args(EmptySet())NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__UniversalSet():NEWLINE from sympy.sets.sets import UniversalSetNEWLINE assert _test_args(UniversalSet())NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__FiniteSet():NEWLINE from sympy.sets.sets import FiniteSetNEWLINE assert _test_args(FiniteSet(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__Interval():NEWLINE from sympy.sets.sets import IntervalNEWLINE assert _test_args(Interval(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__ProductSet():NEWLINE from sympy.sets.sets import ProductSet, IntervalNEWLINE assert _test_args(ProductSet(Interval(0, 1), Interval(0, 1)))NEWLINENEWLINENEWLINE@SKIP("does it make sense to test this?")NEWLINEdef test_sympy__sets__sets__Set():NEWLINE from sympy.sets.sets import SetNEWLINE assert _test_args(Set())NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__Intersection():NEWLINE from sympy.sets.sets import Intersection, IntervalNEWLINE assert _test_args(Intersection(Interval(0, 3), Interval(2, 4),NEWLINE evaluate=False))NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__Union():NEWLINE from sympy.sets.sets import Union, IntervalNEWLINE assert _test_args(Union(Interval(0, 1), Interval(2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__Complement():NEWLINE from sympy.sets.sets import ComplementNEWLINE assert _test_args(Complement(Interval(0, 2), Interval(0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__sets__sets__SymmetricDifference():NEWLINE from sympy.sets.sets import FiniteSet, SymmetricDifferenceNEWLINE assert _test_args(SymmetricDifference(FiniteSet(1, 2, 3), \NEWLINE FiniteSet(2, 3, 4)))NEWLINENEWLINENEWLINEdef test_sympy__core__trace__Tr():NEWLINE from sympy.core.trace import TrNEWLINE a, b = symbols('a b')NEWLINE assert _test_args(Tr(a + b))NEWLINENEWLINENEWLINEdef test_sympy__sets__fancysets__Naturals():NEWLINE from sympy.sets.fancysets import NaturalsNEWLINE assert _test_args(Naturals())NEWLINENEWLINEdef 
test_sympy__sets__fancysets__Naturals0():NEWLINE from sympy.sets.fancysets import Naturals0NEWLINE assert _test_args(Naturals0())NEWLINENEWLINEdef test_sympy__sets__fancysets__Integers():NEWLINE from sympy.sets.fancysets import IntegersNEWLINE assert _test_args(Integers())NEWLINENEWLINENEWLINEdef test_sympy__sets__fancysets__Reals():NEWLINE from sympy.sets.fancysets import RealsNEWLINE assert _test_args(Reals())NEWLINENEWLINENEWLINEdef test_sympy__sets__fancysets__Complexes():NEWLINE from sympy.sets.fancysets import ComplexesNEWLINE assert _test_args(Complexes())NEWLINENEWLINENEWLINEdef test_sympy__sets__fancysets__ComplexRegion():NEWLINE from sympy.sets.fancysets import ComplexRegionNEWLINE from sympy import SNEWLINE from sympy.sets import IntervalNEWLINE a = Interval(0, 1)NEWLINE b = Interval(2, 3)NEWLINE theta = Interval(0, 2*S.Pi)NEWLINE assert _test_args(ComplexRegion(a*b))NEWLINE assert _test_args(ComplexRegion(a*theta, polar=True))NEWLINENEWLINENEWLINEdef test_sympy__sets__fancysets__ImageSet():NEWLINE from sympy.sets.fancysets import ImageSetNEWLINE from sympy import S, SymbolNEWLINE x = Symbol('x')NEWLINE assert _test_args(ImageSet(Lambda(x, x**2), S.Naturals))NEWLINENEWLINENEWLINEdef test_sympy__sets__fancysets__Range():NEWLINE from sympy.sets.fancysets import RangeNEWLINE assert _test_args(Range(1, 5, 1))NEWLINENEWLINENEWLINEdef test_sympy__sets__conditionset__ConditionSet():NEWLINE from sympy.sets.conditionset import ConditionSetNEWLINE from sympy import S, SymbolNEWLINE x = Symbol('x')NEWLINE assert _test_args(ConditionSet(x, Eq(x**2, 1), S.Reals))NEWLINENEWLINENEWLINEdef test_sympy__sets__contains__Contains():NEWLINE from sympy.sets.fancysets import RangeNEWLINE from sympy.sets.contains import ContainsNEWLINE assert _test_args(Contains(x, Range(0, 10, 2)))NEWLINENEWLINENEWLINE# STATSNEWLINENEWLINENEWLINEfrom sympy.stats.crv_types import NormalDistributionNEWLINEnd = NormalDistribution(0, 1)NEWLINEfrom sympy.stats.frv_types import 
DieDistributionNEWLINEdie = DieDistribution(6)NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__ContinuousDomain():NEWLINE from sympy.stats.crv import ContinuousDomainNEWLINE assert _test_args(ContinuousDomain({x}, Interval(-oo, oo)))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__SingleContinuousDomain():NEWLINE from sympy.stats.crv import SingleContinuousDomainNEWLINE assert _test_args(SingleContinuousDomain(x, Interval(-oo, oo)))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__ProductContinuousDomain():NEWLINE from sympy.stats.crv import SingleContinuousDomain, ProductContinuousDomainNEWLINE D = SingleContinuousDomain(x, Interval(-oo, oo))NEWLINE E = SingleContinuousDomain(y, Interval(0, oo))NEWLINE assert _test_args(ProductContinuousDomain(D, E))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__ConditionalContinuousDomain():NEWLINE from sympy.stats.crv import (SingleContinuousDomain,NEWLINE ConditionalContinuousDomain)NEWLINE D = SingleContinuousDomain(x, Interval(-oo, oo))NEWLINE assert _test_args(ConditionalContinuousDomain(D, x > 0))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__ContinuousPSpace():NEWLINE from sympy.stats.crv import ContinuousPSpace, SingleContinuousDomainNEWLINE D = SingleContinuousDomain(x, Interval(-oo, oo))NEWLINE assert _test_args(ContinuousPSpace(D, nd))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__SingleContinuousPSpace():NEWLINE from sympy.stats.crv import SingleContinuousPSpaceNEWLINE assert _test_args(SingleContinuousPSpace(x, nd))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__ProductContinuousPSpace():NEWLINE from sympy.stats.crv import ProductContinuousPSpace, SingleContinuousPSpaceNEWLINE A = SingleContinuousPSpace(x, nd)NEWLINE B = SingleContinuousPSpace(y, nd)NEWLINE assert _test_args(ProductContinuousPSpace(A, B))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__stats__crv__SingleContinuousDistribution():NEWLINE passNEWLINENEWLINEdef test_sympy__stats__drv__SingleDiscreteDomain():NEWLINE from sympy.stats.drv 
import SingleDiscreteDomainNEWLINE assert _test_args(SingleDiscreteDomain(x, S.Naturals))NEWLINENEWLINEdef test_sympy__stats__drv__SingleDiscretePSpace():NEWLINE from sympy.stats.drv import SingleDiscretePSpaceNEWLINE from sympy.stats.drv_types import PoissonDistributionNEWLINE assert _test_args(SingleDiscretePSpace(x, PoissonDistribution(1)))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__stats__drv__SingleDiscreteDistribution():NEWLINE passNEWLINENEWLINEdef test_sympy__stats__rv__RandomDomain():NEWLINE from sympy.stats.rv import RandomDomainNEWLINE from sympy.sets.sets import FiniteSetNEWLINE assert _test_args(RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__stats__rv__SingleDomain():NEWLINE from sympy.stats.rv import SingleDomainNEWLINE from sympy.sets.sets import FiniteSetNEWLINE assert _test_args(SingleDomain(x, FiniteSet(1, 2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__stats__rv__ConditionalDomain():NEWLINE from sympy.stats.rv import ConditionalDomain, RandomDomainNEWLINE from sympy.sets.sets import FiniteSetNEWLINE D = RandomDomain(FiniteSet(x), FiniteSet(1, 2))NEWLINE assert _test_args(ConditionalDomain(D, x > 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__rv__PSpace():NEWLINE from sympy.stats.rv import PSpace, RandomDomainNEWLINE from sympy import FiniteSetNEWLINE D = RandomDomain(FiniteSet(x), FiniteSet(1, 2, 3, 4, 5, 6))NEWLINE assert _test_args(PSpace(D, die))NEWLINENEWLINENEWLINE@SKIP("abstract Class")NEWLINEdef test_sympy__stats__rv__SinglePSpace():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__stats__rv__RandomSymbol():NEWLINE from sympy.stats.rv import RandomSymbolNEWLINE from sympy.stats.crv import SingleContinuousPSpaceNEWLINE A = SingleContinuousPSpace(x, nd)NEWLINE assert _test_args(RandomSymbol(x, A))NEWLINENEWLINENEWLINEdef test_sympy__stats__rv__ProductPSpace():NEWLINE from sympy.stats.rv import ProductPSpaceNEWLINE from sympy.stats.crv import SingleContinuousPSpaceNEWLINE A = 
SingleContinuousPSpace(x, nd)NEWLINE B = SingleContinuousPSpace(y, nd)NEWLINE assert _test_args(ProductPSpace(A, B))NEWLINENEWLINENEWLINEdef test_sympy__stats__rv__ProductDomain():NEWLINE from sympy.stats.rv import ProductDomain, SingleDomainNEWLINE D = SingleDomain(x, Interval(-oo, oo))NEWLINE E = SingleDomain(y, Interval(0, oo))NEWLINE assert _test_args(ProductDomain(D, E))NEWLINENEWLINENEWLINEdef test_sympy__stats__symbolic_probability__Probability():NEWLINE from sympy.stats.symbolic_probability import ProbabilityNEWLINE from sympy.stats import NormalNEWLINE X = Normal('X', 0, 1)NEWLINE assert _test_args(Probability(X > 0))NEWLINENEWLINENEWLINEdef test_sympy__stats__symbolic_probability__Expectation():NEWLINE from sympy.stats.symbolic_probability import ExpectationNEWLINE from sympy.stats import NormalNEWLINE X = Normal('X', 0, 1)NEWLINE assert _test_args(Expectation(X > 0))NEWLINENEWLINENEWLINEdef test_sympy__stats__symbolic_probability__Covariance():NEWLINE from sympy.stats.symbolic_probability import CovarianceNEWLINE from sympy.stats import NormalNEWLINE X = Normal('X', 0, 1)NEWLINE Y = Normal('Y', 0, 3)NEWLINE assert _test_args(Covariance(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__stats__symbolic_probability__Variance():NEWLINE from sympy.stats.symbolic_probability import VarianceNEWLINE from sympy.stats import NormalNEWLINE X = Normal('X', 0, 1)NEWLINE assert _test_args(Variance(X))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv_types__DiscreteUniformDistribution():NEWLINE from sympy.stats.frv_types import DiscreteUniformDistributionNEWLINE from sympy.core.containers import TupleNEWLINE assert _test_args(DiscreteUniformDistribution(Tuple(*list(range(6)))))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv_types__DieDistribution():NEWLINE from sympy.stats.frv_types import DieDistributionNEWLINE assert _test_args(DieDistribution(6))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv_types__BernoulliDistribution():NEWLINE from sympy.stats.frv_types import 
BernoulliDistributionNEWLINE assert _test_args(BernoulliDistribution(S.Half, 0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv_types__BinomialDistribution():NEWLINE from sympy.stats.frv_types import BinomialDistributionNEWLINE assert _test_args(BinomialDistribution(5, S.Half, 1, 0))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv_types__HypergeometricDistribution():NEWLINE from sympy.stats.frv_types import HypergeometricDistributionNEWLINE assert _test_args(HypergeometricDistribution(10, 5, 3))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv_types__RademacherDistribution():NEWLINE from sympy.stats.frv_types import RademacherDistributionNEWLINE assert _test_args(RademacherDistribution())NEWLINENEWLINENEWLINEdef test_sympy__stats__frv__FiniteDomain():NEWLINE from sympy.stats.frv import FiniteDomainNEWLINE assert _test_args(FiniteDomain({(x, 1), (x, 2)})) # x can be 1 or 2NEWLINENEWLINENEWLINEdef test_sympy__stats__frv__SingleFiniteDomain():NEWLINE from sympy.stats.frv import SingleFiniteDomainNEWLINE assert _test_args(SingleFiniteDomain(x, {1, 2})) # x can be 1 or 2NEWLINENEWLINENEWLINEdef test_sympy__stats__frv__ProductFiniteDomain():NEWLINE from sympy.stats.frv import SingleFiniteDomain, ProductFiniteDomainNEWLINE xd = SingleFiniteDomain(x, {1, 2})NEWLINE yd = SingleFiniteDomain(y, {1, 2})NEWLINE assert _test_args(ProductFiniteDomain(xd, yd))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv__ConditionalFiniteDomain():NEWLINE from sympy.stats.frv import SingleFiniteDomain, ConditionalFiniteDomainNEWLINE xd = SingleFiniteDomain(x, {1, 2})NEWLINE assert _test_args(ConditionalFiniteDomain(xd, x > 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv__FinitePSpace():NEWLINE from sympy.stats.frv import FinitePSpace, SingleFiniteDomainNEWLINE xd = SingleFiniteDomain(x, {1, 2, 3, 4, 5, 6})NEWLINE p = 1.0/6NEWLINE xd = SingleFiniteDomain(x, {1, 2})NEWLINE assert _test_args(FinitePSpace(xd, {(x, 1): S.Half, (x, 2): S.Half}))NEWLINENEWLINENEWLINEdef 
test_sympy__stats__frv__SingleFinitePSpace():NEWLINE from sympy.stats.frv import SingleFinitePSpaceNEWLINE from sympy import SymbolNEWLINENEWLINE assert _test_args(SingleFinitePSpace(Symbol('x'), die))NEWLINENEWLINENEWLINEdef test_sympy__stats__frv__ProductFinitePSpace():NEWLINE from sympy.stats.frv import SingleFinitePSpace, ProductFinitePSpaceNEWLINE from sympy import SymbolNEWLINE xp = SingleFinitePSpace(Symbol('x'), die)NEWLINE yp = SingleFinitePSpace(Symbol('y'), die)NEWLINE assert _test_args(ProductFinitePSpace(xp, yp))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__stats__frv__SingleFiniteDistribution():NEWLINE passNEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__stats__crv__ContinuousDistribution():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__stats__frv_types__FiniteDistributionHandmade():NEWLINE from sympy.stats.frv_types import FiniteDistributionHandmadeNEWLINE assert _test_args(FiniteDistributionHandmade({1: 1}))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv__ContinuousDistributionHandmade():NEWLINE from sympy.stats.crv import ContinuousDistributionHandmadeNEWLINE from sympy import Symbol, IntervalNEWLINE assert _test_args(ContinuousDistributionHandmade(Symbol('x'),NEWLINE Interval(0, 2)))NEWLINENEWLINEdef test_sympy__stats__rv__Density():NEWLINE from sympy.stats.rv import DensityNEWLINE from sympy.stats.crv_types import NormalNEWLINE assert _test_args(Density(Normal('x', 0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__ArcsinDistribution():NEWLINE from sympy.stats.crv_types import ArcsinDistributionNEWLINE assert _test_args(ArcsinDistribution(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__BeniniDistribution():NEWLINE from sympy.stats.crv_types import BeniniDistributionNEWLINE assert _test_args(BeniniDistribution(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__BetaDistribution():NEWLINE from sympy.stats.crv_types import BetaDistributionNEWLINE assert _test_args(BetaDistribution(1, 
1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__BetaPrimeDistribution():NEWLINE from sympy.stats.crv_types import BetaPrimeDistributionNEWLINE assert _test_args(BetaPrimeDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__CauchyDistribution():NEWLINE from sympy.stats.crv_types import CauchyDistributionNEWLINE assert _test_args(CauchyDistribution(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__ChiDistribution():NEWLINE from sympy.stats.crv_types import ChiDistributionNEWLINE assert _test_args(ChiDistribution(1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__ChiNoncentralDistribution():NEWLINE from sympy.stats.crv_types import ChiNoncentralDistributionNEWLINE assert _test_args(ChiNoncentralDistribution(1,1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__ChiSquaredDistribution():NEWLINE from sympy.stats.crv_types import ChiSquaredDistributionNEWLINE assert _test_args(ChiSquaredDistribution(1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__DagumDistribution():NEWLINE from sympy.stats.crv_types import DagumDistributionNEWLINE assert _test_args(DagumDistribution(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__ExponentialDistribution():NEWLINE from sympy.stats.crv_types import ExponentialDistributionNEWLINE assert _test_args(ExponentialDistribution(1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__FDistributionDistribution():NEWLINE from sympy.stats.crv_types import FDistributionDistributionNEWLINE assert _test_args(FDistributionDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__FisherZDistribution():NEWLINE from sympy.stats.crv_types import FisherZDistributionNEWLINE assert _test_args(FisherZDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__FrechetDistribution():NEWLINE from sympy.stats.crv_types import FrechetDistributionNEWLINE assert _test_args(FrechetDistribution(1, 1, 1))NEWLINENEWLINENEWLINEdef 
test_sympy__stats__crv_types__GammaInverseDistribution():NEWLINE from sympy.stats.crv_types import GammaInverseDistributionNEWLINE assert _test_args(GammaInverseDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__GammaDistribution():NEWLINE from sympy.stats.crv_types import GammaDistributionNEWLINE assert _test_args(GammaDistribution(1, 1))NEWLINENEWLINEdef test_sympy__stats__crv_types__GumbelDistribution():NEWLINE from sympy.stats.crv_types import GumbelDistributionNEWLINE assert _test_args(GumbelDistribution(1, 1))NEWLINENEWLINEdef test_sympy__stats__crv_types__GompertzDistribution():NEWLINE from sympy.stats.crv_types import GompertzDistributionNEWLINE assert _test_args(GompertzDistribution(1, 1))NEWLINENEWLINEdef test_sympy__stats__crv_types__KumaraswamyDistribution():NEWLINE from sympy.stats.crv_types import KumaraswamyDistributionNEWLINE assert _test_args(KumaraswamyDistribution(1, 1))NEWLINENEWLINEdef test_sympy__stats__crv_types__LaplaceDistribution():NEWLINE from sympy.stats.crv_types import LaplaceDistributionNEWLINE assert _test_args(LaplaceDistribution(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__LogisticDistribution():NEWLINE from sympy.stats.crv_types import LogisticDistributionNEWLINE assert _test_args(LogisticDistribution(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__LogNormalDistribution():NEWLINE from sympy.stats.crv_types import LogNormalDistributionNEWLINE assert _test_args(LogNormalDistribution(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__MaxwellDistribution():NEWLINE from sympy.stats.crv_types import MaxwellDistributionNEWLINE assert _test_args(MaxwellDistribution(1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__NakagamiDistribution():NEWLINE from sympy.stats.crv_types import NakagamiDistributionNEWLINE assert _test_args(NakagamiDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__NormalDistribution():NEWLINE from sympy.stats.crv_types import 
NormalDistributionNEWLINE assert _test_args(NormalDistribution(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__ParetoDistribution():NEWLINE from sympy.stats.crv_types import ParetoDistributionNEWLINE assert _test_args(ParetoDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__QuadraticUDistribution():NEWLINE from sympy.stats.crv_types import QuadraticUDistributionNEWLINE assert _test_args(QuadraticUDistribution(1, 2))NEWLINENEWLINEdef test_sympy__stats__crv_types__RaisedCosineDistribution():NEWLINE from sympy.stats.crv_types import RaisedCosineDistributionNEWLINE assert _test_args(RaisedCosineDistribution(1, 1))NEWLINENEWLINEdef test_sympy__stats__crv_types__RayleighDistribution():NEWLINE from sympy.stats.crv_types import RayleighDistributionNEWLINE assert _test_args(RayleighDistribution(1))NEWLINENEWLINEdef test_sympy__stats__crv_types__ShiftedGompertzDistribution():NEWLINE from sympy.stats.crv_types import ShiftedGompertzDistributionNEWLINE assert _test_args(ShiftedGompertzDistribution(1, 1))NEWLINENEWLINEdef test_sympy__stats__crv_types__StudentTDistribution():NEWLINE from sympy.stats.crv_types import StudentTDistributionNEWLINE assert _test_args(StudentTDistribution(1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__TriangularDistribution():NEWLINE from sympy.stats.crv_types import TriangularDistributionNEWLINE assert _test_args(TriangularDistribution(-1, 0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__UniformDistribution():NEWLINE from sympy.stats.crv_types import UniformDistributionNEWLINE assert _test_args(UniformDistribution(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__UniformSumDistribution():NEWLINE from sympy.stats.crv_types import UniformSumDistributionNEWLINE assert _test_args(UniformSumDistribution(1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__VonMisesDistribution():NEWLINE from sympy.stats.crv_types import VonMisesDistributionNEWLINE assert 
_test_args(VonMisesDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__WeibullDistribution():NEWLINE from sympy.stats.crv_types import WeibullDistributionNEWLINE assert _test_args(WeibullDistribution(1, 1))NEWLINENEWLINENEWLINEdef test_sympy__stats__crv_types__WignerSemicircleDistribution():NEWLINE from sympy.stats.crv_types import WignerSemicircleDistributionNEWLINE assert _test_args(WignerSemicircleDistribution(1))NEWLINENEWLINEdef test_sympy__stats__drv_types__PoissonDistribution():NEWLINE from sympy.stats.drv_types import PoissonDistributionNEWLINE assert _test_args(PoissonDistribution(1))NEWLINENEWLINEdef test_sympy__stats__drv_types__GeometricDistribution():NEWLINE from sympy.stats.drv_types import GeometricDistributionNEWLINE assert _test_args(GeometricDistribution(.5))NEWLINENEWLINEdef test_sympy__core__symbol__Dummy():NEWLINE from sympy.core.symbol import DummyNEWLINE assert _test_args(Dummy('t'))NEWLINENEWLINENEWLINEdef test_sympy__core__symbol__Symbol():NEWLINE from sympy.core.symbol import SymbolNEWLINE assert _test_args(Symbol('t'))NEWLINENEWLINENEWLINEdef test_sympy__core__symbol__Wild():NEWLINE from sympy.core.symbol import WildNEWLINE assert _test_args(Wild('x', exclude=[x]))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__combinatorial__factorials__CombinatorialFunction():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__factorials__FallingFactorial():NEWLINE from sympy.functions.combinatorial.factorials import FallingFactorialNEWLINE assert _test_args(FallingFactorial(2, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__factorials__MultiFactorial():NEWLINE from sympy.functions.combinatorial.factorials import MultiFactorialNEWLINE assert _test_args(MultiFactorial(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__factorials__RisingFactorial():NEWLINE from sympy.functions.combinatorial.factorials import RisingFactorialNEWLINE assert 
_test_args(RisingFactorial(2, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__factorials__binomial():NEWLINE from sympy.functions.combinatorial.factorials import binomialNEWLINE assert _test_args(binomial(2, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__factorials__subfactorial():NEWLINE from sympy.functions.combinatorial.factorials import subfactorialNEWLINE assert _test_args(subfactorial(1))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__factorials__factorial():NEWLINE from sympy.functions.combinatorial.factorials import factorialNEWLINE assert _test_args(factorial(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__factorials__factorial2():NEWLINE from sympy.functions.combinatorial.factorials import factorial2NEWLINE assert _test_args(factorial2(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__numbers__bell():NEWLINE from sympy.functions.combinatorial.numbers import bellNEWLINE assert _test_args(bell(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__numbers__bernoulli():NEWLINE from sympy.functions.combinatorial.numbers import bernoulliNEWLINE assert _test_args(bernoulli(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__numbers__catalan():NEWLINE from sympy.functions.combinatorial.numbers import catalanNEWLINE assert _test_args(catalan(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__numbers__genocchi():NEWLINE from sympy.functions.combinatorial.numbers import genocchiNEWLINE assert _test_args(genocchi(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__numbers__euler():NEWLINE from sympy.functions.combinatorial.numbers import eulerNEWLINE assert _test_args(euler(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__numbers__fibonacci():NEWLINE from sympy.functions.combinatorial.numbers import fibonacciNEWLINE assert _test_args(fibonacci(x))NEWLINENEWLINENEWLINEdef 
test_sympy__functions__combinatorial__numbers__harmonic():NEWLINE from sympy.functions.combinatorial.numbers import harmonicNEWLINE assert _test_args(harmonic(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__combinatorial__numbers__lucas():NEWLINE from sympy.functions.combinatorial.numbers import lucasNEWLINE assert _test_args(lucas(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__Abs():NEWLINE from sympy.functions.elementary.complexes import AbsNEWLINE assert _test_args(Abs(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__adjoint():NEWLINE from sympy.functions.elementary.complexes import adjointNEWLINE assert _test_args(adjoint(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__arg():NEWLINE from sympy.functions.elementary.complexes import argNEWLINE assert _test_args(arg(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__conjugate():NEWLINE from sympy.functions.elementary.complexes import conjugateNEWLINE assert _test_args(conjugate(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__im():NEWLINE from sympy.functions.elementary.complexes import imNEWLINE assert _test_args(im(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__re():NEWLINE from sympy.functions.elementary.complexes import reNEWLINE assert _test_args(re(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__sign():NEWLINE from sympy.functions.elementary.complexes import signNEWLINE assert _test_args(sign(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__polar_lift():NEWLINE from sympy.functions.elementary.complexes import polar_liftNEWLINE assert _test_args(polar_lift(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__periodic_argument():NEWLINE from sympy.functions.elementary.complexes import periodic_argumentNEWLINE assert _test_args(periodic_argument(x, y))NEWLINENEWLINENEWLINEdef 
test_sympy__functions__elementary__complexes__principal_branch():NEWLINE from sympy.functions.elementary.complexes import principal_branchNEWLINE assert _test_args(principal_branch(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__complexes__transpose():NEWLINE from sympy.functions.elementary.complexes import transposeNEWLINE assert _test_args(transpose(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__exponential__LambertW():NEWLINE from sympy.functions.elementary.exponential import LambertWNEWLINE assert _test_args(LambertW(2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__exponential__ExpBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__exponential__exp():NEWLINE from sympy.functions.elementary.exponential import expNEWLINE assert _test_args(exp(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__exponential__exp_polar():NEWLINE from sympy.functions.elementary.exponential import exp_polarNEWLINE assert _test_args(exp_polar(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__exponential__log():NEWLINE from sympy.functions.elementary.exponential import logNEWLINE assert _test_args(log(2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__hyperbolic__HyperbolicFunction():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__hyperbolic__ReciprocalHyperbolicFunction():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__hyperbolic__InverseHyperbolicFunction():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__acosh():NEWLINE from sympy.functions.elementary.hyperbolic import acoshNEWLINE assert _test_args(acosh(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__acoth():NEWLINE from sympy.functions.elementary.hyperbolic import acothNEWLINE assert 
_test_args(acoth(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__asinh():NEWLINE from sympy.functions.elementary.hyperbolic import asinhNEWLINE assert _test_args(asinh(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__atanh():NEWLINE from sympy.functions.elementary.hyperbolic import atanhNEWLINE assert _test_args(atanh(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__asech():NEWLINE from sympy.functions.elementary.hyperbolic import asechNEWLINE assert _test_args(asech(2))NEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__acsch():NEWLINE from sympy.functions.elementary.hyperbolic import acschNEWLINE assert _test_args(acsch(2))NEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__cosh():NEWLINE from sympy.functions.elementary.hyperbolic import coshNEWLINE assert _test_args(cosh(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__coth():NEWLINE from sympy.functions.elementary.hyperbolic import cothNEWLINE assert _test_args(coth(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__csch():NEWLINE from sympy.functions.elementary.hyperbolic import cschNEWLINE assert _test_args(csch(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__sech():NEWLINE from sympy.functions.elementary.hyperbolic import sechNEWLINE assert _test_args(sech(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__sinh():NEWLINE from sympy.functions.elementary.hyperbolic import sinhNEWLINE assert _test_args(sinh(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__hyperbolic__tanh():NEWLINE from sympy.functions.elementary.hyperbolic import tanhNEWLINE assert _test_args(tanh(2))NEWLINENEWLINENEWLINE@SKIP("does this work at all?")NEWLINEdef test_sympy__functions__elementary__integers__RoundFunction():NEWLINE from sympy.functions.elementary.integers import RoundFunctionNEWLINE assert 
_test_args(RoundFunction())NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__integers__ceiling():NEWLINE from sympy.functions.elementary.integers import ceilingNEWLINE assert _test_args(ceiling(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__integers__floor():NEWLINE from sympy.functions.elementary.integers import floorNEWLINE assert _test_args(floor(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__integers__frac():NEWLINE from sympy.functions.elementary.integers import fracNEWLINE assert _test_args(frac(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__miscellaneous__IdentityFunction():NEWLINE from sympy.functions.elementary.miscellaneous import IdentityFunctionNEWLINE assert _test_args(IdentityFunction())NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__miscellaneous__Max():NEWLINE from sympy.functions.elementary.miscellaneous import MaxNEWLINE assert _test_args(Max(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__miscellaneous__Min():NEWLINE from sympy.functions.elementary.miscellaneous import MinNEWLINE assert _test_args(Min(x, 2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__miscellaneous__MinMaxBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__piecewise__ExprCondPair():NEWLINE from sympy.functions.elementary.piecewise import ExprCondPairNEWLINE assert _test_args(ExprCondPair(1, True))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__piecewise__Piecewise():NEWLINE from sympy.functions.elementary.piecewise import PiecewiseNEWLINE assert _test_args(Piecewise((1, x >= 0), (0, True)))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__trigonometric__TrigonometricFunction():NEWLINE passNEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__elementary__trigonometric__ReciprocalTrigonometricFunction():NEWLINE passNEWLINENEWLINE@SKIP("abstract 
class")NEWLINEdef test_sympy__functions__elementary__trigonometric__InverseTrigonometricFunction():NEWLINE passNEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__acos():NEWLINE from sympy.functions.elementary.trigonometric import acosNEWLINE assert _test_args(acos(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__acot():NEWLINE from sympy.functions.elementary.trigonometric import acotNEWLINE assert _test_args(acot(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__asin():NEWLINE from sympy.functions.elementary.trigonometric import asinNEWLINE assert _test_args(asin(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__asec():NEWLINE from sympy.functions.elementary.trigonometric import asecNEWLINE assert _test_args(asec(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__acsc():NEWLINE from sympy.functions.elementary.trigonometric import acscNEWLINE assert _test_args(acsc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__atan():NEWLINE from sympy.functions.elementary.trigonometric import atanNEWLINE assert _test_args(atan(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__atan2():NEWLINE from sympy.functions.elementary.trigonometric import atan2NEWLINE assert _test_args(atan2(2, 3))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__cos():NEWLINE from sympy.functions.elementary.trigonometric import cosNEWLINE assert _test_args(cos(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__csc():NEWLINE from sympy.functions.elementary.trigonometric import cscNEWLINE assert _test_args(csc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__cot():NEWLINE from sympy.functions.elementary.trigonometric import cotNEWLINE assert _test_args(cot(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__sin():NEWLINE 
assert _test_args(sin(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__sinc():NEWLINE from sympy.functions.elementary.trigonometric import sincNEWLINE assert _test_args(sinc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__sec():NEWLINE from sympy.functions.elementary.trigonometric import secNEWLINE assert _test_args(sec(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__elementary__trigonometric__tan():NEWLINE from sympy.functions.elementary.trigonometric import tanNEWLINE assert _test_args(tan(2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__bessel__BesselBase():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__bessel__SphericalBesselBase():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__bessel__SphericalHankelBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__besseli():NEWLINE from sympy.functions.special.bessel import besseliNEWLINE assert _test_args(besseli(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__besselj():NEWLINE from sympy.functions.special.bessel import besseljNEWLINE assert _test_args(besselj(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__besselk():NEWLINE from sympy.functions.special.bessel import besselkNEWLINE assert _test_args(besselk(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__bessely():NEWLINE from sympy.functions.special.bessel import besselyNEWLINE assert _test_args(bessely(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hankel1():NEWLINE from sympy.functions.special.bessel import hankel1NEWLINE assert _test_args(hankel1(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hankel2():NEWLINE from sympy.functions.special.bessel import hankel2NEWLINE assert _test_args(hankel2(x, 
1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__jn():NEWLINE from sympy.functions.special.bessel import jnNEWLINE assert _test_args(jn(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__yn():NEWLINE from sympy.functions.special.bessel import ynNEWLINE assert _test_args(yn(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hn1():NEWLINE from sympy.functions.special.bessel import hn1NEWLINE assert _test_args(hn1(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__hn2():NEWLINE from sympy.functions.special.bessel import hn2NEWLINE assert _test_args(hn2(0, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__AiryBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airyai():NEWLINE from sympy.functions.special.bessel import airyaiNEWLINE assert _test_args(airyai(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airybi():NEWLINE from sympy.functions.special.bessel import airybiNEWLINE assert _test_args(airybi(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airyaiprime():NEWLINE from sympy.functions.special.bessel import airyaiprimeNEWLINE assert _test_args(airyaiprime(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__bessel__airybiprime():NEWLINE from sympy.functions.special.bessel import airybiprimeNEWLINE assert _test_args(airybiprime(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_k():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_k as KNEWLINE assert _test_args(K(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_f():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_f as FNEWLINE assert _test_args(F(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_e():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_e as 
ENEWLINE assert _test_args(E(x))NEWLINE assert _test_args(E(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__elliptic_integrals__elliptic_pi():NEWLINE from sympy.functions.special.elliptic_integrals import elliptic_pi as PNEWLINE assert _test_args(P(x, y))NEWLINE assert _test_args(P(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__delta_functions__DiracDelta():NEWLINE from sympy.functions.special.delta_functions import DiracDeltaNEWLINE assert _test_args(DiracDelta(x, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__singularity_functions__SingularityFunction():NEWLINE from sympy.functions.special.singularity_functions import SingularityFunctionNEWLINE assert _test_args(SingularityFunction(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__delta_functions__Heaviside():NEWLINE from sympy.functions.special.delta_functions import HeavisideNEWLINE assert _test_args(Heaviside(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__erf():NEWLINE from sympy.functions.special.error_functions import erfNEWLINE assert _test_args(erf(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfc():NEWLINE from sympy.functions.special.error_functions import erfcNEWLINE assert _test_args(erfc(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfi():NEWLINE from sympy.functions.special.error_functions import erfiNEWLINE assert _test_args(erfi(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erf2():NEWLINE from sympy.functions.special.error_functions import erf2NEWLINE assert _test_args(erf2(2, 3))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfinv():NEWLINE from sympy.functions.special.error_functions import erfinvNEWLINE assert _test_args(erfinv(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfcinv():NEWLINE from sympy.functions.special.error_functions import erfcinvNEWLINE assert 
_test_args(erfcinv(2))NEWLINENEWLINEdef test_sympy__functions__special__error_functions__erf2inv():NEWLINE from sympy.functions.special.error_functions import erf2invNEWLINE assert _test_args(erf2inv(2, 3))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__error_functions__FresnelIntegral():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__fresnels():NEWLINE from sympy.functions.special.error_functions import fresnelsNEWLINE assert _test_args(fresnels(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__fresnelc():NEWLINE from sympy.functions.special.error_functions import fresnelcNEWLINE assert _test_args(fresnelc(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__erfs():NEWLINE from sympy.functions.special.error_functions import _erfsNEWLINE assert _test_args(_erfs(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Ei():NEWLINE from sympy.functions.special.error_functions import EiNEWLINE assert _test_args(Ei(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__li():NEWLINE from sympy.functions.special.error_functions import liNEWLINE assert _test_args(li(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Li():NEWLINE from sympy.functions.special.error_functions import LiNEWLINE assert _test_args(Li(2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__error_functions__TrigonometricIntegral():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Si():NEWLINE from sympy.functions.special.error_functions import SiNEWLINE assert _test_args(Si(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Ci():NEWLINE from sympy.functions.special.error_functions import CiNEWLINE assert _test_args(Ci(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Shi():NEWLINE from 
sympy.functions.special.error_functions import ShiNEWLINE assert _test_args(Shi(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__Chi():NEWLINE from sympy.functions.special.error_functions import ChiNEWLINE assert _test_args(Chi(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__error_functions__expint():NEWLINE from sympy.functions.special.error_functions import expintNEWLINE assert _test_args(expint(y, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__gamma():NEWLINE from sympy.functions.special.gamma_functions import gammaNEWLINE assert _test_args(gamma(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__loggamma():NEWLINE from sympy.functions.special.gamma_functions import loggammaNEWLINE assert _test_args(loggamma(2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__lowergamma():NEWLINE from sympy.functions.special.gamma_functions import lowergammaNEWLINE assert _test_args(lowergamma(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__polygamma():NEWLINE from sympy.functions.special.gamma_functions import polygammaNEWLINE assert _test_args(polygamma(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__gamma_functions__uppergamma():NEWLINE from sympy.functions.special.gamma_functions import uppergammaNEWLINE assert _test_args(uppergamma(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__beta_functions__beta():NEWLINE from sympy.functions.special.beta_functions import betaNEWLINE assert _test_args(beta(x, x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__MathieuBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieus():NEWLINE from sympy.functions.special.mathieu_functions import mathieusNEWLINE assert _test_args(mathieus(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieuc():NEWLINE from 
sympy.functions.special.mathieu_functions import mathieucNEWLINE assert _test_args(mathieuc(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieusprime():NEWLINE from sympy.functions.special.mathieu_functions import mathieusprimeNEWLINE assert _test_args(mathieusprime(1, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__mathieu_functions__mathieucprime():NEWLINE from sympy.functions.special.mathieu_functions import mathieucprimeNEWLINE assert _test_args(mathieucprime(1, 1, 1))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__hyper__TupleParametersBase():NEWLINE passNEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__hyper__TupleArg():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__hyper():NEWLINE from sympy.functions.special.hyper import hyperNEWLINE assert _test_args(hyper([1, 2, 3], [4, 5], x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__meijerg():NEWLINE from sympy.functions.special.hyper import meijergNEWLINE assert _test_args(meijerg([1, 2, 3], [4, 5], [6], [], x))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__hyper__HyperRep():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_power1():NEWLINE from sympy.functions.special.hyper import HyperRep_power1NEWLINE assert _test_args(HyperRep_power1(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_power2():NEWLINE from sympy.functions.special.hyper import HyperRep_power2NEWLINE assert _test_args(HyperRep_power2(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_log1():NEWLINE from sympy.functions.special.hyper import HyperRep_log1NEWLINE assert _test_args(HyperRep_log1(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_atanh():NEWLINE from sympy.functions.special.hyper import 
HyperRep_atanhNEWLINE assert _test_args(HyperRep_atanh(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_asin1():NEWLINE from sympy.functions.special.hyper import HyperRep_asin1NEWLINE assert _test_args(HyperRep_asin1(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_asin2():NEWLINE from sympy.functions.special.hyper import HyperRep_asin2NEWLINE assert _test_args(HyperRep_asin2(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_sqrts1():NEWLINE from sympy.functions.special.hyper import HyperRep_sqrts1NEWLINE assert _test_args(HyperRep_sqrts1(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_sqrts2():NEWLINE from sympy.functions.special.hyper import HyperRep_sqrts2NEWLINE assert _test_args(HyperRep_sqrts2(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_log2():NEWLINE from sympy.functions.special.hyper import HyperRep_log2NEWLINE assert _test_args(HyperRep_log2(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_cosasin():NEWLINE from sympy.functions.special.hyper import HyperRep_cosasinNEWLINE assert _test_args(HyperRep_cosasin(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__hyper__HyperRep_sinasin():NEWLINE from sympy.functions.special.hyper import HyperRep_sinasinNEWLINE assert _test_args(HyperRep_sinasin(x, y))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__functions__special__polynomials__OrthogonalPolynomial():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__jacobi():NEWLINE from sympy.functions.special.polynomials import jacobiNEWLINE assert _test_args(jacobi(x, 2, 2, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__gegenbauer():NEWLINE from sympy.functions.special.polynomials import gegenbauerNEWLINE assert _test_args(gegenbauer(x, 2, 2))NEWLINENEWLINENEWLINEdef 
test_sympy__functions__special__polynomials__chebyshevt():NEWLINE from sympy.functions.special.polynomials import chebyshevtNEWLINE assert _test_args(chebyshevt(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__chebyshevt_root():NEWLINE from sympy.functions.special.polynomials import chebyshevt_rootNEWLINE assert _test_args(chebyshevt_root(3, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__chebyshevu():NEWLINE from sympy.functions.special.polynomials import chebyshevuNEWLINE assert _test_args(chebyshevu(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__chebyshevu_root():NEWLINE from sympy.functions.special.polynomials import chebyshevu_rootNEWLINE assert _test_args(chebyshevu_root(3, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__hermite():NEWLINE from sympy.functions.special.polynomials import hermiteNEWLINE assert _test_args(hermite(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__legendre():NEWLINE from sympy.functions.special.polynomials import legendreNEWLINE assert _test_args(legendre(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__assoc_legendre():NEWLINE from sympy.functions.special.polynomials import assoc_legendreNEWLINE assert _test_args(assoc_legendre(x, 0, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__laguerre():NEWLINE from sympy.functions.special.polynomials import laguerreNEWLINE assert _test_args(laguerre(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__polynomials__assoc_laguerre():NEWLINE from sympy.functions.special.polynomials import assoc_laguerreNEWLINE assert _test_args(assoc_laguerre(x, 0, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__spherical_harmonics__Ynm():NEWLINE from sympy.functions.special.spherical_harmonics import YnmNEWLINE assert _test_args(Ynm(1, 1, x, y))NEWLINENEWLINENEWLINEdef 
test_sympy__functions__special__spherical_harmonics__Znm():NEWLINE from sympy.functions.special.spherical_harmonics import ZnmNEWLINE assert _test_args(Znm(1, 1, x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__tensor_functions__LeviCivita():NEWLINE from sympy.functions.special.tensor_functions import LeviCivitaNEWLINE assert _test_args(LeviCivita(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__tensor_functions__KroneckerDelta():NEWLINE from sympy.functions.special.tensor_functions import KroneckerDeltaNEWLINE assert _test_args(KroneckerDelta(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__dirichlet_eta():NEWLINE from sympy.functions.special.zeta_functions import dirichlet_etaNEWLINE assert _test_args(dirichlet_eta(x))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__zeta():NEWLINE from sympy.functions.special.zeta_functions import zetaNEWLINE assert _test_args(zeta(101))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__lerchphi():NEWLINE from sympy.functions.special.zeta_functions import lerchphiNEWLINE assert _test_args(lerchphi(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__polylog():NEWLINE from sympy.functions.special.zeta_functions import polylogNEWLINE assert _test_args(polylog(x, y))NEWLINENEWLINENEWLINEdef test_sympy__functions__special__zeta_functions__stieltjes():NEWLINE from sympy.functions.special.zeta_functions import stieltjesNEWLINE assert _test_args(stieltjes(x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__integrals__Integral():NEWLINE from sympy.integrals.integrals import IntegralNEWLINE assert _test_args(Integral(2, (x, 0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__integrals__risch__NonElementaryIntegral():NEWLINE from sympy.integrals.risch import NonElementaryIntegralNEWLINE assert _test_args(NonElementaryIntegral(exp(-x**2), x))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef 
test_sympy__integrals__transforms__IntegralTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__MellinTransform():NEWLINE from sympy.integrals.transforms import MellinTransformNEWLINE assert _test_args(MellinTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseMellinTransform():NEWLINE from sympy.integrals.transforms import InverseMellinTransformNEWLINE assert _test_args(InverseMellinTransform(2, x, y, 0, 1))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__LaplaceTransform():NEWLINE from sympy.integrals.transforms import LaplaceTransformNEWLINE assert _test_args(LaplaceTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseLaplaceTransform():NEWLINE from sympy.integrals.transforms import InverseLaplaceTransformNEWLINE assert _test_args(InverseLaplaceTransform(2, x, y, 0))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__integrals__transforms__FourierTypeTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseFourierTransform():NEWLINE from sympy.integrals.transforms import InverseFourierTransformNEWLINE assert _test_args(InverseFourierTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__FourierTransform():NEWLINE from sympy.integrals.transforms import FourierTransformNEWLINE assert _test_args(FourierTransform(2, x, y))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__integrals__transforms__SineCosineTypeTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseSineTransform():NEWLINE from sympy.integrals.transforms import InverseSineTransformNEWLINE assert _test_args(InverseSineTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__SineTransform():NEWLINE from sympy.integrals.transforms import SineTransformNEWLINE assert _test_args(SineTransform(2, x, y))NEWLINENEWLINENEWLINEdef 
test_sympy__integrals__transforms__InverseCosineTransform():NEWLINE from sympy.integrals.transforms import InverseCosineTransformNEWLINE assert _test_args(InverseCosineTransform(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__CosineTransform():NEWLINE from sympy.integrals.transforms import CosineTransformNEWLINE assert _test_args(CosineTransform(2, x, y))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__integrals__transforms__HankelTypeTransform():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__InverseHankelTransform():NEWLINE from sympy.integrals.transforms import InverseHankelTransformNEWLINE assert _test_args(InverseHankelTransform(2, x, y, 0))NEWLINENEWLINENEWLINEdef test_sympy__integrals__transforms__HankelTransform():NEWLINE from sympy.integrals.transforms import HankelTransformNEWLINE assert _test_args(HankelTransform(2, x, y, 0))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__cartan_type__CartanType_generator():NEWLINE from sympy.liealgebras.cartan_type import CartanType_generatorNEWLINE assert _test_args(CartanType_generator("A2"))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__cartan_type__Standard_Cartan():NEWLINE from sympy.liealgebras.cartan_type import Standard_CartanNEWLINE assert _test_args(Standard_Cartan("A", 2))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__weyl_group__WeylGroup():NEWLINE from sympy.liealgebras.weyl_group import WeylGroupNEWLINE assert _test_args(WeylGroup("B4"))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__root_system__RootSystem():NEWLINE from sympy.liealgebras.root_system import RootSystemNEWLINE assert _test_args(RootSystem("A2"))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_a__TypeA():NEWLINE from sympy.liealgebras.type_a import TypeANEWLINE assert _test_args(TypeA(2))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_b__TypeB():NEWLINE from sympy.liealgebras.type_b import TypeBNEWLINE assert 
_test_args(TypeB(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_c__TypeC():NEWLINE from sympy.liealgebras.type_c import TypeCNEWLINE assert _test_args(TypeC(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_d__TypeD():NEWLINE from sympy.liealgebras.type_d import TypeDNEWLINE assert _test_args(TypeD(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_e__TypeE():NEWLINE from sympy.liealgebras.type_e import TypeENEWLINE assert _test_args(TypeE(6))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_f__TypeF():NEWLINE from sympy.liealgebras.type_f import TypeFNEWLINE assert _test_args(TypeF(4))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__liealgebras__type_g__TypeG():NEWLINE from sympy.liealgebras.type_g import TypeGNEWLINE assert _test_args(TypeG(2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__And():NEWLINE from sympy.logic.boolalg import AndNEWLINE assert _test_args(And(x, y, 2))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__logic__boolalg__Boolean():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__BooleanFunction():NEWLINE from sympy.logic.boolalg import BooleanFunctionNEWLINE assert _test_args(BooleanFunction(1, 2, 3))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__logic__boolalg__BooleanAtom():NEWLINE passNEWLINENEWLINEdef test_sympy__logic__boolalg__BooleanTrue():NEWLINE from sympy.logic.boolalg import trueNEWLINE assert _test_args(true)NEWLINENEWLINEdef test_sympy__logic__boolalg__BooleanFalse():NEWLINE from sympy.logic.boolalg import falseNEWLINE assert _test_args(false)NEWLINENEWLINEdef test_sympy__logic__boolalg__Equivalent():NEWLINE from sympy.logic.boolalg import EquivalentNEWLINE assert _test_args(Equivalent(x, 2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__ITE():NEWLINE from sympy.logic.boolalg import ITENEWLINE assert _test_args(ITE(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Implies():NEWLINE from sympy.logic.boolalg import 
ImpliesNEWLINE assert _test_args(Implies(x, y))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Nand():NEWLINE from sympy.logic.boolalg import NandNEWLINE assert _test_args(Nand(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Nor():NEWLINE from sympy.logic.boolalg import NorNEWLINE assert _test_args(Nor(x, y))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Not():NEWLINE from sympy.logic.boolalg import NotNEWLINE assert _test_args(Not(x))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Or():NEWLINE from sympy.logic.boolalg import OrNEWLINE assert _test_args(Or(x, y))NEWLINENEWLINENEWLINEdef test_sympy__logic__boolalg__Xor():NEWLINE from sympy.logic.boolalg import XorNEWLINE assert _test_args(Xor(x, y, 2))NEWLINENEWLINEdef test_sympy__logic__boolalg__Xnor():NEWLINE from sympy.logic.boolalg import XnorNEWLINE assert _test_args(Xnor(x, y, 2))NEWLINENEWLINENEWLINEdef test_sympy__matrices__matrices__DeferredVector():NEWLINE from sympy.matrices.matrices import DeferredVectorNEWLINE assert _test_args(DeferredVector("X"))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__matrices__immutable__ImmutableDenseMatrix():NEWLINE from sympy.matrices.immutable import ImmutableDenseMatrixNEWLINE m = ImmutableDenseMatrix([[1, 2], [3, 4]])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableDenseMatrix(1, 1, [1])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableDenseMatrix(2, 2, lambda i, j: 1)NEWLINE assert m[0, 0] is S.OneNEWLINE m = ImmutableDenseMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j))NEWLINE assert m[1, 1] is S.One # true div. 
will give 1.0 if i,j not sympifiedNEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__immutable__ImmutableSparseMatrix():NEWLINE from sympy.matrices.immutable import ImmutableSparseMatrixNEWLINE m = ImmutableSparseMatrix([[1, 2], [3, 4]])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableSparseMatrix(1, 1, {(0, 0): 1})NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableSparseMatrix(1, 1, [1])NEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINE m = ImmutableSparseMatrix(2, 2, lambda i, j: 1)NEWLINE assert m[0, 0] is S.OneNEWLINE m = ImmutableSparseMatrix(2, 2, lambda i, j: 1/(1 + i) + 1/(1 + j))NEWLINE assert m[1, 1] is S.One # true div. will give 1.0 if i,j not sympifiedNEWLINE assert _test_args(m)NEWLINE assert _test_args(Basic(*list(m)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__slice__MatrixSlice():NEWLINE from sympy.matrices.expressions.slice import MatrixSliceNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', 4, 4)NEWLINE assert _test_args(MatrixSlice(X, (0, 2), (0, 2)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__blockmatrix__BlockDiagMatrix():NEWLINE from sympy.matrices.expressions.blockmatrix import BlockDiagMatrixNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, x)NEWLINE Y = MatrixSymbol('Y', y, y)NEWLINE assert _test_args(BlockDiagMatrix(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__blockmatrix__BlockMatrix():NEWLINE from sympy.matrices.expressions.blockmatrix import BlockMatrixNEWLINE from sympy.matrices.expressions import MatrixSymbol, ZeroMatrixNEWLINE X = MatrixSymbol('X', x, x)NEWLINE Y = MatrixSymbol('Y', y, y)NEWLINE Z = MatrixSymbol('Z', x, y)NEWLINE O = ZeroMatrix(y, x)NEWLINE assert _test_args(BlockMatrix([[X, Z], [O, Y]]))NEWLINENEWLINENEWLINEdef 
test_sympy__matrices__expressions__inverse__Inverse():NEWLINE from sympy.matrices.expressions.inverse import InverseNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Inverse(MatrixSymbol('A', 3, 3)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matadd__MatAdd():NEWLINE from sympy.matrices.expressions.matadd import MatAddNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, y)NEWLINE Y = MatrixSymbol('Y', x, y)NEWLINE assert _test_args(MatAdd(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matexpr__Identity():NEWLINE from sympy.matrices.expressions.matexpr import IdentityNEWLINE assert _test_args(Identity(3))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixExpr():NEWLINE passNEWLINENEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixElement():NEWLINE from sympy.matrices.expressions.matexpr import MatrixSymbol, MatrixElementNEWLINE from sympy import SNEWLINE assert _test_args(MatrixElement(MatrixSymbol('A', 3, 5), S(2), S(3)))NEWLINENEWLINE@XFAILNEWLINEdef test_sympy__matrices__expressions__matexpr__MatrixSymbol():NEWLINE from sympy.matrices.expressions.matexpr import MatrixSymbolNEWLINE assert _test_args(MatrixSymbol('A', 3, 5))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matexpr__ZeroMatrix():NEWLINE from sympy.matrices.expressions.matexpr import ZeroMatrixNEWLINE assert _test_args(ZeroMatrix(3, 5))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matmul__MatMul():NEWLINE from sympy.matrices.expressions.matmul import MatMulNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, y)NEWLINE Y = MatrixSymbol('Y', y, x)NEWLINE assert _test_args(MatMul(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__dotproduct__DotProduct():NEWLINE from sympy.matrices.expressions.dotproduct import DotProductNEWLINE from 
sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, 1)NEWLINE Y = MatrixSymbol('Y', x, 1)NEWLINE assert _test_args(DotProduct(X, Y))NEWLINENEWLINEdef test_sympy__matrices__expressions__diagonal__DiagonalMatrix():NEWLINE from sympy.matrices.expressions.diagonal import DiagonalMatrixNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE x = MatrixSymbol('x', 10, 1)NEWLINE assert _test_args(DiagonalMatrix(x))NEWLINENEWLINEdef test_sympy__matrices__expressions__diagonal__DiagonalOf():NEWLINE from sympy.matrices.expressions.diagonal import DiagonalOfNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('x', 10, 10)NEWLINE assert _test_args(DiagonalOf(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__hadamard__HadamardProduct():NEWLINE from sympy.matrices.expressions.hadamard import HadamardProductNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, y)NEWLINE Y = MatrixSymbol('Y', x, y)NEWLINE assert _test_args(HadamardProduct(X, Y))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__matpow__MatPow():NEWLINE from sympy.matrices.expressions.matpow import MatPowNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE X = MatrixSymbol('X', x, x)NEWLINE assert _test_args(MatPow(X, 2))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__transpose__Transpose():NEWLINE from sympy.matrices.expressions.transpose import TransposeNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Transpose(MatrixSymbol('A', 3, 5)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__adjoint__Adjoint():NEWLINE from sympy.matrices.expressions.adjoint import AdjointNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Adjoint(MatrixSymbol('A', 3, 5)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__trace__Trace():NEWLINE from sympy.matrices.expressions.trace import TraceNEWLINE from 
sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Trace(MatrixSymbol('A', 3, 3)))NEWLINENEWLINEdef test_sympy__matrices__expressions__determinant__Determinant():NEWLINE from sympy.matrices.expressions.determinant import DeterminantNEWLINE from sympy.matrices.expressions import MatrixSymbolNEWLINE assert _test_args(Determinant(MatrixSymbol('A', 3, 3)))NEWLINENEWLINENEWLINEdef test_sympy__matrices__expressions__funcmatrix__FunctionMatrix():NEWLINE from sympy.matrices.expressions.funcmatrix import FunctionMatrixNEWLINE from sympy import symbolsNEWLINE i, j = symbols('i,j')NEWLINE assert _test_args(FunctionMatrix(3, 3, Lambda((i, j), i - j) ))NEWLINENEWLINEdef test_sympy__matrices__expressions__fourier__DFT():NEWLINE from sympy.matrices.expressions.fourier import DFTNEWLINE from sympy import SNEWLINE assert _test_args(DFT(S(2)))NEWLINENEWLINEdef test_sympy__matrices__expressions__fourier__IDFT():NEWLINE from sympy.matrices.expressions.fourier import IDFTNEWLINE from sympy import SNEWLINE assert _test_args(IDFT(S(2)))NEWLINENEWLINEfrom sympy.matrices.expressions import MatrixSymbolNEWLINEX = MatrixSymbol('X', 10, 10)NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__LofLU():NEWLINE from sympy.matrices.expressions.factorizations import LofLUNEWLINE assert _test_args(LofLU(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__UofLU():NEWLINE from sympy.matrices.expressions.factorizations import UofLUNEWLINE assert _test_args(UofLU(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__QofQR():NEWLINE from sympy.matrices.expressions.factorizations import QofQRNEWLINE assert _test_args(QofQR(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__RofQR():NEWLINE from sympy.matrices.expressions.factorizations import RofQRNEWLINE assert _test_args(RofQR(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__LofCholesky():NEWLINE from 
sympy.matrices.expressions.factorizations import LofCholeskyNEWLINE assert _test_args(LofCholesky(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__UofCholesky():NEWLINE from sympy.matrices.expressions.factorizations import UofCholeskyNEWLINE assert _test_args(UofCholesky(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__EigenVectors():NEWLINE from sympy.matrices.expressions.factorizations import EigenVectorsNEWLINE assert _test_args(EigenVectors(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__EigenValues():NEWLINE from sympy.matrices.expressions.factorizations import EigenValuesNEWLINE assert _test_args(EigenValues(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__UofSVD():NEWLINE from sympy.matrices.expressions.factorizations import UofSVDNEWLINE assert _test_args(UofSVD(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__VofSVD():NEWLINE from sympy.matrices.expressions.factorizations import VofSVDNEWLINE assert _test_args(VofSVD(X))NEWLINENEWLINEdef test_sympy__matrices__expressions__factorizations__SofSVD():NEWLINE from sympy.matrices.expressions.factorizations import SofSVDNEWLINE assert _test_args(SofSVD(X))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__matrices__expressions__factorizations__Factorization():NEWLINE passNEWLINENEWLINEdef test_sympy__physics__vector__frame__CoordinateSym():NEWLINE from sympy.physics.vector import CoordinateSymNEWLINE from sympy.physics.vector import ReferenceFrameNEWLINE assert _test_args(CoordinateSym('R_x', ReferenceFrame('R'), 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__paulialgebra__Pauli():NEWLINE from sympy.physics.paulialgebra import PauliNEWLINE assert _test_args(Pauli(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__anticommutator__AntiCommutator():NEWLINE from sympy.physics.quantum.anticommutator import AntiCommutatorNEWLINE assert _test_args(AntiCommutator(x, 
y))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PositionBra3D():NEWLINE from sympy.physics.quantum.cartesian import PositionBra3DNEWLINE assert _test_args(PositionBra3D(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PositionKet3D():NEWLINE from sympy.physics.quantum.cartesian import PositionKet3DNEWLINE assert _test_args(PositionKet3D(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PositionState3D():NEWLINE from sympy.physics.quantum.cartesian import PositionState3DNEWLINE assert _test_args(PositionState3D(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PxBra():NEWLINE from sympy.physics.quantum.cartesian import PxBraNEWLINE assert _test_args(PxBra(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PxKet():NEWLINE from sympy.physics.quantum.cartesian import PxKetNEWLINE assert _test_args(PxKet(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__PxOp():NEWLINE from sympy.physics.quantum.cartesian import PxOpNEWLINE assert _test_args(PxOp(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__XBra():NEWLINE from sympy.physics.quantum.cartesian import XBraNEWLINE assert _test_args(XBra(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__XKet():NEWLINE from sympy.physics.quantum.cartesian import XKetNEWLINE assert _test_args(XKet(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__XOp():NEWLINE from sympy.physics.quantum.cartesian import XOpNEWLINE assert _test_args(XOp(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__YOp():NEWLINE from sympy.physics.quantum.cartesian import YOpNEWLINE assert _test_args(YOp(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cartesian__ZOp():NEWLINE from sympy.physics.quantum.cartesian import ZOpNEWLINE assert _test_args(ZOp(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__CG():NEWLINE from 
sympy.physics.quantum.cg import CGNEWLINE from sympy import SNEWLINE assert _test_args(CG(S(3)/2, S(3)/2, S(1)/2, -S(1)/2, 1, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__Wigner3j():NEWLINE from sympy.physics.quantum.cg import Wigner3jNEWLINE assert _test_args(Wigner3j(6, 0, 4, 0, 2, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__Wigner6j():NEWLINE from sympy.physics.quantum.cg import Wigner6jNEWLINE assert _test_args(Wigner6j(1, 2, 3, 2, 1, 2))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__cg__Wigner9j():NEWLINE from sympy.physics.quantum.cg import Wigner9jNEWLINE assert _test_args(Wigner9j(2, 1, 1, S(3)/2, S(1)/2, 1, S(1)/2, S(1)/2, 0))NEWLINENEWLINEdef test_sympy__physics__quantum__circuitplot__Mz():NEWLINE from sympy.physics.quantum.circuitplot import MzNEWLINE assert _test_args(Mz(0))NEWLINENEWLINEdef test_sympy__physics__quantum__circuitplot__Mx():NEWLINE from sympy.physics.quantum.circuitplot import MxNEWLINE assert _test_args(Mx(0))NEWLINENEWLINEdef test_sympy__physics__quantum__commutator__Commutator():NEWLINE from sympy.physics.quantum.commutator import CommutatorNEWLINE A, B = symbols('A,B', commutative=False)NEWLINE assert _test_args(Commutator(A, B))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__constants__HBar():NEWLINE from sympy.physics.quantum.constants import HBarNEWLINE assert _test_args(HBar())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__dagger__Dagger():NEWLINE from sympy.physics.quantum.dagger import DaggerNEWLINE from sympy.physics.quantum.state import KetNEWLINE assert _test_args(Dagger(Dagger(Ket('psi'))))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__CGate():NEWLINE from sympy.physics.quantum.gate import CGate, GateNEWLINE assert _test_args(CGate((0, 1), Gate(2)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__CGateS():NEWLINE from sympy.physics.quantum.gate import CGateS, GateNEWLINE assert _test_args(CGateS((0, 1), Gate(2)))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__gate__CNotGate():NEWLINE from sympy.physics.quantum.gate import CNotGateNEWLINE assert _test_args(CNotGate(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__Gate():NEWLINE from sympy.physics.quantum.gate import GateNEWLINE assert _test_args(Gate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__HadamardGate():NEWLINE from sympy.physics.quantum.gate import HadamardGateNEWLINE assert _test_args(HadamardGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__IdentityGate():NEWLINE from sympy.physics.quantum.gate import IdentityGateNEWLINE assert _test_args(IdentityGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__OneQubitGate():NEWLINE from sympy.physics.quantum.gate import OneQubitGateNEWLINE assert _test_args(OneQubitGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__PhaseGate():NEWLINE from sympy.physics.quantum.gate import PhaseGateNEWLINE assert _test_args(PhaseGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__SwapGate():NEWLINE from sympy.physics.quantum.gate import SwapGateNEWLINE assert _test_args(SwapGate(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__TGate():NEWLINE from sympy.physics.quantum.gate import TGateNEWLINE assert _test_args(TGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__TwoQubitGate():NEWLINE from sympy.physics.quantum.gate import TwoQubitGateNEWLINE assert _test_args(TwoQubitGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__UGate():NEWLINE from sympy.physics.quantum.gate import UGateNEWLINE from sympy.matrices.immutable import ImmutableDenseMatrixNEWLINE from sympy import Integer, TupleNEWLINE assert _test_args(NEWLINE UGate(Tuple(Integer(1)), ImmutableDenseMatrix([[1, 0], [0, 2]])))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__XGate():NEWLINE from sympy.physics.quantum.gate import XGateNEWLINE assert 
_test_args(XGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__YGate():NEWLINE from sympy.physics.quantum.gate import YGateNEWLINE assert _test_args(YGate(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__gate__ZGate():NEWLINE from sympy.physics.quantum.gate import ZGateNEWLINE assert _test_args(ZGate(0))NEWLINENEWLINENEWLINE@SKIP("TODO: sympy.physics")NEWLINEdef test_sympy__physics__quantum__grover__OracleGate():NEWLINE from sympy.physics.quantum.grover import OracleGateNEWLINE assert _test_args(OracleGate())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__grover__WGate():NEWLINE from sympy.physics.quantum.grover import WGateNEWLINE assert _test_args(WGate(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__ComplexSpace():NEWLINE from sympy.physics.quantum.hilbert import ComplexSpaceNEWLINE assert _test_args(ComplexSpace(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__DirectSumHilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import DirectSumHilbertSpace, ComplexSpace, FockSpaceNEWLINE c = ComplexSpace(2)NEWLINE f = FockSpace()NEWLINE assert _test_args(DirectSumHilbertSpace(c, f))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__FockSpace():NEWLINE from sympy.physics.quantum.hilbert import FockSpaceNEWLINE assert _test_args(FockSpace())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__HilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import HilbertSpaceNEWLINE assert _test_args(HilbertSpace())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__L2():NEWLINE from sympy.physics.quantum.hilbert import L2NEWLINE from sympy import oo, IntervalNEWLINE assert _test_args(L2(Interval(0, oo)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__TensorPowerHilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import TensorPowerHilbertSpace, FockSpaceNEWLINE f = FockSpace()NEWLINE assert _test_args(TensorPowerHilbertSpace(f, 
2))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__hilbert__TensorProductHilbertSpace():NEWLINE from sympy.physics.quantum.hilbert import TensorProductHilbertSpace, FockSpace, ComplexSpaceNEWLINE c = ComplexSpace(2)NEWLINE f = FockSpace()NEWLINE assert _test_args(TensorProductHilbertSpace(f, c))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__innerproduct__InnerProduct():NEWLINE from sympy.physics.quantum import Bra, Ket, InnerProductNEWLINE b = Bra('b')NEWLINE k = Ket('k')NEWLINE assert _test_args(InnerProduct(b, k))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__DifferentialOperator():NEWLINE from sympy.physics.quantum.operator import DifferentialOperatorNEWLINE from sympy import Derivative, FunctionNEWLINE f = Function('f')NEWLINE assert _test_args(DifferentialOperator(1/x*Derivative(f(x), x), f(x)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__HermitianOperator():NEWLINE from sympy.physics.quantum.operator import HermitianOperatorNEWLINE assert _test_args(HermitianOperator('H'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__IdentityOperator():NEWLINE from sympy.physics.quantum.operator import IdentityOperatorNEWLINE assert _test_args(IdentityOperator(5))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__Operator():NEWLINE from sympy.physics.quantum.operator import OperatorNEWLINE assert _test_args(Operator('A'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__OuterProduct():NEWLINE from sympy.physics.quantum.operator import OuterProductNEWLINE from sympy.physics.quantum import Ket, BraNEWLINE b = Bra('b')NEWLINE k = Ket('k')NEWLINE assert _test_args(OuterProduct(k, b))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__operator__UnitaryOperator():NEWLINE from sympy.physics.quantum.operator import UnitaryOperatorNEWLINE assert _test_args(UnitaryOperator('U'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__piab__PIABBra():NEWLINE from 
sympy.physics.quantum.piab import PIABBraNEWLINE assert _test_args(PIABBra('B'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonOp():NEWLINE from sympy.physics.quantum.boson import BosonOpNEWLINE assert _test_args(BosonOp('a'))NEWLINE assert _test_args(BosonOp('a', False))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonFockKet():NEWLINE from sympy.physics.quantum.boson import BosonFockKetNEWLINE assert _test_args(BosonFockKet(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonFockBra():NEWLINE from sympy.physics.quantum.boson import BosonFockBraNEWLINE assert _test_args(BosonFockBra(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonCoherentKet():NEWLINE from sympy.physics.quantum.boson import BosonCoherentKetNEWLINE assert _test_args(BosonCoherentKet(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__boson__BosonCoherentBra():NEWLINE from sympy.physics.quantum.boson import BosonCoherentBraNEWLINE assert _test_args(BosonCoherentBra(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__fermion__FermionOp():NEWLINE from sympy.physics.quantum.fermion import FermionOpNEWLINE assert _test_args(FermionOp('c'))NEWLINE assert _test_args(FermionOp('c', False))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__fermion__FermionFockKet():NEWLINE from sympy.physics.quantum.fermion import FermionFockKetNEWLINE assert _test_args(FermionFockKet(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__fermion__FermionFockBra():NEWLINE from sympy.physics.quantum.fermion import FermionFockBraNEWLINE assert _test_args(FermionFockBra(1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaOpBase():NEWLINE from sympy.physics.quantum.pauli import SigmaOpBaseNEWLINE assert _test_args(SigmaOpBase())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaX():NEWLINE from sympy.physics.quantum.pauli import SigmaXNEWLINE assert _test_args(SigmaX())NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__pauli__SigmaY():NEWLINE from sympy.physics.quantum.pauli import SigmaYNEWLINE assert _test_args(SigmaY())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaZ():NEWLINE from sympy.physics.quantum.pauli import SigmaZNEWLINE assert _test_args(SigmaZ())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaMinus():NEWLINE from sympy.physics.quantum.pauli import SigmaMinusNEWLINE assert _test_args(SigmaMinus())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaPlus():NEWLINE from sympy.physics.quantum.pauli import SigmaPlusNEWLINE assert _test_args(SigmaPlus())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaZKet():NEWLINE from sympy.physics.quantum.pauli import SigmaZKetNEWLINE assert _test_args(SigmaZKet(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__pauli__SigmaZBra():NEWLINE from sympy.physics.quantum.pauli import SigmaZBraNEWLINE assert _test_args(SigmaZBra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__piab__PIABHamiltonian():NEWLINE from sympy.physics.quantum.piab import PIABHamiltonianNEWLINE assert _test_args(PIABHamiltonian('P'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__piab__PIABKet():NEWLINE from sympy.physics.quantum.piab import PIABKetNEWLINE assert _test_args(PIABKet('K'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qexpr__QExpr():NEWLINE from sympy.physics.quantum.qexpr import QExprNEWLINE assert _test_args(QExpr(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qft__Fourier():NEWLINE from sympy.physics.quantum.qft import FourierNEWLINE assert _test_args(Fourier(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qft__IQFT():NEWLINE from sympy.physics.quantum.qft import IQFTNEWLINE assert _test_args(IQFT(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qft__QFT():NEWLINE from sympy.physics.quantum.qft import QFTNEWLINE assert _test_args(QFT(0, 1))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__qft__RkGate():NEWLINE from sympy.physics.quantum.qft import RkGateNEWLINE assert _test_args(RkGate(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__IntQubit():NEWLINE from sympy.physics.quantum.qubit import IntQubitNEWLINE assert _test_args(IntQubit(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__IntQubitBra():NEWLINE from sympy.physics.quantum.qubit import IntQubitBraNEWLINE assert _test_args(IntQubitBra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__IntQubitState():NEWLINE from sympy.physics.quantum.qubit import IntQubitState, QubitStateNEWLINE assert _test_args(IntQubitState(QubitState(0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__Qubit():NEWLINE from sympy.physics.quantum.qubit import QubitNEWLINE assert _test_args(Qubit(0, 0, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__QubitBra():NEWLINE from sympy.physics.quantum.qubit import QubitBraNEWLINE assert _test_args(QubitBra('1', 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__qubit__QubitState():NEWLINE from sympy.physics.quantum.qubit import QubitStateNEWLINE assert _test_args(QubitState(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__density__Density():NEWLINE from sympy.physics.quantum.density import DensityNEWLINE from sympy.physics.quantum.state import KetNEWLINE assert _test_args(Density([Ket(0), 0.5], [Ket(1), 0.5]))NEWLINENEWLINENEWLINE@SKIP("TODO: sympy.physics.quantum.shor: Cmod Not Implemented")NEWLINEdef test_sympy__physics__quantum__shor__CMod():NEWLINE from sympy.physics.quantum.shor import CModNEWLINE assert _test_args(CMod())NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__CoupledSpinState():NEWLINE from sympy.physics.quantum.spin import CoupledSpinStateNEWLINE assert _test_args(CoupledSpinState(1, 0, (1, 1)))NEWLINE assert _test_args(CoupledSpinState(1, 0, (1, S(1)/2, S(1)/2)))NEWLINE assert _test_args(CoupledSpinState(NEWLINE 1, 0, (1, 
S(1)/2, S(1)/2), ((2, 3, S(1)/2), (1, 2, 1)) ))NEWLINE j, m, j1, j2, j3, j12, x = symbols('j m j1:4 j12 x')NEWLINE assert CoupledSpinState(NEWLINE j, m, (j1, j2, j3)).subs(j2, x) == CoupledSpinState(j, m, (j1, x, j3))NEWLINE assert CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, j12), (1, 2, j)) ).subs(j12, x) == \NEWLINE CoupledSpinState(j, m, (j1, j2, j3), ((1, 3, x), (1, 2, j)) )NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__J2Op():NEWLINE from sympy.physics.quantum.spin import J2OpNEWLINE assert _test_args(J2Op('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JminusOp():NEWLINE from sympy.physics.quantum.spin import JminusOpNEWLINE assert _test_args(JminusOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JplusOp():NEWLINE from sympy.physics.quantum.spin import JplusOpNEWLINE assert _test_args(JplusOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxBra():NEWLINE from sympy.physics.quantum.spin import JxBraNEWLINE assert _test_args(JxBra(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxBraCoupled():NEWLINE from sympy.physics.quantum.spin import JxBraCoupledNEWLINE assert _test_args(JxBraCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxKet():NEWLINE from sympy.physics.quantum.spin import JxKetNEWLINE assert _test_args(JxKet(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxKetCoupled():NEWLINE from sympy.physics.quantum.spin import JxKetCoupledNEWLINE assert _test_args(JxKetCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JxOp():NEWLINE from sympy.physics.quantum.spin import JxOpNEWLINE assert _test_args(JxOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyBra():NEWLINE from sympy.physics.quantum.spin import JyBraNEWLINE assert _test_args(JyBra(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyBraCoupled():NEWLINE from sympy.physics.quantum.spin import 
JyBraCoupledNEWLINE assert _test_args(JyBraCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyKet():NEWLINE from sympy.physics.quantum.spin import JyKetNEWLINE assert _test_args(JyKet(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyKetCoupled():NEWLINE from sympy.physics.quantum.spin import JyKetCoupledNEWLINE assert _test_args(JyKetCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JyOp():NEWLINE from sympy.physics.quantum.spin import JyOpNEWLINE assert _test_args(JyOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzBra():NEWLINE from sympy.physics.quantum.spin import JzBraNEWLINE assert _test_args(JzBra(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzBraCoupled():NEWLINE from sympy.physics.quantum.spin import JzBraCoupledNEWLINE assert _test_args(JzBraCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzKet():NEWLINE from sympy.physics.quantum.spin import JzKetNEWLINE assert _test_args(JzKet(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzKetCoupled():NEWLINE from sympy.physics.quantum.spin import JzKetCoupledNEWLINE assert _test_args(JzKetCoupled(1, 0, (1, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__JzOp():NEWLINE from sympy.physics.quantum.spin import JzOpNEWLINE assert _test_args(JzOp('J'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__Rotation():NEWLINE from sympy.physics.quantum.spin import RotationNEWLINE assert _test_args(Rotation(pi, 0, pi/2))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__SpinState():NEWLINE from sympy.physics.quantum.spin import SpinStateNEWLINE assert _test_args(SpinState(1, 0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__spin__WignerD():NEWLINE from sympy.physics.quantum.spin import WignerDNEWLINE assert _test_args(WignerD(0, 1, 2, 3, 4, 5))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__quantum__state__Bra():NEWLINE from sympy.physics.quantum.state import BraNEWLINE assert _test_args(Bra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__BraBase():NEWLINE from sympy.physics.quantum.state import BraBaseNEWLINE assert _test_args(BraBase(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__Ket():NEWLINE from sympy.physics.quantum.state import KetNEWLINE assert _test_args(Ket(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__KetBase():NEWLINE from sympy.physics.quantum.state import KetBaseNEWLINE assert _test_args(KetBase(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__State():NEWLINE from sympy.physics.quantum.state import StateNEWLINE assert _test_args(State(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__StateBase():NEWLINE from sympy.physics.quantum.state import StateBaseNEWLINE assert _test_args(StateBase(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__TimeDepBra():NEWLINE from sympy.physics.quantum.state import TimeDepBraNEWLINE assert _test_args(TimeDepBra('psi', 't'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__TimeDepKet():NEWLINE from sympy.physics.quantum.state import TimeDepKetNEWLINE assert _test_args(TimeDepKet('psi', 't'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__TimeDepState():NEWLINE from sympy.physics.quantum.state import TimeDepStateNEWLINE assert _test_args(TimeDepState('psi', 't'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__state__Wavefunction():NEWLINE from sympy.physics.quantum.state import WavefunctionNEWLINE from sympy.functions import sinNEWLINE from sympy import PiecewiseNEWLINE n = 1NEWLINE L = 1NEWLINE g = Piecewise((0, x < 0), (0, x > L), (sqrt(2//L)*sin(n*pi*x/L), True))NEWLINE assert _test_args(Wavefunction(g, x))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__tensorproduct__TensorProduct():NEWLINE from sympy.physics.quantum.tensorproduct import 
TensorProductNEWLINE assert _test_args(TensorProduct(x, y))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__identitysearch__GateIdentity():NEWLINE from sympy.physics.quantum.gate import XNEWLINE from sympy.physics.quantum.identitysearch import GateIdentityNEWLINE assert _test_args(GateIdentity(X(0), X(0)))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOOp():NEWLINE from sympy.physics.quantum.sho1d import SHOOpNEWLINE assert _test_args(SHOOp('a'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__RaisingOp():NEWLINE from sympy.physics.quantum.sho1d import RaisingOpNEWLINE assert _test_args(RaisingOp('a'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__LoweringOp():NEWLINE from sympy.physics.quantum.sho1d import LoweringOpNEWLINE assert _test_args(LoweringOp('a'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__NumberOp():NEWLINE from sympy.physics.quantum.sho1d import NumberOpNEWLINE assert _test_args(NumberOp('N'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__Hamiltonian():NEWLINE from sympy.physics.quantum.sho1d import HamiltonianNEWLINE assert _test_args(Hamiltonian('H'))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOState():NEWLINE from sympy.physics.quantum.sho1d import SHOStateNEWLINE assert _test_args(SHOState(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOKet():NEWLINE from sympy.physics.quantum.sho1d import SHOKetNEWLINE assert _test_args(SHOKet(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__quantum__sho1d__SHOBra():NEWLINE from sympy.physics.quantum.sho1d import SHOBraNEWLINE assert _test_args(SHOBra(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__AnnihilateBoson():NEWLINE from sympy.physics.secondquant import AnnihilateBosonNEWLINE assert _test_args(AnnihilateBoson(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__AnnihilateFermion():NEWLINE from sympy.physics.secondquant import AnnihilateFermionNEWLINE assert 
_test_args(AnnihilateFermion(0))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__physics__secondquant__Annihilator():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__AntiSymmetricTensor():NEWLINE from sympy.physics.secondquant import AntiSymmetricTensorNEWLINE i, j = symbols('i j', below_fermi=True)NEWLINE a, b = symbols('a b', above_fermi=True)NEWLINE assert _test_args(AntiSymmetricTensor('v', (a, i), (b, j)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__BosonState():NEWLINE from sympy.physics.secondquant import BosonStateNEWLINE assert _test_args(BosonState((0, 1)))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__physics__secondquant__BosonicOperator():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__Commutator():NEWLINE from sympy.physics.secondquant import CommutatorNEWLINE assert _test_args(Commutator(x, y))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__CreateBoson():NEWLINE from sympy.physics.secondquant import CreateBosonNEWLINE assert _test_args(CreateBoson(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__CreateFermion():NEWLINE from sympy.physics.secondquant import CreateFermionNEWLINE assert _test_args(CreateFermion(0))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__physics__secondquant__Creator():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__Dagger():NEWLINE from sympy.physics.secondquant import DaggerNEWLINE from sympy import INEWLINE assert _test_args(Dagger(2*I))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FermionState():NEWLINE from sympy.physics.secondquant import FermionStateNEWLINE assert _test_args(FermionState((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FermionicOperator():NEWLINE from sympy.physics.secondquant import FermionicOperatorNEWLINE assert _test_args(FermionicOperator(0))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__secondquant__FockState():NEWLINE from sympy.physics.secondquant import FockStateNEWLINE assert _test_args(FockState((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateBosonBra():NEWLINE from sympy.physics.secondquant import FockStateBosonBraNEWLINE assert _test_args(FockStateBosonBra((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateBosonKet():NEWLINE from sympy.physics.secondquant import FockStateBosonKetNEWLINE assert _test_args(FockStateBosonKet((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateBra():NEWLINE from sympy.physics.secondquant import FockStateBraNEWLINE assert _test_args(FockStateBra((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateFermionBra():NEWLINE from sympy.physics.secondquant import FockStateFermionBraNEWLINE assert _test_args(FockStateFermionBra((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateFermionKet():NEWLINE from sympy.physics.secondquant import FockStateFermionKetNEWLINE assert _test_args(FockStateFermionKet((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__FockStateKet():NEWLINE from sympy.physics.secondquant import FockStateKetNEWLINE assert _test_args(FockStateKet((0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__InnerProduct():NEWLINE from sympy.physics.secondquant import InnerProductNEWLINE from sympy.physics.secondquant import FockStateKet, FockStateBraNEWLINE assert _test_args(InnerProduct(FockStateBra((0, 1)), FockStateKet((0, 1))))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__NO():NEWLINE from sympy.physics.secondquant import NO, F, FdNEWLINE assert _test_args(NO(Fd(x)*F(y)))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__PermutationOperator():NEWLINE from sympy.physics.secondquant import PermutationOperatorNEWLINE assert _test_args(PermutationOperator(0, 1))NEWLINENEWLINENEWLINEdef 
test_sympy__physics__secondquant__SqOperator():NEWLINE from sympy.physics.secondquant import SqOperatorNEWLINE assert _test_args(SqOperator(0))NEWLINENEWLINENEWLINEdef test_sympy__physics__secondquant__TensorSymbol():NEWLINE from sympy.physics.secondquant import TensorSymbolNEWLINE assert _test_args(TensorSymbol(x))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__dimensions__Dimension():NEWLINE from sympy.physics.units.dimensions import DimensionNEWLINE assert _test_args(Dimension("length", "L"))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__dimensions__DimensionSystem():NEWLINE from sympy.physics.units.dimensions import DimensionSystemNEWLINE from sympy.physics.units.dimensions import length, time, velocityNEWLINE assert _test_args(DimensionSystem((length, time), (velocity,)))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__quantities__Quantity():NEWLINE from sympy.physics.units.quantities import QuantityNEWLINE from sympy.physics.units import lengthNEWLINE assert _test_args(Quantity("dam", length, 10))NEWLINENEWLINENEWLINEdef test_sympy__physics__units__prefixes__Prefix():NEWLINE from sympy.physics.units.prefixes import PrefixNEWLINE assert _test_args(Prefix('kilo', 'k', 3))NEWLINENEWLINENEWLINEdef test_sympy__core__numbers__AlgebraicNumber():NEWLINE from sympy.core.numbers import AlgebraicNumberNEWLINE assert _test_args(AlgebraicNumber(sqrt(2), [1, 2, 3]))NEWLINENEWLINENEWLINEdef test_sympy__polys__polytools__GroebnerBasis():NEWLINE from sympy.polys.polytools import GroebnerBasisNEWLINE assert _test_args(GroebnerBasis([x, y, z], x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__polys__polytools__Poly():NEWLINE from sympy.polys.polytools import PolyNEWLINE assert _test_args(Poly(2, x, y))NEWLINENEWLINENEWLINEdef test_sympy__polys__polytools__PurePoly():NEWLINE from sympy.polys.polytools import PurePolyNEWLINE assert _test_args(PurePoly(2, x, y))NEWLINENEWLINENEWLINE@SKIP('abstract class')NEWLINEdef test_sympy__polys__rootoftools__RootOf():NEWLINE 
passNEWLINENEWLINENEWLINEdef test_sympy__polys__rootoftools__ComplexRootOf():NEWLINE from sympy.polys.rootoftools import ComplexRootOfNEWLINE assert _test_args(ComplexRootOf(x**3 + x + 1, 0))NEWLINENEWLINENEWLINEdef test_sympy__polys__rootoftools__RootSum():NEWLINE from sympy.polys.rootoftools import RootSumNEWLINE assert _test_args(RootSum(x**3 + x + 1, sin))NEWLINENEWLINENEWLINEdef test_sympy__series__limits__Limit():NEWLINE from sympy.series.limits import LimitNEWLINE assert _test_args(Limit(x, x, 0, dir='-'))NEWLINENEWLINENEWLINEdef test_sympy__series__order__Order():NEWLINE from sympy.series.order import OrderNEWLINE assert _test_args(Order(1, x, y))NEWLINENEWLINENEWLINE@SKIP('Abstract Class')NEWLINEdef test_sympy__series__sequences__SeqBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__series__sequences__EmptySequence():NEWLINE from sympy.series.sequences import EmptySequenceNEWLINE assert _test_args(EmptySequence())NEWLINENEWLINENEWLINE@SKIP('Abstract Class')NEWLINEdef test_sympy__series__sequences__SeqExpr():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqPer():NEWLINE from sympy.series.sequences import SeqPerNEWLINE assert _test_args(SeqPer((1, 2, 3), (0, 10)))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqFormula():NEWLINE from sympy.series.sequences import SeqFormulaNEWLINE assert _test_args(SeqFormula(x**2, (0, 10)))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqExprOp():NEWLINE from sympy.series.sequences import SeqExprOp, sequenceNEWLINE s1 = sequence((1, 2, 3))NEWLINE s2 = sequence(x**2)NEWLINE assert _test_args(SeqExprOp(s1, s2))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqAdd():NEWLINE from sympy.series.sequences import SeqAdd, sequenceNEWLINE s1 = sequence((1, 2, 3))NEWLINE s2 = sequence(x**2)NEWLINE assert _test_args(SeqAdd(s1, s2))NEWLINENEWLINENEWLINEdef test_sympy__series__sequences__SeqMul():NEWLINE from sympy.series.sequences import SeqMul, sequenceNEWLINE s1 = sequence((1, 2, 
3))NEWLINE s2 = sequence(x**2)NEWLINE assert _test_args(SeqMul(s1, s2))NEWLINENEWLINENEWLINE@SKIP('Abstract Class')NEWLINEdef test_sympy__series__series_class__SeriesBase():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__series__fourier__FourierSeries():NEWLINE from sympy.series.fourier import fourier_seriesNEWLINE assert _test_args(fourier_series(x, (x, -pi, pi)))NEWLINENEWLINENEWLINEdef test_sympy__series__formal__FormalPowerSeries():NEWLINE from sympy.series.formal import fpsNEWLINE assert _test_args(fps(log(1 + x), x))NEWLINENEWLINENEWLINEdef test_sympy__simplify__hyperexpand__Hyper_Function():NEWLINE from sympy.simplify.hyperexpand import Hyper_FunctionNEWLINE assert _test_args(Hyper_Function([2], [1]))NEWLINENEWLINENEWLINEdef test_sympy__simplify__hyperexpand__G_Function():NEWLINE from sympy.simplify.hyperexpand import G_FunctionNEWLINE assert _test_args(G_Function([2], [1], [], []))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__tensor__array__ndim_array__ImmutableNDimArray():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__tensor__array__dense_ndim_array__ImmutableDenseNDimArray():NEWLINE from sympy.tensor.array.dense_ndim_array import ImmutableDenseNDimArrayNEWLINE densarr = ImmutableDenseNDimArray(range(10, 34), (2, 3, 4))NEWLINE assert _test_args(densarr)NEWLINENEWLINENEWLINEdef test_sympy__tensor__array__sparse_ndim_array__ImmutableSparseNDimArray():NEWLINE from sympy.tensor.array.sparse_ndim_array import ImmutableSparseNDimArrayNEWLINE sparr = ImmutableSparseNDimArray(range(10, 34), (2, 3, 4))NEWLINE assert _test_args(sparr)NEWLINENEWLINENEWLINEdef test_sympy__tensor__indexed__Idx():NEWLINE from sympy.tensor.indexed import IdxNEWLINE assert _test_args(Idx('test'))NEWLINE assert _test_args(Idx(1, (0, 10)))NEWLINENEWLINENEWLINEdef test_sympy__tensor__indexed__Indexed():NEWLINE from sympy.tensor.indexed import Indexed, IdxNEWLINE assert _test_args(Indexed('A', Idx('i'), Idx('j')))NEWLINENEWLINENEWLINEdef 
test_sympy__tensor__indexed__IndexedBase():NEWLINE from sympy.tensor.indexed import IndexedBaseNEWLINE assert _test_args(IndexedBase('A', shape=(x, y)))NEWLINE assert _test_args(IndexedBase('A', 1))NEWLINE assert _test_args(IndexedBase('A')[0, 1])NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorIndexType():NEWLINE from sympy.tensor.tensor import TensorIndexTypeNEWLINE assert _test_args(TensorIndexType('Lorentz', metric=False))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorSymmetry():NEWLINE from sympy.tensor.tensor import TensorSymmetry, get_symmetric_group_sgsNEWLINE assert _test_args(TensorSymmetry(get_symmetric_group_sgs(2)))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorType():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, get_symmetric_group_sgs, TensorTypeNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE assert _test_args(TensorType([Lorentz], sym))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorHead():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, TensorHeadNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE assert _test_args(TensorHead('p', S1, 0))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensorIndex():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorIndexNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE assert _test_args(TensorIndex('i', Lorentz))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__tensor__tensor__TensExpr():NEWLINE passNEWLINENEWLINEdef test_sympy__tensor__tensor__TensAdd():NEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensAddNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE a, b = 
tensor_indices('a,b', Lorentz)NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE p, q = S1('p,q')NEWLINE t1 = p(a)NEWLINE t2 = q(a)NEWLINE assert _test_args(TensAdd(t1, t2))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__Tensor():NEWLINE from sympy.core import SNEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDSNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE a, b = tensor_indices('a,b', Lorentz)NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE p = S1('p')NEWLINE assert _test_args(p(a))NEWLINENEWLINENEWLINEdef test_sympy__tensor__tensor__TensMul():NEWLINE from sympy.core import SNEWLINE from sympy.tensor.tensor import TensorIndexType, TensorSymmetry, TensorType, get_symmetric_group_sgs, tensor_indices, TensMul, TIDSNEWLINE Lorentz = TensorIndexType('Lorentz', dummy_fmt='L')NEWLINE a, b = tensor_indices('a,b', Lorentz)NEWLINE sym = TensorSymmetry(get_symmetric_group_sgs(1))NEWLINE S1 = TensorType([Lorentz], sym)NEWLINE p = S1('p')NEWLINE q = S1('q')NEWLINE assert _test_args(3*p(a)*q(b))NEWLINENEWLINENEWLINEdef test_as_coeff_add():NEWLINE assert (7, (3*x, 4*x**2)) == (7 + 3*x + 4*x**2).as_coeff_add()NEWLINENEWLINENEWLINEdef test_sympy__geometry__curve__Curve():NEWLINE from sympy.geometry.curve import CurveNEWLINE assert _test_args(Curve((x, 1), (x, 0, 1)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__point__Point():NEWLINE from sympy.geometry.point import PointNEWLINE assert _test_args(Point(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__geometry__point__Point2D():NEWLINE from sympy.geometry.point import Point2DNEWLINE assert _test_args(Point2D(0, 1))NEWLINENEWLINENEWLINEdef test_sympy__geometry__point__Point3D():NEWLINE from sympy.geometry.point import Point3DNEWLINE assert _test_args(Point3D(0, 1, 2))NEWLINENEWLINENEWLINEdef 
test_sympy__geometry__ellipse__Ellipse():NEWLINE from sympy.geometry.ellipse import EllipseNEWLINE assert _test_args(Ellipse((0, 1), 2, 3))NEWLINENEWLINENEWLINEdef test_sympy__geometry__ellipse__Circle():NEWLINE from sympy.geometry.ellipse import CircleNEWLINE assert _test_args(Circle((0, 1), 2))NEWLINENEWLINENEWLINEdef test_sympy__geometry__parabola__Parabola():NEWLINE from sympy.geometry.parabola import ParabolaNEWLINE from sympy.geometry.line import LineNEWLINE assert _test_args(Parabola((0, 0), Line((2, 3), (4, 3))))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__line__LinearEntity():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Line():NEWLINE from sympy.geometry.line import LineNEWLINE assert _test_args(Line((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Ray():NEWLINE from sympy.geometry.line import RayNEWLINE assert _test_args(Ray((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Segment():NEWLINE from sympy.geometry.line import SegmentNEWLINE assert _test_args(Segment((0, 1), (2, 3)))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__line__LinearEntity2D():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Line2D():NEWLINE from sympy.geometry.line import Line2DNEWLINE assert _test_args(Line2D((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Ray2D():NEWLINE from sympy.geometry.line import Ray2DNEWLINE assert _test_args(Ray2D((0, 1), (2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Segment2D():NEWLINE from sympy.geometry.line import Segment2DNEWLINE assert _test_args(Segment2D((0, 1), (2, 3)))NEWLINENEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__line__LinearEntity3D():NEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Line3D():NEWLINE from sympy.geometry.line import Line3DNEWLINE assert _test_args(Line3D((0, 1, 1), (2, 3, 4)))NEWLINENEWLINENEWLINEdef 
test_sympy__geometry__line__Segment3D():NEWLINE from sympy.geometry.line import Segment3DNEWLINE assert _test_args(Segment3D((0, 1, 1), (2, 3, 4)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__line__Ray3D():NEWLINE from sympy.geometry.line import Ray3DNEWLINE assert _test_args(Ray3D((0, 1, 1), (2, 3, 4)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__plane__Plane():NEWLINE from sympy.geometry.plane import PlaneNEWLINE assert _test_args(Plane((1, 1, 1), (-3, 4, -2), (1, 2, 3)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__polygon__Polygon():NEWLINE from sympy.geometry.polygon import PolygonNEWLINE assert _test_args(Polygon((0, 1), (2, 3), (4, 5), (6, 7)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__polygon__RegularPolygon():NEWLINE from sympy.geometry.polygon import RegularPolygonNEWLINE assert _test_args(RegularPolygon((0, 1), 2, 3, 4))NEWLINENEWLINENEWLINEdef test_sympy__geometry__polygon__Triangle():NEWLINE from sympy.geometry.polygon import TriangleNEWLINE assert _test_args(Triangle((0, 1), (2, 3), (4, 5)))NEWLINENEWLINENEWLINEdef test_sympy__geometry__entity__GeometryEntity():NEWLINE from sympy.geometry.entity import GeometryEntityNEWLINE from sympy.geometry.point import PointNEWLINE assert _test_args(GeometryEntity(Point(1, 0), 1, [1, 2]))NEWLINENEWLINE@SKIP("abstract class")NEWLINEdef test_sympy__geometry__entity__GeometrySet():NEWLINE passNEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Manifold():NEWLINE from sympy.diffgeom import ManifoldNEWLINE assert _test_args(Manifold('name', 3))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Patch():NEWLINE from sympy.diffgeom import Manifold, PatchNEWLINE assert _test_args(Patch('name', Manifold('name', 3)))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__CoordSystem():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystemNEWLINE assert _test_args(CoordSystem('name', Patch('name', Manifold('name', 3))))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__diffgeom__diffgeom__Point():NEWLINE 
from sympy.diffgeom import Manifold, Patch, CoordSystem, PointNEWLINE assert _test_args(Point(NEWLINE CoordSystem('name', Patch('name', Manifold('name', 3))), [x, y]))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__BaseScalarField():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarFieldNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(BaseScalarField(cs, 0))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__BaseVectorField():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorFieldNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(BaseVectorField(cs, 0))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Differential():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, DifferentialNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(Differential(BaseScalarField(cs, 0)))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__Commutator():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, CommutatorNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE cs1 = CoordSystem('name1', Patch('name', Manifold('name', 3)))NEWLINE v = BaseVectorField(cs, 0)NEWLINE v1 = BaseVectorField(cs1, 0)NEWLINE assert _test_args(Commutator(v, v1))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__TensorProduct():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, TensorProductNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE d = Differential(BaseScalarField(cs, 0))NEWLINE assert _test_args(TensorProduct(d, d))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__WedgeProduct():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, WedgeProductNEWLINE cs = CoordSystem('name', Patch('name', 
Manifold('name', 3)))NEWLINE d = Differential(BaseScalarField(cs, 0))NEWLINE d1 = Differential(BaseScalarField(cs, 1))NEWLINE assert _test_args(WedgeProduct(d, d1))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__LieDerivative():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseScalarField, Differential, BaseVectorField, LieDerivativeNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE d = Differential(BaseScalarField(cs, 0))NEWLINE v = BaseVectorField(cs, 0)NEWLINE assert _test_args(LieDerivative(v, d))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__diffgeom__diffgeom__BaseCovarDerivativeOp():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseCovarDerivativeOpNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE assert _test_args(BaseCovarDerivativeOp(cs, 0, [[[0, ]*3, ]*3, ]*3))NEWLINENEWLINENEWLINEdef test_sympy__diffgeom__diffgeom__CovarDerivativeOp():NEWLINE from sympy.diffgeom import Manifold, Patch, CoordSystem, BaseVectorField, CovarDerivativeOpNEWLINE cs = CoordSystem('name', Patch('name', Manifold('name', 3)))NEWLINE v = BaseVectorField(cs, 0)NEWLINE _test_args(CovarDerivativeOp(v, [[[0, ]*3, ]*3, ]*3))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Class():NEWLINE from sympy.categories.baseclasses import ClassNEWLINE assert _test_args(Class())NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Object():NEWLINE from sympy.categories import ObjectNEWLINE assert _test_args(Object("A"))NEWLINENEWLINENEWLINE@XFAILNEWLINEdef test_sympy__categories__baseclasses__Morphism():NEWLINE from sympy.categories import Object, MorphismNEWLINE assert _test_args(Morphism(Object("A"), Object("B")))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__IdentityMorphism():NEWLINE from sympy.categories import Object, IdentityMorphismNEWLINE assert _test_args(IdentityMorphism(Object("A")))NEWLINENEWLINENEWLINEdef 
test_sympy__categories__baseclasses__NamedMorphism():NEWLINE from sympy.categories import Object, NamedMorphismNEWLINE assert _test_args(NamedMorphism(Object("A"), Object("B"), "f"))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__CompositeMorphism():NEWLINE from sympy.categories import Object, NamedMorphism, CompositeMorphismNEWLINE A = Object("A")NEWLINE B = Object("B")NEWLINE C = Object("C")NEWLINE f = NamedMorphism(A, B, "f")NEWLINE g = NamedMorphism(B, C, "g")NEWLINE assert _test_args(CompositeMorphism(f, g))NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Diagram():NEWLINE from sympy.categories import Object, NamedMorphism, DiagramNEWLINE A = Object("A")NEWLINE B = Object("B")NEWLINE C = Object("C")NEWLINE f = NamedMorphism(A, B, "f")NEWLINE d = Diagram([f])NEWLINE assert _test_args(d)NEWLINENEWLINENEWLINEdef test_sympy__categories__baseclasses__Category():NEWLINE from sympy.categories import Object, NamedMorphism, Diagram, CategoryNEWLINE A = Object("A")NEWLINE B = Object("B")NEWLINE C = Object("C")NEWLINE f = NamedMorphism(A, B, "f")NEWLINE g = NamedMorphism(B, C, "g")NEWLINE d1 = Diagram([f, g])NEWLINE d2 = Diagram([f])NEWLINE K = Category("K", commutative_diagrams=[d1, d2])NEWLINE assert _test_args(K)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___totient():NEWLINE from sympy.ntheory.factor_ import totientNEWLINE k = symbols('k', integer=True)NEWLINE t = totient(k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___reduced_totient():NEWLINE from sympy.ntheory.factor_ import reduced_totientNEWLINE k = symbols('k', integer=True)NEWLINE t = reduced_totient(k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___divisor_sigma():NEWLINE from sympy.ntheory.factor_ import divisor_sigmaNEWLINE k = symbols('k', integer=True)NEWLINE n = symbols('n', integer=True)NEWLINE t = divisor_sigma(n, k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef 
test_sympy__ntheory__factor___udivisor_sigma():NEWLINE from sympy.ntheory.factor_ import udivisor_sigmaNEWLINE k = symbols('k', integer=True)NEWLINE n = symbols('n', integer=True)NEWLINE t = udivisor_sigma(n, k)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___primenu():NEWLINE from sympy.ntheory.factor_ import primenuNEWLINE n = symbols('n', integer=True)NEWLINE t = primenu(n)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__factor___primeomega():NEWLINE from sympy.ntheory.factor_ import primeomegaNEWLINE n = symbols('n', integer=True)NEWLINE t = primeomega(n)NEWLINE assert _test_args(t)NEWLINENEWLINENEWLINEdef test_sympy__ntheory__residue_ntheory__mobius():NEWLINE from sympy.ntheory import mobiusNEWLINE assert _test_args(mobius(2))NEWLINENEWLINENEWLINEdef test_sympy__physics__optics__waves__TWave():NEWLINE from sympy.physics.optics import TWaveNEWLINE A, f, phi = symbols('A, f, phi')NEWLINE assert _test_args(TWave(A, f, phi))NEWLINENEWLINENEWLINEdef test_sympy__physics__optics__gaussopt__BeamParameter():NEWLINE from sympy.physics.optics import BeamParameterNEWLINE assert _test_args(BeamParameter(530e-9, 1, w=1e-3))NEWLINENEWLINENEWLINEdef test_sympy__physics__optics__medium__Medium():NEWLINE from sympy.physics.optics import MediumNEWLINE assert _test_args(Medium('m'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ast__Assignment():NEWLINE from sympy.codegen.ast import AssignmentNEWLINE assert _test_args(Assignment(x, y))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__expm1():NEWLINE from sympy.codegen.cfunctions import expm1NEWLINE assert _test_args(expm1(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__log1p():NEWLINE from sympy.codegen.cfunctions import log1pNEWLINE assert _test_args(log1p(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__exp2():NEWLINE from sympy.codegen.cfunctions import exp2NEWLINE assert _test_args(exp2(x))NEWLINENEWLINENEWLINEdef 
test_sympy__codegen__cfunctions__log2():NEWLINE from sympy.codegen.cfunctions import log2NEWLINE assert _test_args(log2(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__fma():NEWLINE from sympy.codegen.cfunctions import fmaNEWLINE assert _test_args(fma(x, y, z))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__log10():NEWLINE from sympy.codegen.cfunctions import log10NEWLINE assert _test_args(log10(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__cfunctions__Sqrt():NEWLINE from sympy.codegen.cfunctions import SqrtNEWLINE assert _test_args(Sqrt(x))NEWLINENEWLINEdef test_sympy__codegen__cfunctions__Cbrt():NEWLINE from sympy.codegen.cfunctions import CbrtNEWLINE assert _test_args(Cbrt(x))NEWLINENEWLINEdef test_sympy__codegen__cfunctions__hypot():NEWLINE from sympy.codegen.cfunctions import hypotNEWLINE assert _test_args(hypot(x, y))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__FFunction():NEWLINE from sympy.codegen.ffunctions import FFunctionNEWLINE assert _test_args(FFunction('f'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__F95Function():NEWLINE from sympy.codegen.ffunctions import F95FunctionNEWLINE assert _test_args(F95Function('f'))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__isign():NEWLINE from sympy.codegen.ffunctions import isignNEWLINE assert _test_args(isign(1, x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__dsign():NEWLINE from sympy.codegen.ffunctions import dsignNEWLINE assert _test_args(dsign(1, x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__cmplx():NEWLINE from sympy.codegen.ffunctions import cmplxNEWLINE assert _test_args(cmplx(x, y))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__kind():NEWLINE from sympy.codegen.ffunctions import kindNEWLINE assert _test_args(kind(x))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__merge():NEWLINE from sympy.codegen.ffunctions import mergeNEWLINE assert _test_args(merge(1, 2, Eq(x, 0)))NEWLINENEWLINENEWLINEdef 
test_sympy__codegen__ffunctions___literal():NEWLINE from sympy.codegen.ffunctions import _literalNEWLINE assert _test_args(_literal(1))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__literal_sp():NEWLINE from sympy.codegen.ffunctions import literal_spNEWLINE assert _test_args(literal_sp(1))NEWLINENEWLINENEWLINEdef test_sympy__codegen__ffunctions__literal_dp():NEWLINE from sympy.codegen.ffunctions import literal_dpNEWLINE assert _test_args(literal_dp(1))NEWLINENEWLINENEWLINEdef test_sympy__vector__coordsysrect__CoordSys3D():NEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE assert _test_args(CoordSys3D('C'))NEWLINENEWLINENEWLINEdef test_sympy__vector__point__Point():NEWLINE from sympy.vector.point import PointNEWLINE assert _test_args(Point('P'))NEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependent():NEWLINE from sympy.vector.basisdependent import BasisDependentNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependentMul():NEWLINE from sympy.vector.basisdependent import BasisDependentMulNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependentAdd():NEWLINE from sympy.vector.basisdependent import BasisDependentAddNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__basisdependent__BasisDependentZero():NEWLINE from sympy.vector.basisdependent import BasisDependentZeroNEWLINE #These classes have been created to maintain an OOP hierarchyNEWLINE #for Vectors and Dyadics. 
Are NOT meant to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__vector__BaseVector():NEWLINE from sympy.vector.vector import BaseVectorNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(BaseVector(0, C, ' ', ' '))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__VectorAdd():NEWLINE from sympy.vector.vector import VectorAdd, VectorMulNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE from sympy.abc import a, b, c, x, y, zNEWLINE v1 = a*C.i + b*C.j + c*C.kNEWLINE v2 = x*C.i + y*C.j + z*C.kNEWLINE assert _test_args(VectorAdd(v1, v2))NEWLINE assert _test_args(VectorMul(x, v1))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__VectorMul():NEWLINE from sympy.vector.vector import VectorMulNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE from sympy.abc import aNEWLINE assert _test_args(VectorMul(a, C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__VectorZero():NEWLINE from sympy.vector.vector import VectorZeroNEWLINE assert _test_args(VectorZero())NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__Vector():NEWLINE from sympy.vector.vector import VectorNEWLINE #Vector is never to be initialized using argsNEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__vector__vector__Cross():NEWLINE from sympy.vector.vector import CrossNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE _test_args(Cross(C.i, C.j))NEWLINENEWLINENEWLINEdef test_sympy__vector__vector__Dot():NEWLINE from sympy.vector.vector import DotNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE _test_args(Dot(C.i, C.j))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__Dyadic():NEWLINE from sympy.vector.dyadic import DyadicNEWLINE #Dyadic is never to be initialized using argsNEWLINE passNEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__BaseDyadic():NEWLINE from 
sympy.vector.dyadic import BaseDyadicNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(BaseDyadic(C.i, C.j))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__DyadicMul():NEWLINE from sympy.vector.dyadic import BaseDyadic, DyadicMulNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(DyadicMul(3, BaseDyadic(C.i, C.j)))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__DyadicAdd():NEWLINE from sympy.vector.dyadic import BaseDyadic, DyadicAddNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(2 * DyadicAdd(BaseDyadic(C.i, C.i),NEWLINE BaseDyadic(C.i, C.j)))NEWLINENEWLINENEWLINEdef test_sympy__vector__dyadic__DyadicZero():NEWLINE from sympy.vector.dyadic import DyadicZeroNEWLINE assert _test_args(DyadicZero())NEWLINENEWLINENEWLINEdef test_sympy__vector__deloperator__Del():NEWLINE from sympy.vector.deloperator import DelNEWLINE assert _test_args(Del())NEWLINENEWLINENEWLINEdef test_sympy__vector__operators__Curl():NEWLINE from sympy.vector.operators import CurlNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(Curl(C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__operators__Divergence():NEWLINE from sympy.vector.operators import DivergenceNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(Divergence(C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__operators__Gradient():NEWLINE from sympy.vector.operators import GradientNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(Gradient(C.x))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__Orienter():NEWLINE from sympy.vector.orienters import OrienterNEWLINE #Not to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__ThreeAngleOrienter():NEWLINE from 
sympy.vector.orienters import ThreeAngleOrienterNEWLINE #Not to be initializedNEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__AxisOrienter():NEWLINE from sympy.vector.orienters import AxisOrienterNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(AxisOrienter(x, C.i))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__BodyOrienter():NEWLINE from sympy.vector.orienters import BodyOrienterNEWLINE assert _test_args(BodyOrienter(x, y, z, '123'))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__SpaceOrienter():NEWLINE from sympy.vector.orienters import SpaceOrienterNEWLINE assert _test_args(SpaceOrienter(x, y, z, '123'))NEWLINENEWLINENEWLINEdef test_sympy__vector__orienters__QuaternionOrienter():NEWLINE from sympy.vector.orienters import QuaternionOrienterNEWLINE a, b, c, d = symbols('a b c d')NEWLINE assert _test_args(QuaternionOrienter(a, b, c, d))NEWLINENEWLINENEWLINEdef test_sympy__vector__scalar__BaseScalar():NEWLINE from sympy.vector.scalar import BaseScalarNEWLINE from sympy.vector.coordsysrect import CoordSys3DNEWLINE C = CoordSys3D('C')NEWLINE assert _test_args(BaseScalar(0, C, ' ', ' '))NEWLINENEWLINENEWLINEdef test_sympy__physics__wigner__Wigner3j():NEWLINE from sympy.physics.wigner import Wigner3jNEWLINE assert _test_args(Wigner3j(0, 0, 0, 0, 0, 0))NEWLINENEWLINEdef test_sympy__integrals__rubi__symbol__matchpyWC():NEWLINE from sympy.integrals.rubi.symbol import matchpyWCNEWLINE assert _test_args(matchpyWC(1, True, 'a'))NEWLINE |
import setuptoolsNEWLINENEWLINEwith open("README.md", "r", encoding="utf-8") as f:NEWLINE long_description = f.read()NEWLINENEWLINEsetuptools.setup(NEWLINE name="sunnyvale",NEWLINE version="0.0.1",NEWLINE author="Gunhoon Lee",NEWLINE author_email="gunhoon@gmail.com",NEWLINE description="A small example package",NEWLINE long_description=long_description,NEWLINE long_description_content_type="text/markdown",NEWLINE url="https://github.com/gunhoon/sunnyvale",NEWLINE packages=setuptools.find_packages(),NEWLINE classifiers=[NEWLINE "Programming Language :: Python :: 3",NEWLINE "License :: OSI Approved :: MIT License",NEWLINE "Operating System :: OS Independent",NEWLINE ],NEWLINE python_requires='>=3.6',NEWLINE)NEWLINE |
# -*- coding: utf-8 -*-NEWLINE"""NEWLINETests for the Py2-like class:`basestring` type.NEWLINE"""NEWLINENEWLINEfrom __future__ import absolute_import, unicode_literals, print_functionNEWLINEimport osNEWLINENEWLINEfrom past import utilsNEWLINEfrom future.tests.base import unittestNEWLINEfrom past.builtins import basestring, str as oldstrNEWLINENEWLINENEWLINEclass TestBaseString(unittest.TestCase):NEWLINENEWLINE def test_isinstance(self):NEWLINE s = b'abc'NEWLINE self.assertTrue(isinstance(s, basestring))NEWLINE s2 = oldstr(b'abc')NEWLINE self.assertTrue(isinstance(s2, basestring))NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE unittest.main()NEWLINE |
import importlib
import os
import time
import random

import torch
import torch.nn.functional as F
import numpy as np
import ComputePostBN
from utils.setlogger import get_logger

from utils.model_profiling import model_profiling
from utils.config import FLAGS
from utils.datasets import get_dataset

# set log files
# Log directory is keyed by dataset and model name (FLAGS.model[7:] strips a
# fixed module-path prefix — presumably "models." plus one more level; confirm
# against utils.config).
saved_path = os.path.join("logs", '{}-{}'.format(FLAGS.dataset, FLAGS.model[7:]))
if not os.path.exists(saved_path):
    os.makedirs(saved_path)
logger = get_logger(os.path.join(saved_path, '{}_div1optimizer.log'.format('test' if FLAGS.test_only else 'train')))

def set_random_seed():
    """Seed python, numpy and torch (CPU + all CUDA devices) RNGs.

    Uses FLAGS.random_seed when present, else 0.
    """
    if hasattr(FLAGS, 'random_seed'):
        seed = FLAGS.random_seed
    else:
        seed = 0
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)

def get_model():
    """Instantiate the model class from the module named by FLAGS.model."""
    model_lib = importlib.import_module(FLAGS.model)
    model = model_lib.Model(FLAGS.num_classes, input_size=FLAGS.image_size)
    return model

def get_optimizer(model):
    """Build the SGD optimizer for `model`.

    For imagenet1k, weight decay is applied per-parameter: normal convs
    (4-D weight with in_channels != 1) and fc layers (2-D weight) get
    FLAGS.weight_decay; everything else (depthwise convs, biases, BN) gets 0.
    Other datasets use a single uniform parameter group.
    """
    # all depthwise convolution (N, 1, x, x) has no weight decay
    # weight decay only on normal conv and fc
    if FLAGS.dataset == 'imagenet1k':
        model_params = []
        for params in model.parameters():
            ps = list(params.size())
            if len(ps) == 4 and ps[1] != 1:  # normal conv
                weight_decay = FLAGS.weight_decay
            elif len(ps) == 2:  # fc
                weight_decay = FLAGS.weight_decay
            else:
                weight_decay = 0
            item = {'params': params, 'weight_decay': weight_decay,
                    'lr': FLAGS.lr, 'momentum': FLAGS.momentum,
                    'nesterov': FLAGS.nesterov}
            model_params.append(item)
        optimizer = torch.optim.SGD(model_params)
    else:
        optimizer = torch.optim.SGD(model.parameters(), FLAGS.lr,
                                    momentum=FLAGS.momentum, nesterov=FLAGS.nesterov,
                                    weight_decay=FLAGS.weight_decay)
    return optimizer

def profiling(model, use_cuda):
    """Profile FLOPs/params of `model` at every width in FLAGS.width_mult_list.

    `use_cuda` selects GPU vs CPU profiling; only the widest setting is
    verbose unless FLAGS.model_profiling_verbose overrides it.
    """
    print('Start model profiling, use_cuda:{}.'.format(use_cuda))
    for width_mult in sorted(FLAGS.width_mult_list, reverse=True):
        model.apply(
            lambda m: setattr(m, 'width_mult', width_mult))
        print('Model profiling with width mult {}x:'.format(width_mult))
        verbose = width_mult == max(FLAGS.width_mult_list)
        model_profiling(
            model, FLAGS.image_size, FLAGS.image_size,
            verbose=getattr(FLAGS, 'model_profiling_verbose', verbose))

def train(epoch, loader, model, criterion, optimizer, lr_scheduler):
    """One epoch of sandwich-rule slimmable training.

    Per batch: train the max width against the hard labels, then distill the
    min width plus two randomly sampled widths from the detached max-width
    output (KL divergence), accumulating all gradients before one
    optimizer step. `input_list` holds the same batch at 4 resolutions;
    sub-widths pick one at random.
    """
    t_start = time.time()
    model.train()
    for batch_idx, (input_list, target) in enumerate(loader):
        target = target.cuda(non_blocking=True)
        optimizer.zero_grad()
        # do max width
        max_width = FLAGS.width_mult_range[1]
        model.apply(lambda m: setattr(m, 'width_mult', max_width))
        max_output = model(input_list[0])
        loss = criterion(max_output, target)
        loss.backward()
        max_output_detach = max_output.detach()  # teacher signal for sub-widths
        # do other widths and resolution
        min_width = FLAGS.width_mult_range[0]
        width_mult_list = [min_width]
        sampled_width = list(np.random.uniform(FLAGS.width_mult_range[0], FLAGS.width_mult_range[1], 2))
        width_mult_list.extend(sampled_width)
        for width_mult in sorted(width_mult_list, reverse=True):
            model.apply(
                lambda m: setattr(m, 'width_mult', width_mult))
            output = model(input_list[random.randint(0, 3)])
            loss = torch.nn.KLDivLoss(reduction='batchmean')(F.log_softmax(output, dim=1), F.softmax(max_output_detach, dim=1))
            loss.backward()
        optimizer.step()
        lr_scheduler.step()  # per-iteration cosine schedule (scheduler spans len(loader)*num_epochs steps)
        # print training log
        if batch_idx % FLAGS.print_freq == 0 or batch_idx == len(loader)-1:
            with torch.no_grad():
                for width_mult in sorted(FLAGS.width_mult_list, reverse=True):
                    model.apply(lambda m: setattr(m, 'width_mult', width_mult))
                    output = model(input_list[0])
                    loss = criterion(output, target).cpu().numpy()
                    indices = torch.max(output, dim=1)[1]
                    acc = (indices == target).sum().cpu().numpy() / indices.size()[0]
                    logger.info('TRAIN {:.1f}s LR:{:.4f} {}x Epoch:{}/{} Iter:{}/{} Loss:{:.4f} Acc:{:.3f}'.format(
                        time.time() - t_start, optimizer.param_groups[0]['lr'], str(width_mult), epoch,
                        FLAGS.num_epochs, batch_idx, len(loader), loss, acc))


def validate(epoch, loader, model, criterion, postloader):
    """Evaluate every width at the training resolution.

    BatchNorm statistics are recalibrated on `postloader` (the train set)
    before each width is scored.
    """
    t_start = time.time()
    model.eval()
    resolution = FLAGS.image_size
    with torch.no_grad():
        for width_mult in sorted(FLAGS.width_mult_list, reverse=True):
            model.apply(lambda m: setattr(m, 'width_mult', width_mult))
            model = ComputePostBN.ComputeBN(model, postloader, resolution)
            loss, acc, cnt = 0, 0, 0
            for batch_idx, (input, target) in enumerate(loader):
                input, target = input.cuda(non_blocking=True), target.cuda(non_blocking=True)
                output = model(input)
                loss += criterion(output, target).cpu().numpy() * target.size()[0]
                indices = torch.max(output, dim=1)[1]
                acc += (indices == target).sum().cpu().numpy()
                cnt += target.size()[0]
            logger.info('VAL {:.1f}s {}x Epoch:{}/{} Loss:{:.4f} Acc:{:.3f}'.format(
                time.time() - t_start, str(width_mult), epoch,
                FLAGS.num_epochs, loss/cnt, acc/cnt))

def test(epoch, loader, model, criterion, postloader):
    """Evaluate every (resolution, width) pair, recalibrating BN per pair.

    Inputs are bilinearly resized to each resolution in FLAGS.resolution_list.
    NOTE(review): the log tag below says 'VAL' although this is the test loop —
    presumably a copy-paste from validate(); confirm before parsing logs by tag.
    """
    t_start = time.time()
    model.eval()
    with torch.no_grad():
        for resolution in FLAGS.resolution_list:
            for width_mult in sorted(FLAGS.width_mult_list, reverse=True):
                model.apply(lambda m: setattr(m, 'width_mult', width_mult))
                model = ComputePostBN.ComputeBN(model, postloader, resolution)
                loss, acc, cnt = 0, 0, 0
                for batch_idx, (input, target) in enumerate(loader):
                    input, target = input.cuda(non_blocking=True), target.cuda(non_blocking=True)
                    output = model(F.interpolate(input, (resolution, resolution), mode='bilinear', align_corners=True))
                    loss += criterion(output, target).cpu().numpy() * target.size()[0]
                    indices = torch.max(output, dim=1)[1]
                    acc += (indices == target).sum().cpu().numpy()
                    cnt += target.size()[0]
                logger.info('VAL {:.1f}s {}x-{} Epoch:{}/{} Loss:{:.4f} Acc:{:.3f}'.format(
                    time.time() - t_start, str(width_mult), str(resolution), epoch,
                    FLAGS.num_epochs, loss/cnt, acc/cnt))

def train_val_test():
    """End-to-end driver: build model/optimizer, optionally load weights,
    then either test once (FLAGS.test_only) or train with per-epoch
    validation and checkpointing.
    """
    # seed
    set_random_seed()

    # model
    model = get_model()
    model_wrapper = torch.nn.DataParallel(model).cuda()
    criterion = torch.nn.CrossEntropyLoss().cuda()
    train_loader, val_loader = get_dataset()

    # check pretrained
    if FLAGS.pretrained:
        checkpoint = torch.load(FLAGS.pretrained)
        # update keys from external models
        if type(checkpoint) == dict and 'model' in checkpoint:
            checkpoint = checkpoint['model']
        # Map external checkpoint keys onto ours positionally, skipping BN
        # running stats/counters; drop the final fc weight+bias when
        # fine-tuning (not test_only) so the classifier is re-initialized.
        new_keys = list(model_wrapper.state_dict().keys())
        old_keys = list(checkpoint.keys())
        new_keys = [key for key in new_keys if 'running' not in key]
        new_keys = [key for key in new_keys if 'tracked' not in key]
        old_keys = [key for key in old_keys if 'running' not in key]
        old_keys = [key for key in old_keys if 'tracked' not in key]
        if not FLAGS.test_only:
            old_keys = old_keys[:-2]
            new_keys = new_keys[:-2]

        new_checkpoint = {}
        for key_new, key_old in zip(new_keys, old_keys):
            new_checkpoint[key_new] = checkpoint[key_old]
        model_wrapper.load_state_dict(new_checkpoint, strict=False)
        print('Loaded model {}.'.format(FLAGS.pretrained))
    optimizer = get_optimizer(model_wrapper)
    # check resume training
    if FLAGS.resume:
        checkpoint = torch.load(FLAGS.resume)
        model_wrapper.load_state_dict(checkpoint['model'])
        optimizer.load_state_dict(checkpoint['optimizer'])
        last_epoch = checkpoint['last_epoch']
        lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, len(train_loader)*FLAGS.num_epochs)
        lr_scheduler.last_epoch = last_epoch
        print('Loaded checkpoint {} at epoch {}.'.format(
            FLAGS.resume, last_epoch))
    else:
        lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, len(train_loader)*FLAGS.num_epochs)
        last_epoch = lr_scheduler.last_epoch
    # print model and do profiling
    print(model_wrapper)
    if FLAGS.profiling:
        if 'gpu' in FLAGS.profiling:
            profiling(model, use_cuda=True)
        if 'cpu' in FLAGS.profiling:
            profiling(model, use_cuda=False)

    if FLAGS.test_only:
        logger.info('Start testing.')
        test(last_epoch, val_loader, model_wrapper, criterion, train_loader)
        return

    logger.info('Start training.')
    for epoch in range(last_epoch + 1, FLAGS.num_epochs):
        # train
        train(epoch, train_loader, model_wrapper, criterion, optimizer, lr_scheduler)

        # val
        validate(epoch, val_loader, model_wrapper, criterion, train_loader)

        # lr_scheduler.step()  # stepping happens per-iteration inside train()
        torch.save(
            {
                'model': model_wrapper.state_dict(),
                'optimizer': optimizer.state_dict(),
                'last_epoch': epoch,
            },
            os.path.join(saved_path, 'checkpoint_{}.pt'.format(epoch)))
    return


def main():
    """train and eval model"""
    train_val_test()


if __name__ == "__main__":
    main()
# Copyright 2021 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Class and functions to define and initialize the actor-critic model."""

import functools
import numpy as onp
from flax import linen as nn
from flax import optim  # NOTE(review): flax.optim is deprecated in newer flax releases in favor of optax — confirm pinned flax version.
import jax
import jax.numpy as jnp

# See issue #620.
# pytype: disable=wrong-keyword-args


class ActorCritic(nn.Module):
  """Class defining the actor-critic model."""

  num_outputs: int  # size of the policy head, i.e. number of discrete actions

  @nn.compact
  def __call__(self, x):
    """Define the convolutional network architecture.

    Architecture originates from "Human-level control through deep reinforcement
    learning.", Nature 518, no. 7540 (2015): 529-533.
    Note that this is different than the one from "Playing atari with deep
    reinforcement learning." arxiv.org/abs/1312.5602 (2013)

    Network is used to both estimate policy (logits) and expected state value;
    in other words, hidden layers' params are shared between policy and value
    networks, see e.g.:
    github.com/openai/baselines/blob/master/baselines/ppo1/cnn_policy.py
    """
    dtype = jnp.float32
    # Scale raw pixel values into [0, 1] before the conv stack.
    x = x.astype(dtype) / 255.
    x = nn.Conv(features=32, kernel_size=(8, 8), strides=(4, 4), name='conv1',
                dtype=dtype)(x)
    x = nn.relu(x)
    x = nn.Conv(features=64, kernel_size=(4, 4), strides=(2, 2), name='conv2',
                dtype=dtype)(x)
    x = nn.relu(x)
    x = nn.Conv(features=64, kernel_size=(3, 3), strides=(1, 1), name='conv3',
                dtype=dtype)(x)
    x = nn.relu(x)
    x = x.reshape((x.shape[0], -1))  # flatten
    x = nn.Dense(features=512, name='hidden', dtype=dtype)(x)
    x = nn.relu(x)
    # Two heads on the shared trunk: policy log-probs and scalar state value.
    logits = nn.Dense(features=self.num_outputs, name='logits', dtype=dtype)(x)
    policy_log_probabilities = nn.log_softmax(logits)
    value = nn.Dense(features=1, name='value', dtype=dtype)(x)
    return policy_log_probabilities, value

@functools.partial(jax.jit, static_argnums=1)
def get_initial_params(key: onp.ndarray, module: ActorCritic):
  """Initialize and return the model's parameter tree for a dummy input.

  `key` is a jax PRNG key; `module` is static so jit re-traces per module.
  """
  input_dims = (1, 84, 84, 4)  # (minibatch, height, width, stacked frames)
  init_shape = jnp.ones(input_dims, jnp.float32)
  initial_params = module.init(key, init_shape)['params']
  return initial_params

def create_optimizer(params, learning_rate: float):
  """Wrap `params` in an Adam optimizer with the given learning rate."""
  optimizer_def = optim.Adam(learning_rate)
  optimizer = optimizer_def.create(params)
  return optimizer
import matplotlib.pyplot as pltNEWLINEfrom sklearn.manifold import TSNENEWLINENEWLINE# Tsne PlotNEWLINEdef tsneplot(embeddings,labels,fig_path):NEWLINE print("********************* tSNE Plot*********************")NEWLINE X = TSNE(n_components=2,perplexity=100,n_iter=1000).fit_transform(embeddings)NEWLINE colors = ['#FF0000', '#06D506', '#0931F7', '#00FFFF', '#FFE500', '#F700FF', '#9300FF', '#FFD700','#10DADE'] # Red , Green, BlueNEWLINE for c in range(len(colors)):NEWLINE points = []NEWLINE for j in range(len(labels)):NEWLINE if (labels[j] == c):NEWLINE points.append(list(X[j]))NEWLINE x = []NEWLINE y = []NEWLINE for z in points:NEWLINE x.append(z[0])NEWLINE y.append(z[1])NEWLINE plt.plot(x, y, 'ro', c=colors[c], markersize=20, marker='.')NEWLINE plt.axis('off')NEWLINE plt.savefig(fig_path)NEWLINE plt.close() |
# proxy moduleNEWLINEfrom traitsui.wx.boolean_editor import *NEWLINE |
import pathlibNEWLINEimport randomNEWLINEimport reNEWLINENEWLINENEWLINEclass Tip:NEWLINE def __init__(self, html=None, ref_url=None, ref_name=None):NEWLINE self.html = htmlNEWLINE self.ref_url = ref_urlNEWLINE self.ref_name = ref_nameNEWLINENEWLINE @staticmethodNEWLINE def parse_meta(meta):NEWLINE meta = meta.split('\n')NEWLINE meta = [kv.split(': ') for kv in meta]NEWLINE meta = {k: v for k, v in meta}NEWLINE return metaNEWLINENEWLINE @classmethodNEWLINE def from_file(cls, path):NEWLINE with open(path, 'r') as f:NEWLINE html = f.read() # .split('\n')NEWLINE try:NEWLINE meta, content = re.split(r'\n-{3,}\n', html, maxsplit=1)NEWLINE except (IndexError, ValueError):NEWLINE return cls('parse error', '', '')NEWLINE meta = cls.parse_meta(meta)NEWLINE return cls(content, **meta)NEWLINENEWLINE def __repr__(self):NEWLINE return self.htmlNEWLINENEWLINE def _repr_html_(self):NEWLINE return self.nice_output()NEWLINENEWLINE def nice_output(self):NEWLINE html = f'''NEWLINE <div class="alert alert-warning" role="alert">NEWLINE {self.html}NEWLINENEWLINE <button type="button" class="close" data-dismiss="alert" aria-label="Close">NEWLINE <span aria-hidden="true">×</span>NEWLINE </button>NEWLINE </div>NEWLINE <p>NEWLINE Source: <a href="{self.ref_url}" target="_blank">{self.ref_name}</a>NEWLINE </p>NEWLINE '''NEWLINE return htmlNEWLINENEWLINENEWLINEdef random_tip():NEWLINE tip_list = pathlib.Path(__file__).parent / 'tip_files'NEWLINE tip_list = list(tip_list.iterdir())NEWLINE tip_file = random.choice(tip_list)NEWLINE tip = Tip.from_file(tip_file)NEWLINE return tipNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE random_tip()NEWLINE |
from django.db import modelsNEWLINEfrom django.contrib.auth.models import AbstractBaseUser, BaseUserManager, \NEWLINE PermissionsMixinNEWLINENEWLINEclass UserManager(BaseUserManager):NEWLINE def create_user(self, email, password=None, **extra_fields):NEWLINE """creates and saves a new user"""NEWLINE if not email:NEWLINE raise ValueError('Users must have an email address')NEWLINE user = self.model(email=self.normalize_email(email), **extra_fields)NEWLINE user.set_password(password)NEWLINE user.save(using=self._db)NEWLINE return userNEWLINENEWLINE def create_superuser(self, email, password):NEWLINE """Creates and saves a new super user"""NEWLINE user = self.create_user(email, password)NEWLINE user.is_staff = TrueNEWLINE user.is_superuser = TrueNEWLINE user.save(using= self._db)NEWLINE return user NEWLINENEWLINENEWLINEclass User(AbstractBaseUser,PermissionsMixin):NEWLINE """custom user model that supports using email insteadof username"""NEWLINE email = models.EmailField(max_length=255, unique=True)NEWLINE name = models.CharField(max_length=255)NEWLINE is_active = models.BooleanField(default=True)NEWLINE is_staff = models.BooleanField(default=False)NEWLINENEWLINE objects = UserManager()NEWLINENEWLINE USERNAME_FIELD = 'email' |
n = int(input())NEWLINEfor j in range(n):NEWLINE word = input()NEWLINE if len(word) <= 10:NEWLINE print(word)NEWLINE else:NEWLINE print(word[0] + str(len(word)-2) + word[-1])NEWLINE |
import torch

# tinydfa is only required by the DFA variant below; import it lazily so the
# plain backprop models stay importable when tinydfa is not installed.
# from tinydfa import DFA, DFALayer, FeedbackPointsHandling
try:
    from tinydfa.light_dfa import DFA, DFALayer
except ImportError:  # optional dependency
    DFA = DFALayer = None


class VeryTinyNeRFModel(torch.nn.Module):
    r"""Define a "very tiny" NeRF model comprising three fully connected layers.
    """

    def __init__(self, filter_size=128, num_encoding_functions=6, use_viewdirs=True):
        super(VeryTinyNeRFModel, self).__init__()
        self.num_encoding_functions = num_encoding_functions
        self.xyz_encoding_dims = 3 + 3 * 2 * num_encoding_functions
        if use_viewdirs is True:
            self.viewdir_encoding_dims = 3 + 3 * 2 * num_encoding_functions
        else:
            self.viewdir_encoding_dims = 0
        # Input layer (default: 65 -> 128)
        self.layer1 = torch.nn.Linear(
            self.xyz_encoding_dims + self.viewdir_encoding_dims, filter_size
        )
        # Layer 2 (default: 128 -> 128)
        self.layer2 = torch.nn.Linear(filter_size, filter_size)
        # Layer 3 (default: 128 -> 4): outputs (r, g, b, sigma)
        self.layer3 = torch.nn.Linear(filter_size, 4)
        # Short hand for torch.nn.functional.relu
        self.relu = torch.nn.functional.relu

    def forward(self, x):
        x = self.relu(self.layer1(x))
        x = self.relu(self.layer2(x))
        x = self.layer3(x)
        return x


class MultiHeadNeRFModel(torch.nn.Module):
    r"""Define a "multi-head" NeRF model (radiance and RGB colors are predicted by
    separate heads).
    """

    def __init__(self, hidden_size=128, num_encoding_functions=6, use_viewdirs=True):
        super(MultiHeadNeRFModel, self).__init__()
        self.num_encoding_functions = num_encoding_functions
        self.xyz_encoding_dims = 3 + 3 * 2 * num_encoding_functions
        if use_viewdirs is True:
            self.viewdir_encoding_dims = 3 + 3 * 2 * num_encoding_functions
        else:
            self.viewdir_encoding_dims = 0
        # Input layer (default: 39 -> 128)
        self.layer1 = torch.nn.Linear(self.xyz_encoding_dims, hidden_size)
        # Layer 2 (default: 128 -> 128)
        self.layer2 = torch.nn.Linear(hidden_size, hidden_size)
        # Layer 3_1 (default: 128 -> 1): Predicts radiance ("sigma")
        self.layer3_1 = torch.nn.Linear(hidden_size, 1)
        # Layer 3_2 (default: 128 -> 128): Predicts a feature vector (used for color)
        self.layer3_2 = torch.nn.Linear(hidden_size, hidden_size)

        # Layer 4 (default: 39 + 128 -> 128)
        self.layer4 = torch.nn.Linear(
            self.viewdir_encoding_dims + hidden_size, hidden_size
        )
        # Layer 5 (default: 128 -> 128)
        self.layer5 = torch.nn.Linear(hidden_size, hidden_size)
        # Layer 6 (default: 128 -> 3): Predicts RGB color
        self.layer6 = torch.nn.Linear(hidden_size, 3)

        # Short hand for torch.nn.functional.relu
        self.relu = torch.nn.functional.relu

    def forward(self, x):
        # Split the concatenated input into xyz and view-direction encodings.
        x, view = x[..., : self.xyz_encoding_dims], x[..., self.xyz_encoding_dims :]
        x = self.relu(self.layer1(x))
        x = self.relu(self.layer2(x))
        sigma = self.layer3_1(x)
        feat = self.relu(self.layer3_2(x))
        x = torch.cat((feat, view), dim=-1)
        x = self.relu(self.layer4(x))
        x = self.relu(self.layer5(x))
        x = self.layer6(x)
        return torch.cat((x, sigma), dim=-1)


class ReplicateNeRFModel(torch.nn.Module):
    r"""NeRF model that follows the figure (from the supp. material of NeRF) to
    every last detail. (ofc, with some flexibility)
    """

    def __init__(
        self,
        hidden_size=256,
        num_layers=4,
        num_encoding_fn_xyz=6,
        num_encoding_fn_dir=4,
        include_input_xyz=True,
        include_input_dir=True,
    ):
        super(ReplicateNeRFModel, self).__init__()
        # xyz_encoding_dims = 3 + 3 * 2 * num_encoding_functions

        self.dim_xyz = (3 if include_input_xyz else 0) + 2 * 3 * num_encoding_fn_xyz
        self.dim_dir = (3 if include_input_dir else 0) + 2 * 3 * num_encoding_fn_dir

        self.layer1 = torch.nn.Linear(self.dim_xyz, hidden_size)
        self.layer2 = torch.nn.Linear(hidden_size, hidden_size)
        self.layer3 = torch.nn.Linear(hidden_size, hidden_size)
        self.fc_alpha = torch.nn.Linear(hidden_size, 1)

        self.layer4 = torch.nn.Linear(hidden_size + self.dim_dir, hidden_size // 2)
        self.layer5 = torch.nn.Linear(hidden_size // 2, hidden_size // 2)
        self.fc_rgb = torch.nn.Linear(hidden_size // 2, 3)
        self.relu = torch.nn.functional.relu

    def forward(self, x):
        xyz, direction = x[..., : self.dim_xyz], x[..., self.dim_xyz :]
        x_ = self.relu(self.layer1(xyz))
        x_ = self.relu(self.layer2(x_))
        feat = self.layer3(x_)
        alpha = self.fc_alpha(x_)
        y_ = self.relu(self.layer4(torch.cat((feat, direction), dim=-1)))
        y_ = self.relu(self.layer5(y_))
        rgb = self.fc_rgb(y_)
        return torch.cat((rgb, alpha), dim=-1)


class PaperNeRFModel(torch.nn.Module):
    r"""Implements the NeRF model as described in Fig. 7 (appendix) of the
    arXiv submission (v0).
    """

    def __init__(
        self,
        num_layers=8,
        hidden_size=256,
        skip_connect_every=4,
        num_encoding_fn_xyz=6,
        num_encoding_fn_dir=4,
        include_input_xyz=True,
        include_input_dir=True,
        use_viewdirs=True,
    ):
        super(PaperNeRFModel, self).__init__()

        include_input_xyz = 3 if include_input_xyz else 0
        include_input_dir = 3 if include_input_dir else 0
        self.dim_xyz = include_input_xyz + 2 * 3 * num_encoding_fn_xyz
        self.dim_dir = include_input_dir + 2 * 3 * num_encoding_fn_dir

        self.layers_xyz = torch.nn.ModuleList()
        self.use_viewdirs = use_viewdirs
        self.layers_xyz.append(torch.nn.Linear(self.dim_xyz, 256))
        for i in range(1, 8):
            if i == 4:
                # Skip connection: re-inject the xyz encoding at layer 4.
                self.layers_xyz.append(torch.nn.Linear(self.dim_xyz + 256, 256))
            else:
                self.layers_xyz.append(torch.nn.Linear(256, 256))
        self.fc_feat = torch.nn.Linear(256, 256)
        self.fc_alpha = torch.nn.Linear(256, 1)

        self.layers_dir = torch.nn.ModuleList()
        self.layers_dir.append(torch.nn.Linear(256 + self.dim_dir, 128))
        for i in range(3):
            self.layers_dir.append(torch.nn.Linear(128, 128))
        self.fc_rgb = torch.nn.Linear(128, 3)
        self.relu = torch.nn.functional.relu

    def forward(self, x):
        xyz, dirs = x[..., : self.dim_xyz], x[..., self.dim_xyz :]
        for i in range(8):
            if i == 4:
                x = self.layers_xyz[i](torch.cat((xyz, x), -1))
            else:
                x = self.layers_xyz[i](x)
            x = self.relu(x)
        feat = self.fc_feat(x)
        alpha = self.fc_alpha(feat)
        if self.use_viewdirs:
            x = self.layers_dir[0](torch.cat((feat, dirs), -1))
        else:
            x = self.layers_dir[0](feat)
        x = self.relu(x)
        for i in range(1, 3):
            x = self.layers_dir[i](x)
            x = self.relu(x)
        rgb = self.fc_rgb(x)
        return torch.cat((rgb, alpha), dim=-1)


class FlexibleNeRFModel(torch.nn.Module):
    """Configurable NeRF MLP with optional view-direction conditioning and
    periodic skip connections back to the xyz encoding.
    """

    def __init__(
        self,
        num_layers=4,
        hidden_size=128,
        skip_connect_every=4,
        num_encoding_fn_xyz=6,
        num_encoding_fn_dir=4,
        include_input_xyz=True,
        include_input_dir=True,
        use_viewdirs=True,
    ):
        super(FlexibleNeRFModel, self).__init__()

        include_input_xyz = 3 if include_input_xyz else 0
        include_input_dir = 3 if include_input_dir else 0
        self.dim_xyz = include_input_xyz + 2 * 3 * num_encoding_fn_xyz
        self.dim_dir = include_input_dir + 2 * 3 * num_encoding_fn_dir
        self.skip_connect_every = skip_connect_every
        if not use_viewdirs:
            self.dim_dir = 0

        self.layer1 = torch.nn.Linear(self.dim_xyz, hidden_size)
        self.layers_xyz = torch.nn.ModuleList()
        for i in range(num_layers - 1):
            if i % self.skip_connect_every == 0 and i > 0 and i != num_layers - 1:
                # Skip-connection layer: consumes hidden state + xyz encoding.
                self.layers_xyz.append(
                    torch.nn.Linear(self.dim_xyz + hidden_size, hidden_size)
                )
            else:
                self.layers_xyz.append(torch.nn.Linear(hidden_size, hidden_size))

        self.use_viewdirs = use_viewdirs
        if self.use_viewdirs:
            self.layers_dir = torch.nn.ModuleList()
            # This deviates from the original paper, and follows the code release instead.
            self.layers_dir.append(
                torch.nn.Linear(self.dim_dir + hidden_size, hidden_size // 2)
            )

            self.fc_alpha = torch.nn.Linear(hidden_size, 1)
            self.fc_rgb = torch.nn.Linear(hidden_size // 2, 3)
            self.fc_feat = torch.nn.Linear(hidden_size, hidden_size)
        else:
            self.fc_out = torch.nn.Linear(hidden_size, 4)

        self.relu = torch.nn.functional.relu

    def forward(self, x):
        if self.use_viewdirs:
            xyz, view = x[..., : self.dim_xyz], x[..., self.dim_xyz :]
        else:
            xyz = x[..., : self.dim_xyz]
        x = self.layer1(xyz)  # Missing a ReLU (?) -- kept as-is to preserve the released behavior
        for i in range(len(self.layers_xyz)):
            if (
                i % self.skip_connect_every == 0
                and i > 0
                # BUGFIX: was `self.linear_layers`, an attribute that does not
                # exist -- raised AttributeError whenever a skip connection
                # actually triggered. `layers_xyz` is the real ModuleList.
                # NOTE(review): __init__ builds skip layers with the condition
                # `i != num_layers - 1` (always true here) while this uses
                # `i != len(self.layers_xyz) - 1`; the two disagree when a skip
                # falls on the last hidden layer -- confirm intended configs.
                and i != len(self.layers_xyz) - 1
            ):
                x = torch.cat((x, xyz), dim=-1)
            x = self.relu(self.layers_xyz[i](x))
        if self.use_viewdirs:
            feat = self.relu(self.fc_feat(x))
            alpha = self.fc_alpha(x)
            x = torch.cat((feat, view), dim=-1)
            for l in self.layers_dir:
                x = self.relu(l(x))
            rgb = self.fc_rgb(x)
            return torch.cat((rgb, alpha), dim=-1)
        else:
            return self.fc_out(x)


class DFAFlexibleNeRFModel(torch.nn.Module):
    """FlexibleNeRFModel variant trained with Direct Feedback Alignment
    (tinydfa) instead of full backpropagation. Requires tinydfa.
    """

    def __init__(self, num_layers=4, hidden_size=128, skip_connect_every=4, num_encoding_fn_xyz=6,
                 num_encoding_fn_dir=4, include_input_xyz=True, include_input_dir=True, use_viewdirs=True,):
        super(DFAFlexibleNeRFModel, self).__init__()

        # Determine the inputs:
        include_input_xyz = 3 if include_input_xyz else 0  # Add raw xyz coordinates
        include_input_dir = 3 if include_input_dir else 0  # Add raw viewing angle (specularity)
        self.dim_xyz = include_input_xyz + 2 * 3 * num_encoding_fn_xyz  # Total xyz input: raw? + embedding

        self.use_viewdirs = use_viewdirs  # Are we using view direction? (specularity)
        if not self.use_viewdirs:
            self.dim_dir = 0
        else:
            self.dim_dir = include_input_dir + 2 * 3 * num_encoding_fn_dir

        # Network layers
        self.layer1 = torch.nn.Linear(self.dim_xyz, hidden_size)  # Input layer
        self.dfa1 = DFALayer(name='dfa1')
        # First stack of layers, using only xyz coordinates:
        self.layers_xyz = torch.nn.ModuleList()
        self.dfa_xyz = torch.nn.ModuleList()
        self.skip_connect_every = skip_connect_every
        for i in range(num_layers - 1):
            if i % self.skip_connect_every == 0 and i > 0 and i != num_layers - 1:
                # Handle skip-connection.
                self.layers_xyz.append(torch.nn.Linear(self.dim_xyz + hidden_size, hidden_size))
            else:
                self.layers_xyz.append(torch.nn.Linear(hidden_size, hidden_size))
            self.dfa_xyz.append(DFALayer(name=f'dfa_xyz{i}'))

        if self.use_viewdirs:
            self.fc_alpha = torch.nn.Linear(hidden_size, 1)  # Transparency output at top of xyz stack

            self.fc_feat = torch.nn.Linear(hidden_size, hidden_size)  # Link between angle stack and xyz stack
            self.dfa_feat = DFALayer(name='dfa_feat')

            # Second stack of layers, using viewing angle:
            self.layers_dir = torch.nn.ModuleList()
            # This deviates from the original paper, and follows the code release instead.
            self.layers_dir.append(
                torch.nn.Linear(self.dim_dir + hidden_size, hidden_size // 2)
            )
            self.dfa_dir = DFALayer(name='dfa_dir')

            self.fc_rgb = torch.nn.Linear(hidden_size // 2, 3)  # RGB color output, at top of viewing angle stack
        else:
            # If not using viewing angle, go straight to (transparency, r, g, b) output:
            self.fc_out = torch.nn.Linear(hidden_size, 4)

        self.relu = torch.nn.functional.relu

        # BUGFIX: dfa_feat/dfa_dir only exist when use_viewdirs is True; the
        # original referenced them unconditionally and raised AttributeError
        # for use_viewdirs=False.
        if self.use_viewdirs:
            self.dfa_layers = [self.dfa1, *self.dfa_xyz, self.dfa_feat, self.dfa_dir]
        else:
            self.dfa_layers = [self.dfa1, *self.dfa_xyz]
        self.dfa = DFA(self.dfa_layers)  # feedback_points_handling=FeedbackPointsHandling.MINIBATCH)

    def forward(self, x):
        # Separate the xyz and viewing angle embeddings
        if self.use_viewdirs:
            xyz, view = x[..., :self.dim_xyz], x[..., self.dim_xyz:]
        else:
            xyz = x[..., :self.dim_xyz]

        x = self.dfa1(self.relu(self.layer1(xyz)))  # Go through first layer
        # Go through xyz stack:
        for i in range(len(self.layers_xyz)):
            # BUGFIX: was `self.linear_layers` (undefined attribute) -- see
            # FlexibleNeRFModel.forward for the same fix.
            if (i % self.skip_connect_every == 0 and i > 0 and i != len(self.layers_xyz) - 1):
                # Handle skip connection
                x = torch.cat((x, xyz), dim=-1)
            x = self.dfa_xyz[i](self.relu(self.layers_xyz[i](x)))  # Go through layer

        if self.use_viewdirs:
            alpha = self.fc_alpha(x)  # Output alpha (transparency value)
            # Prepare for viewing angle stack:
            feat = self.dfa_feat(self.relu(self.fc_feat(x)))  # Link between xyz/viewing angle stack
            x = torch.cat((feat, view), dim=-1)  # Add viewing angle information
            for l in self.layers_dir:
                # Go through viewing angle stack (proper):
                x = self.dfa_dir(self.relu(l(x)))
            rgb = self.fc_rgb(x)  # Output rgb value
            return self.dfa(torch.cat((rgb, alpha), dim=-1))
        else:
            return self.dfa(self.fc_out(x))
"""NEWLINEWSGI config for DjangoECom project.NEWLINENEWLINEIt exposes the WSGI callable as a module-level variable named ``application``.NEWLINENEWLINEFor more information on this file, seeNEWLINEhttps://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/NEWLINE"""NEWLINENEWLINEimport osNEWLINENEWLINEfrom django.core.wsgi import get_wsgi_applicationNEWLINENEWLINEos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'DjangoECom.settings')NEWLINENEWLINEapplication = get_wsgi_application()NEWLINE |
# import the generic views you want, and the models NEWLINE# they apply to.NEWLINEfrom django.views.generic import ListViewNEWLINENEWLINE# Import the models you want to use.NEWLINEfrom snippets.models import SnippetNEWLINENEWLINE# Create a class for your model that subclassesNEWLINE# the generic view you want. This serves as anNEWLINE# index view.NEWLINEclass SnippetListView(ListView):NEWLINE # Finally, tell the generic view what modelNEWLINE # it applies to, and which template to use.NEWLINE model = SnippetNEWLINE template_name = 'snippets/index.html'NEWLINENEWLINE# ==============================================NEWLINENEWLINE# In your urls.py, you'll need to update the NEWLINE# corresponding route. It'll look like this.NEWLINEurls(r'^index/$', views.SnippetListView.as_view())NEWLINE |
# Copyright © 2019 Province of British ColumbiaNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE"""The unique worker functionality for this service is contained here.NEWLINENEWLINEThe entry-point is the **cb_subscription_handler**NEWLINENEWLINEThe design and flow leverage a few constraints that are placed upon itNEWLINEby NATS Streaming and using AWAIT on the default loop.NEWLINE- NATS streaming queues require one message to be processed at a time.NEWLINE- AWAIT on the default loop effectively runs synchronouslyNEWLINENEWLINEIf these constraints change, the use of Flask-SQLAlchemy would need to change.NEWLINEFlask-SQLAlchemy currently allows the base model to be changed, or reworkingNEWLINEthe model to a standalone SQLAlchemy usage with an async engine would needNEWLINEto be pursued.NEWLINE"""NEWLINEimport asyncioNEWLINEimport datetimeNEWLINEimport jsonNEWLINEimport osNEWLINENEWLINEimport natsNEWLINEfrom flask import FlaskNEWLINEfrom legal_api import dbNEWLINEfrom legal_api.models import FilingNEWLINEfrom sentry_sdk import capture_messageNEWLINEfrom sqlalchemy.exc import OperationalErrorNEWLINEfrom entity_queue_common.messages import create_filing_msgNEWLINEfrom entity_queue_common.service import QueueServiceManagerNEWLINEfrom entity_queue_common.service_utils import FilingException, QueueException, loggerNEWLINENEWLINEfrom entity_pay import configNEWLINENEWLINEqsm = QueueServiceManager() # pylint: 
disable=invalid-nameNEWLINEAPP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production'))NEWLINEFLASK_APP = Flask(__name__)NEWLINEFLASK_APP.config.from_object(APP_CONFIG)NEWLINEdb.init_app(FLASK_APP)NEWLINENEWLINENEWLINEdef extract_payment_token(msg: nats.aio.client.Msg) -> dict:NEWLINE """Return a dict of the json string in the Msg.data."""NEWLINE return json.loads(msg.data.decode('utf-8'))NEWLINENEWLINENEWLINEdef get_filing_by_payment_id(payment_id: int) -> Filing:NEWLINE """Return the outcome of Filing.get_filing_by_payment_token."""NEWLINE return Filing.get_filing_by_payment_token(str(payment_id))NEWLINENEWLINENEWLINEasync def publish_filing(filing: Filing):NEWLINE """Publish the filing message onto the NATS filing subject."""NEWLINE payload = create_filing_msg(filing.id)NEWLINE subject = APP_CONFIG.FILER_PUBLISH_OPTIONS['subject']NEWLINENEWLINE await qsm.service.publish(subject, payload)NEWLINENEWLINENEWLINEasync def process_payment(payment_token, flask_app):NEWLINE """Render the payment status."""NEWLINE if not flask_app:NEWLINE raise QueueException('Flask App not available.')NEWLINENEWLINE with flask_app.app_context():NEWLINENEWLINE # try to find the filing 5 times before putting back on the queue - in case payment token ends up on the queueNEWLINE # before it is assigned to filing.NEWLINE counter = 1NEWLINE filing_submission = NoneNEWLINE while not filing_submission and counter <= 5:NEWLINE filing_submission = get_filing_by_payment_id(payment_token['paymentToken'].get('id'))NEWLINE counter += 1NEWLINE if not filing_submission:NEWLINE await asyncio.sleep(0.2)NEWLINE if not filing_submission:NEWLINE raise FilingExceptionNEWLINENEWLINE if filing_submission.status == Filing.Status.COMPLETED.value:NEWLINE # log and skip thisNEWLINE # it shouldn't be an error, but there could be something to investigate ifNEWLINE # multiple retries are happening on something that should have been completed.NEWLINE logger.warning('Queue: Attempting to reprocess 
business.id=%s, filing.id=%s payment=%s',NEWLINE filing_submission.business_id, filing_submission.id, payment_token)NEWLINE capture_message(f'Queue Issue: Attempting to reprocess business.id={filing_submission.business_id},'NEWLINE 'filing.id={filing_submission.id} payment={payment_token}')NEWLINE returnNEWLINENEWLINE if payment_token['paymentToken'].get('statusCode') == 'TRANSACTION_FAILED':NEWLINE # TODO: The customer has cancelled out of paying, so we could note this betterNEWLINE # technically the filing is still pending payment/processingNEWLINE returnNEWLINENEWLINE if payment_token['paymentToken'].get('statusCode') == Filing.Status.COMPLETED.value:NEWLINE filing_submission.payment_completion_date = datetime.datetime.utcnow()NEWLINE db.session.add(filing_submission)NEWLINE db.session.commit()NEWLINENEWLINE if not filing_submission.effective_date or \NEWLINE filing_submission.effective_date <= datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc):NEWLINE # if we're not a future effective date, then submit for processingNEWLINE try:NEWLINE await publish_filing(filing_submission)NEWLINE except Exception as err: # pylint: disable=broad-except, unused-variable # noqa F841;NEWLINE # mark any failure for human reviewNEWLINE capture_message('Queue Error: Failied to place filing:{filing_submission.id} on Queue with error:{err}',NEWLINE level='error')NEWLINENEWLINE returnNEWLINENEWLINE # if we're here and haven't been able to action it,NEWLINE # then we've received an unknown status and should throw an errorNEWLINE logger.error('Unknown payment status given: %s', payment_token['paymentToken'].get('statusCode'))NEWLINE raise QueueExceptionNEWLINENEWLINENEWLINEasync def cb_subscription_handler(msg: nats.aio.client.Msg):NEWLINE """Use Callback to process Queue Msg objects."""NEWLINE try:NEWLINE logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode())NEWLINE payment_token = extract_payment_token(msg)NEWLINE logger.debug('Extracted 
payment token: %s', payment_token)NEWLINE await process_payment(payment_token, FLASK_APP)NEWLINE except OperationalError as err:NEWLINE logger.error('Queue Blocked - Database Issue: %s', json.dumps(payment_token), exc_info=True)NEWLINE raise err # We don't want to handle the error, as a DB down would drain the queueNEWLINE except FilingException as err:NEWLINE logger.error('Queue Error - cannot find filing: %s'NEWLINE '\n\nThis message has been put back on the queue for reprocessing.',NEWLINE json.dumps(payment_token), exc_info=True)NEWLINE raise err # we don't want to handle the error, so that the message gets put back on the queueNEWLINE except (QueueException, Exception): # pylint: disable=broad-exceptNEWLINE # Catch Exception so that any error is still caught and the message is removed from the queueNEWLINE capture_message('Queue Error:' + json.dumps(payment_token), level='error')NEWLINE logger.error('Queue Error: %s', json.dumps(payment_token), exc_info=True)NEWLINE |
import pyaf.Bench.TS_datasets as tsdsNEWLINEimport tests.artificial.process_artificial_dataset as artNEWLINENEWLINENEWLINENEWLINENEWLINEart.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 7, transform = "RelativeDifference", sigma = 0.0, exog_count = 0, ar_order = 12); |
# coding=utf-8NEWLINE# Copyright 2020 The TensorFlow GAN Authors.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE"""Trains a CycleGAN model."""NEWLINENEWLINEfrom __future__ import absolute_importNEWLINEfrom __future__ import divisionNEWLINEfrom __future__ import print_functionNEWLINENEWLINEfrom absl import appNEWLINEfrom absl import flagsNEWLINENEWLINEimport tensorflow.compat.v1 as tfNEWLINEfrom tensorflow_gan.examples.cyclegan import train_libNEWLINENEWLINEflags.DEFINE_string('image_set_x_file_pattern', None,NEWLINE 'File pattern of images in image set X')NEWLINEflags.DEFINE_string('image_set_y_file_pattern', None,NEWLINE 'File pattern of images in image set Y')NEWLINEflags.DEFINE_integer('batch_size', 1, 'The number of images in each batch.')NEWLINEflags.DEFINE_integer('patch_size', 64, 'The patch size of images.')NEWLINEflags.DEFINE_string('master', '', 'Name of the TensorFlow master to use.')NEWLINEflags.DEFINE_string('train_log_dir', '/tmp/tfgan_logdir/cyclegan/',NEWLINE 'Directory where to write event logs.')NEWLINEflags.DEFINE_float('generator_lr', 0.0002,NEWLINE 'The compression model learning rate.')NEWLINEflags.DEFINE_float('discriminator_lr', 0.0001,NEWLINE 'The discriminator learning rate.')NEWLINEflags.DEFINE_integer('max_number_of_steps', 500000,NEWLINE 'The maximum number of gradient steps.')NEWLINEflags.DEFINE_integer(NEWLINE 'ps_replicas', 0,NEWLINE 'The number of parameter 
servers. If the value is 0, then the parameters 'NEWLINE 'are handled locally by the worker.')NEWLINEflags.DEFINE_integer(NEWLINE 'task', 0,NEWLINE 'The Task ID. This value is used when training with multiple workers to 'NEWLINE 'identify each worker.')NEWLINEflags.DEFINE_float('cycle_consistency_loss_weight', 10.0,NEWLINE 'The weight of cycle consistency loss')NEWLINENEWLINEFLAGS = flags.FLAGSNEWLINENEWLINENEWLINEdef main(_):NEWLINE hparams = train_lib.HParams(NEWLINE FLAGS.image_set_x_file_pattern, FLAGS.image_set_y_file_pattern,NEWLINE FLAGS.batch_size, FLAGS.patch_size, FLAGS.master, FLAGS.train_log_dir,NEWLINE FLAGS.generator_lr, FLAGS.discriminator_lr, FLAGS.max_number_of_steps,NEWLINE FLAGS.ps_replicas, FLAGS.task, FLAGS.cycle_consistency_loss_weight)NEWLINE train_lib.train(hparams)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE tf.disable_v2_behavior()NEWLINE app.run(main)NEWLINE |
#NEWLINE# Copyright (c) 2016-2021 JEP AUTHORS.NEWLINE#NEWLINE# This file is licensed under the the zlib/libpng License.NEWLINE#NEWLINE# This software is provided 'as-is', without any express or impliedNEWLINE# warranty. In no event will the authors be held liable for anyNEWLINE# damages arising from the use of this software.NEWLINE# NEWLINE# Permission is granted to anyone to use this software for anyNEWLINE# purpose, including commercial applications, and to alter it andNEWLINE# redistribute it freely, subject to the following restrictions:NEWLINE# NEWLINE# 1. The origin of this software must not be misrepresented; youNEWLINE# must not claim that you wrote the original software. If you useNEWLINE# this software in a product, an acknowledgment in the productNEWLINE# documentation would be appreciated but is not required.NEWLINE# NEWLINE# 2. Altered source versions must be plainly marked as such, andNEWLINE# must not be misrepresented as being the original software.NEWLINE# NEWLINE# 3. This notice may not be removed or altered from any sourceNEWLINE# distribution.NEWLINE#NEWLINENEWLINEimport sysNEWLINENEWLINEclass StreamRedirect(object):NEWLINE "Redirects a Python output stream to a Java OutputStream"NEWLINENEWLINE def __init__(self, javaOutputStream):NEWLINE from java.io import PrintStreamNEWLINE self.printstream = PrintStream(javaOutputStream)NEWLINE self.printmethod = getattr(self.printstream, 'print')NEWLINE self.flushmethod = getattr(self.printstream, 'flush')NEWLINENEWLINE def write(self, msg):NEWLINE self.printmethod(msg)NEWLINENEWLINE def flush(self):NEWLINE self.flushmethod()NEWLINENEWLINEdef redirectStdout(javaOutputStream):NEWLINE sys.stdout = StreamRedirect(javaOutputStream)NEWLINENEWLINEdef redirectStderr(javaOutputStream):NEWLINE sys.stderr = StreamRedirect(javaOutputStream)NEWLINE |
#!/usr/bin/env pythonNEWLINE# -*- coding: utf-8 -*-NEWLINE# **************************************************************************NEWLINE# Copyright © 2016 jianglinNEWLINE# File Name: __init__.pyNEWLINE# Author: jianglinNEWLINE# Email: xiyang0807@gmail.comNEWLINE# Created: 2016-11-25 17:45:36 (CST)NEWLINE# Last Update:星期五 2016-11-25 17:45:36 (CST)NEWLINE# By:NEWLINE# Description:NEWLINE# **************************************************************************NEWLINE |
from typing import ListNEWLINENEWLINEfrom .en import FILTERS as EN_FILTERSNEWLINEfrom .de import FILTERS as DE_FILTERSNEWLINEfrom .fr import FILTERS as FR_FILTERSNEWLINEfrom .es import FILTERS as ES_FILTERSNEWLINEfrom .mx import FILTERS as MX_FILTERSNEWLINEfrom .ru import FILTERS as RU_FILTERSNEWLINEfrom .cn import FILTERS as CN_FILTERSNEWLINEfrom .pt import FILTERS as PT_FILTERSNEWLINEfrom .ko import FILTERS as KO_FILTERSNEWLINENEWLINENEWLINEdef get_filter_list_by_lang(lang: str) -> List[str]:NEWLINE if lang == "en":NEWLINE return EN_FILTERSNEWLINE elif lang == "de":NEWLINE return DE_FILTERSNEWLINE elif lang == "fr":NEWLINE return FR_FILTERSNEWLINE elif lang == "es":NEWLINE return ES_FILTERSNEWLINE elif lang == "mx":NEWLINE return MX_FILTERSNEWLINE elif lang == "ru":NEWLINE return RU_FILTERSNEWLINE elif lang == "cn":NEWLINE return CN_FILTERSNEWLINE elif lang == "pt":NEWLINE return PT_FILTERSNEWLINE elif lang == "ko":NEWLINE return KO_FILTERSNEWLINE else:NEWLINE raise ValueError("Language '{}' not supported".format(lang))NEWLINE |
#ABC089eNEWLINEimport sysNEWLINEinput = sys.stdin.readlineNEWLINEsys.setrecursionlimit(10**6)NEWLINE |
from __future__ import absolute_importNEWLINENEWLINEimport sixNEWLINEimport pytzNEWLINENEWLINEfrom datetime import datetimeNEWLINENEWLINEfrom sentry.coreapi import APIUnauthorizedNEWLINEfrom sentry.mediators import Mediator, ParamNEWLINEfrom sentry.mediators.token_exchange.validator import ValidatorNEWLINEfrom sentry.mediators.token_exchange.util import token_expirationNEWLINEfrom sentry.models import ApiApplication, ApiToken, SentryAppNEWLINEfrom sentry.utils.cache import memoizeNEWLINENEWLINENEWLINEclass Refresher(Mediator):NEWLINE """NEWLINE Exchanges a Refresh Token for a new Access TokenNEWLINE """NEWLINENEWLINE install = Param('sentry.models.SentryAppInstallation')NEWLINE refresh_token = Param(six.string_types)NEWLINE client_id = Param(six.string_types)NEWLINE user = Param('sentry.models.User')NEWLINENEWLINE def call(self):NEWLINE self._validate()NEWLINE self._expire_token()NEWLINENEWLINE return ApiToken.objects.create(NEWLINE user=self.user,NEWLINE application=self.application,NEWLINE scope_list=self.sentry_app.scope_list,NEWLINE expires_at=token_expiration(),NEWLINE )NEWLINENEWLINE def _validate(self):NEWLINE Validator.run(NEWLINE install=self.install,NEWLINE client_id=self.client_id,NEWLINE user=self.user,NEWLINE )NEWLINENEWLINE self._validate_token_belongs_to_app()NEWLINE self._validate_token_is_active()NEWLINENEWLINE def _validate_token_belongs_to_app(self):NEWLINE if self.token.application != self.application:NEWLINE raise APIUnauthorizedNEWLINENEWLINE def _validate_token_is_active(self):NEWLINE if self.token.expires_at < datetime.utcnow().replace(tzinfo=pytz.UTC):NEWLINE raise APIUnauthorizedNEWLINENEWLINE def _expire_token(self):NEWLINE self.token.update(expires_at=datetime.utcnow())NEWLINENEWLINE @memoizeNEWLINE def token(self):NEWLINE try:NEWLINE return ApiToken.objects.get(refresh_token=self.refresh_token)NEWLINE except ApiToken.DoesNotExist:NEWLINE raise APIUnauthorizedNEWLINENEWLINE @memoizeNEWLINE def application(self):NEWLINE try:NEWLINE return 
ApiApplication.objects.get(client_id=self.client_id)NEWLINE except ApiApplication.DoesNotExist:NEWLINE raise APIUnauthorizedNEWLINENEWLINE @propertyNEWLINE def sentry_app(self):NEWLINE try:NEWLINE return self.application.sentry_appNEWLINE except SentryApp.DoesNotExist:NEWLINE raise APIUnauthorizedNEWLINE |
# -*- coding: utf-8 -*-NEWLINE# Copyright 2020 Google Inc.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE# Make sure that your AWS credentials are configured correclty, seeNEWLINE# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html #pylint: disable=line-too-longNEWLINE"""Demo CLI tool for AWS."""NEWLINENEWLINEfrom datetime import datetimeNEWLINEfrom typing import TYPE_CHECKINGNEWLINENEWLINEfrom libcloudforensics.providers.aws.internal import accountNEWLINEfrom libcloudforensics.providers.aws.internal import log as aws_logNEWLINEfrom libcloudforensics.providers.aws import forensicsNEWLINENEWLINEif TYPE_CHECKING:NEWLINE import argparseNEWLINENEWLINENEWLINEdef ListInstances(args: 'argparse.Namespace') -> None:NEWLINE """List EC2 instances in AWS account.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from ArgumentParser.NEWLINE """NEWLINENEWLINE aws_account = account.AWSAccount(args.zone)NEWLINE instances = aws_account.ListInstances()NEWLINENEWLINE print('Instances found:')NEWLINE for instance in instances:NEWLINE boot_volume = instances[instance].GetBootVolume().volume_idNEWLINE print('Name: {0:s}, Boot volume: {1:s}'.format(instance, boot_volume))NEWLINENEWLINENEWLINEdef ListVolumes(args: 'argparse.Namespace') -> None:NEWLINE """List EBS volumes in AWS account.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from 
ArgumentParser.NEWLINE """NEWLINENEWLINE aws_account = account.AWSAccount(args.zone)NEWLINE volumes = aws_account.ListVolumes()NEWLINENEWLINE print('Volumes found:')NEWLINE for volume in volumes:NEWLINE print('Name: {0:s}, Zone: {1:s}'.format(NEWLINE volume, volumes[volume].availability_zone))NEWLINENEWLINENEWLINEdef CreateVolumeCopy(args: 'argparse.Namespace') -> None:NEWLINE """Create a AWS Volume copy.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from ArgumentParser.NEWLINE """NEWLINE print('Starting volume copy...')NEWLINE volume_copy = forensics.CreateVolumeCopy(args.zone,NEWLINE dst_zone=args.dst_zone,NEWLINE instance_id=args.instance_id,NEWLINE volume_id=args.volume_id,NEWLINE src_profile=args.src_profile,NEWLINE dst_profile=args.dst_profile)NEWLINE print(NEWLINE 'Done! Volume {0:s} successfully created. You will find it in 'NEWLINE 'your AWS account under the name {1:s}.'.format(NEWLINE volume_copy.volume_id, volume_copy.name))NEWLINENEWLINENEWLINEdef QueryLogs(args: 'argparse.Namespace') -> None:NEWLINE """Query AWS CloudTrail log events.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from ArgumentParser.NEWLINE """NEWLINE ct = aws_log.AWSCloudTrail(account.AWSAccount(args.zone))NEWLINENEWLINE params = {}NEWLINE if args.filter:NEWLINE params['qfilter'] = args.filterNEWLINE if args.start:NEWLINE params['starttime'] = datetime.strptime(args.start, '%Y-%m-%d %H:%M:%S')NEWLINE if args.end:NEWLINE params['endtime'] = datetime.strptime(args.end, '%Y-%m-%d %H:%M:%S')NEWLINENEWLINE result = ct.LookupEvents(**params)NEWLINENEWLINE if result:NEWLINE print('Log events found: {0:d}'.format(len(result)))NEWLINE for event in result:NEWLINE print(event)NEWLINE |
# coding=utf-8NEWLINEimport shelveNEWLINENEWLINENEWLINEdef store_person(db): # 存储用户输入数据到shelf对象中NEWLINE pid = input('Enter unique ID number: ')NEWLINE person = {}NEWLINE person['name'] = input('Enter name: ')NEWLINE person['age'] = input('Enter age: ')NEWLINE person['phone'] = input('Enter phone number: ')NEWLINE db[pid] = personNEWLINENEWLINENEWLINEdef lookup_person(db): # 从shelf对象中查询数据NEWLINE pid = input('Enter ID number: ')NEWLINE field = input('What would you like to know? (name, age, phone) ')NEWLINE field = field.strip().lower() # 忽略大小写NEWLINE try:NEWLINE print(field.capitalize() + ':', db[pid][field])NEWLINE except KeyError:NEWLINE print("No record.")NEWLINENEWLINENEWLINEdef print_help(): # 帮助信息NEWLINE print('The available commands are:')NEWLINE print('store : Stores information about a person')NEWLINE print('lookup : Looks up a person from ID number')NEWLINE print('quit : Save changes and exit')NEWLINE print('? : Prints this message')NEWLINENEWLINENEWLINEdef enter_command(): # 获取用户输入的命令NEWLINE cmd = input('Enter command (? 
for help): ')NEWLINE cmd = cmd.strip().lower() # 忽略大小写NEWLINE return cmdNEWLINENEWLINENEWLINEdef main():NEWLINE database = shelve.open('Chapter18_shelve_db') # 打开一个数据库文件NEWLINE try:NEWLINE while True:NEWLINE cmd = enter_command()NEWLINE if cmd == 'store':NEWLINE store_person(database)NEWLINE elif cmd == 'lookup':NEWLINE lookup_person(database)NEWLINE elif cmd == '?':NEWLINE print_help()NEWLINE elif cmd == 'quit':NEWLINE returnNEWLINE finally:NEWLINE database.close() # 关闭数据库NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE main()NEWLINENEWLINE# ### 标准库shelve模块NEWLINE# - Python object persistenceNEWLINE# - 官方文档:https://docs.python.org/3/library/shelve.htmlNEWLINE# - shelve模块可以简单地将数据存储在文件中;NEWLINE#NEWLINE# ### 文件的打开与关闭NEWLINE# shelve.open()函数将一个文件名作为参数,并返回一个可用来存储数据的shelf对象,其类似与所有键都为字符串的字典;NEWLINE# 如果shelve.open()函数的参数writeback设置为True,那么操作过程中的数据结构都将保存在内存中,直到关闭shelf对象时才写入磁盘;NEWLINE# 如果处理的数据量不多,建议将参数writeback设置为True;NEWLINE# 操作完成后,可以调用shelf对象的close()方法关闭;NEWLINE#NEWLINE# ### 特别注意NEWLINE# 修改使用模块shelve存储的对象,必须将获取的副本赋值给一个临时变量,并在这个副本后再次存储;NEWLINE |
"""NEWLINE Copyright (c) 2019-2020 Intel CorporationNEWLINE Licensed under the Apache License, Version 2.0 (the "License");NEWLINE you may not use this file except in compliance with the License.NEWLINE You may obtain a copy of the License atNEWLINE http://www.apache.org/licenses/LICENSE-2.0NEWLINE Unless required by applicable law or agreed to in writing, softwareNEWLINE distributed under the License is distributed on an "AS IS" BASIS,NEWLINE WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE See the License for the specific language governing permissions andNEWLINE limitations under the License.NEWLINE"""NEWLINEimport itertoolsNEWLINEfrom collections import CounterNEWLINEfrom pathlib import PathNEWLINEfrom typing import ListNEWLINENEWLINEimport networkx as nxNEWLINEimport pytestNEWLINEimport torchNEWLINEfrom copy import deepcopyNEWLINEfrom torch import nnNEWLINENEWLINEfrom nncf import register_moduleNEWLINEfrom nncf.dynamic_graph.context import ScopeNEWLINEfrom nncf.dynamic_graph.graph import InputAgnosticOperationExecutionContext, NNCFGraph, OperationExecutionContextNEWLINEfrom nncf.dynamic_graph.graph_builder import ModelInputInfoNEWLINEfrom nncf.dynamic_graph.operator_metatypes import NoopMetatypeNEWLINEfrom nncf.dynamic_graph.patch_pytorch import MODEL_INPUT_OP_NAMENEWLINEfrom nncf.dynamic_graph.version_agnostic_op_names import VersionAgnosticNamesNEWLINEfrom nncf.layer_utils import _NNCFModuleMixinNEWLINEfrom nncf.module_operations import BaseOpNEWLINEfrom nncf.nncf_network import NNCFNetwork, InsertionCommand, InsertionPoint, InsertionType, OperationPriority, \NEWLINE InsertionPointGraph, InsertionPointGraphNodeTypeNEWLINEfrom tests.conftest import TEST_ROOTNEWLINEfrom tests.helpers import TwoConvTestModel, BasicConvTestModel, check_correct_nncf_modules_replacementNEWLINENEWLINENEWLINEdef test_disable_shape_matching():NEWLINE class MatMulModel(nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE 
self.dummy_param = torch.nn.Parameter(torch.ones([1]))NEWLINENEWLINE def forward(self, inputs):NEWLINE half1, half2 = torch.chunk(inputs, 2, dim=2)NEWLINE return torch.bmm(half1, half2.transpose(1, 2))NEWLINENEWLINE model = MatMulModel()NEWLINENEWLINE input_shape_1 = (3, 32, 32)NEWLINE input_shape_2 = (4, 64, 64)NEWLINENEWLINE qnet_no_shape = NNCFNetwork(deepcopy(model), input_infos=[ModelInputInfo(input_shape_1), ],NEWLINE scopes_without_shape_matching=['MatMulModel']) # type: NNCFNetworkNEWLINE _ = qnet_no_shape(torch.zeros(*input_shape_1))NEWLINE graph_1 = deepcopy(qnet_no_shape.get_graph())NEWLINENEWLINE _ = qnet_no_shape(torch.zeros(*input_shape_2))NEWLINE graph_2 = deepcopy(qnet_no_shape.get_graph())NEWLINENEWLINE keys_1 = list(graph_1.get_all_node_keys())NEWLINE keys_2 = list(graph_2.get_all_node_keys())NEWLINE assert len(keys_1) == 2 # 1 input node + 1 operation nodeNEWLINE assert keys_1 == keys_2NEWLINENEWLINENEWLINE qnet = NNCFNetwork(model, input_infos=[ModelInputInfo(input_shape_1), ]) # type: NNCFNetworkNEWLINE _ = qnet(torch.zeros(*input_shape_1))NEWLINE _ = qnet(torch.zeros(*input_shape_2))NEWLINE # The second forward run should have led to an increase in registered node countsNEWLINE # since disable_shape_matching was False and the network was run with a differentNEWLINE # shape of input tensorNEWLINE assert qnet.get_graph().get_nodes_count() > graph_1.get_nodes_count()NEWLINENEWLINENEWLINEdef test_check_correct_modules_replacement():NEWLINE model = TwoConvTestModel()NEWLINE nncf_model = NNCFNetwork(TwoConvTestModel(), input_infos=[ModelInputInfo([1, 1, 4, 4])]) # type: NNCFNetworkNEWLINENEWLINE _, nncf_modules = check_correct_nncf_modules_replacement(model, nncf_model)NEWLINE assert set(nncf_modules) == set(nncf_model.get_nncf_modules())NEWLINENEWLINENEWLINE@register_moduleNEWLINEclass ModuleOfUser(torch.nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.weight = torch.nn.Parameter(torch.ones([1]))NEWLINENEWLINE def 
forward(self, input_):NEWLINE return input_ * self.weightNEWLINENEWLINENEWLINEclass TwoConvTestModelWithUserModule(TwoConvTestModel):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.user_module = ModuleOfUser()NEWLINENEWLINE def forward(self, x):NEWLINE x = super().forward(x)NEWLINE x = self.user_module(x)NEWLINE return xNEWLINENEWLINENEWLINEdef test_custom_module_registering():NEWLINE model = TwoConvTestModelWithUserModule()NEWLINE nncf_model = NNCFNetwork(model, input_infos=[ModelInputInfo([1, 1, 4, 4])]) # type: NNCFNetworkNEWLINENEWLINE from nncf.layers import UNWRAPPED_USER_MODULESNEWLINE assert ModuleOfUser in UNWRAPPED_USER_MODULES.registry_dict.values()NEWLINENEWLINE # pylint: disable=protected-accessNEWLINE assert isinstance(nncf_model.user_module, ModuleOfUser)NEWLINE assert isinstance(nncf_model.user_module, _NNCFModuleMixin)NEWLINE assert type(nncf_model.user_module).__name__ == "NNCFUserModuleOfUser"NEWLINENEWLINE user_module_attrs = dir(nncf_model.user_module)NEWLINE for attr in dir(_NNCFModuleMixin):NEWLINE assert attr in user_module_attrsNEWLINENEWLINENEWLINE# pylint: disable=protected-accessNEWLINEdef test_find_node_in_nx_graph_by_scope():NEWLINE model = TwoConvTestModel()NEWLINE nncf_model = NNCFNetwork(deepcopy(model), input_infos=[ModelInputInfo([1, 1, 4, 4])]) # type: NNCFNetworkNEWLINE nncf_graph = nncf_model.get_original_graph()NEWLINENEWLINE # Valid scopes should be successfully foundNEWLINE valid_nncf_modules = nncf_model.get_nncf_modules()NEWLINE nodes_list = list(nncf_graph._nx_graph.nodes)NEWLINE for module_scope, _ in valid_nncf_modules.items():NEWLINE graph_node = nncf_graph.find_node_in_nx_graph_by_scope(module_scope)NEWLINE assert graph_node is not NoneNEWLINE assert isinstance(graph_node, dict)NEWLINE assert graph_node['key'] in nodes_listNEWLINENEWLINE fake_model = BasicConvTestModel()NEWLINE fake_nncf_model = NNCFNetwork(deepcopy(fake_model), input_infos=[ModelInputInfo([1, 1, 4, 4])])NEWLINENEWLINE # Not valid 
scopes shouldn't be foundNEWLINE fake_nncf_modules = fake_nncf_model.get_nncf_modules()NEWLINE for module_scope, _ in fake_nncf_modules.items():NEWLINE graph_node = nncf_graph.find_node_in_nx_graph_by_scope(module_scope)NEWLINE assert graph_node is NoneNEWLINENEWLINENEWLINEclass InsertionPointTestModel(nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.conv1 = nn.Conv2d(1, 1, 1, 1)NEWLINE self.linear_wts = nn.Parameter(torch.FloatTensor(size=(100, 100)))NEWLINE self.conv2 = nn.Conv2d(1, 1, 1, 1)NEWLINE self.relu = nn.ReLU()NEWLINENEWLINE def forward(self, input_):NEWLINE x = self.conv1(input_)NEWLINE x = x.flatten()NEWLINE x = nn.functional.linear(x, self.linear_wts)NEWLINE x = x.reshape((1, 1, 10, 10))NEWLINE x = self.conv2(x)NEWLINE x = self.relu(x)NEWLINE return xNEWLINENEWLINENEWLINEclass TestInsertionCommands:NEWLINE @pytest.fixture()NEWLINE def setup(self):NEWLINE self.compressed_model = NNCFNetwork(InsertionPointTestModel(),NEWLINE [ModelInputInfo([1, 1, 10, 10])]) # type: NNCFNetworkNEWLINENEWLINE conv1_module_scope = Scope.from_str('InsertionPointTestModel/NNCFConv2d[conv1]')NEWLINE conv1_module_context = InputAgnosticOperationExecutionContext('', conv1_module_scope, 0)NEWLINE point_for_conv1_weights = InsertionPoint(ia_op_exec_context=conv1_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv1_inputs = InsertionPoint(ia_op_exec_context=conv1_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv1_activations = InsertionPoint(ia_op_exec_context=conv1_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_POST_OP)NEWLINENEWLINE conv2_module_scope = Scope.from_str('InsertionPointTestModel/NNCFConv2d[conv2]')NEWLINE conv2_module_context = InputAgnosticOperationExecutionContext('', conv2_module_scope, 0)NEWLINE point_for_conv2_weights = InsertionPoint(ia_op_exec_context=conv2_module_context,NEWLINE 
insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv2_inputs = InsertionPoint(ia_op_exec_context=conv2_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_PRE_OP)NEWLINE point_for_conv2_activations = InsertionPoint(ia_op_exec_context=conv2_module_context,NEWLINE insertion_type=InsertionType.NNCF_MODULE_POST_OP)NEWLINENEWLINE linear_op_scope = Scope.from_str('InsertionPointTestModel/linear_0')NEWLINE linear_op_context = InputAgnosticOperationExecutionContext('linear',NEWLINE linear_op_scope,NEWLINE 0)NEWLINE point_for_linear_weight_input = InsertionPoint(ia_op_exec_context=linear_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_PRE_HOOK)NEWLINE point_for_linear_activation = InsertionPoint(ia_op_exec_context=linear_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_POST_HOOK)NEWLINENEWLINE relu_op_scope = Scope.from_str('InsertionPointTestModel/ReLU[relu]/relu')NEWLINE relu_op_context = InputAgnosticOperationExecutionContext('relu',NEWLINE relu_op_scope,NEWLINE 0)NEWLINE point_for_relu_inputs = InsertionPoint(ia_op_exec_context=relu_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_PRE_HOOK)NEWLINE point_for_relu_activations = InsertionPoint(ia_op_exec_context=relu_op_context,NEWLINE insertion_type=InsertionType.OPERATOR_POST_HOOK)NEWLINENEWLINE available_points = [point_for_conv1_weights,NEWLINE point_for_conv2_weights,NEWLINE point_for_conv1_inputs,NEWLINE point_for_conv2_inputs,NEWLINE point_for_conv1_activations,NEWLINE point_for_conv2_activations,NEWLINE point_for_linear_activation,NEWLINE point_for_linear_weight_input,NEWLINE point_for_relu_activations,NEWLINE point_for_relu_inputs]NEWLINENEWLINE @pytest.mark.parametrize("insertion_point", available_points)NEWLINE def test_single_insertions(self, setup, insertion_point):NEWLINE if insertion_point.insertion_type in [InsertionType.OPERATOR_PRE_HOOK, InsertionType.OPERATOR_POST_HOOK]:NEWLINE hook = lambda x: xNEWLINE else:NEWLINE hook = BaseOp(lambda x: 
x)NEWLINENEWLINE command = InsertionCommand(insertion_point, hook)NEWLINE self.compressed_model.register_insertion_command(command)NEWLINE self.compressed_model.commit_compression_changes()NEWLINENEWLINE #pylint:disable=protected-accessNEWLINE if insertion_point.insertion_type == InsertionType.OPERATOR_PRE_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE assert ctx._pre_hooks[command.insertion_point.ia_op_exec_context][0] is hookNEWLINE if insertion_point.insertion_type == InsertionType.OPERATOR_POST_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE assert ctx._post_hooks[command.insertion_point.ia_op_exec_context][0] is hookNEWLINE if insertion_point.insertion_type == InsertionType.NNCF_MODULE_PRE_OP:NEWLINE module = self.compressed_model.get_module_by_scope(NEWLINE command.insertion_point.ia_op_exec_context.scope_in_model)NEWLINE assert module.pre_ops["0"] is hookNEWLINENEWLINE if insertion_point.insertion_type == InsertionType.NNCF_MODULE_POST_OP:NEWLINE module = self.compressed_model.get_module_by_scope(NEWLINE command.insertion_point.ia_op_exec_context.scope_in_model)NEWLINE assert module.post_ops["0"] is hookNEWLINENEWLINE priority_types = ["same", "different"]NEWLINE insertion_types = InsertionTypeNEWLINE priority_test_cases = list(itertools.product(priority_types, insertion_types))NEWLINENEWLINE @staticmethodNEWLINE def check_order(iterable1: List, iterable2: List, ordering: List):NEWLINE for idx, order in enumerate(ordering):NEWLINE assert iterable1[idx] is iterable2[order]NEWLINENEWLINE # pylint:disable=undefined-variableNEWLINE @pytest.mark.parametrize("case", priority_test_cases, ids=[x[1].name + '-' + x[0] for x in priority_test_cases])NEWLINE def test_priority(self, case, setup):NEWLINE #pylint:disable=too-many-branchesNEWLINE priority_type = case[0]NEWLINE insertion_type = case[1]NEWLINE if insertion_type in [InsertionType.NNCF_MODULE_PRE_OP, InsertionType.NNCF_MODULE_POST_OP]:NEWLINE hook1 = BaseOp(lambda x: 
x)NEWLINE hook2 = BaseOp(lambda x: 2 * x)NEWLINE hook3 = BaseOp(lambda x: 3 * x)NEWLINE else:NEWLINE hook1 = lambda x: xNEWLINE hook2 = lambda x: 2 * xNEWLINE hook3 = lambda x: 3 * xNEWLINENEWLINE if insertion_type == InsertionType.NNCF_MODULE_PRE_OP:NEWLINE point = self.point_for_conv2_weightsNEWLINE elif insertion_type == InsertionType.NNCF_MODULE_POST_OP:NEWLINE point = self.point_for_conv1_activationsNEWLINE elif insertion_type == InsertionType.OPERATOR_PRE_HOOK:NEWLINE point = self.point_for_linear_weight_inputNEWLINE elif insertion_type == InsertionType.OPERATOR_POST_HOOK:NEWLINE point = self.point_for_relu_activationsNEWLINENEWLINE if priority_type == "same":NEWLINE # Same-priority commands will be executed in registration orderNEWLINE command1 = InsertionCommand(point, hook1, OperationPriority.DEFAULT_PRIORITY)NEWLINE command2 = InsertionCommand(point, hook2, OperationPriority.DEFAULT_PRIORITY)NEWLINE command3 = InsertionCommand(point, hook3, OperationPriority.DEFAULT_PRIORITY)NEWLINE else:NEWLINE # Prioritized commands will be executed in ascending priority orderNEWLINE command1 = InsertionCommand(point, hook1, OperationPriority.SPARSIFICATION_PRIORITY)NEWLINE command2 = InsertionCommand(point, hook2, OperationPriority.QUANTIZATION_PRIORITY)NEWLINE command3 = InsertionCommand(point, hook3, OperationPriority.DEFAULT_PRIORITY)NEWLINENEWLINE self.compressed_model.register_insertion_command(command1)NEWLINE self.compressed_model.register_insertion_command(command2)NEWLINE self.compressed_model.register_insertion_command(command3)NEWLINE self.compressed_model.commit_compression_changes()NEWLINENEWLINE hook_list = [hook1, hook2, hook3]NEWLINENEWLINE if priority_type == "same":NEWLINE order = [0, 1, 2]NEWLINE elif priority_type == "different":NEWLINE order = [2, 0, 1]NEWLINENEWLINE #pylint:disable=protected-accessNEWLINE if insertion_type == InsertionType.OPERATOR_PRE_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE 
self.check_order(ctx._pre_hooks[point.ia_op_exec_context], hook_list, order)NEWLINE if insertion_type == InsertionType.OPERATOR_POST_HOOK:NEWLINE ctx = self.compressed_model.get_tracing_context()NEWLINE self.check_order(ctx._post_hooks[point.ia_op_exec_context], hook_list, order)NEWLINENEWLINE if insertion_type == InsertionType.NNCF_MODULE_PRE_OP:NEWLINE module = self.compressed_model.get_module_by_scope(point.ia_op_exec_context.scope_in_model)NEWLINE # Works because Pytorch ModuleDict is orderedNEWLINE self.check_order(list(module.pre_ops.values()), hook_list, order)NEWLINENEWLINE if insertion_type == InsertionType.NNCF_MODULE_POST_OP:NEWLINE module = self.compressed_model.get_module_by_scope(point.ia_op_exec_context.scope_in_model)NEWLINE # Works because Pytorch ModuleDict is orderedNEWLINE self.check_order(list(module.post_ops.values()), hook_list, order)NEWLINENEWLINENEWLINEdef get_two_branch_mock_model_graph() -> nx.DiGraph:NEWLINE mock_node_attrs = get_mock_nncf_node_attrs()NEWLINE mock_graph = nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (B)NEWLINE # / \NEWLINE # (C) (D)NEWLINE # | |NEWLINE # (E) |NEWLINE # \ /NEWLINE # (F)NEWLINE # |NEWLINE # (G)NEWLINE # |NEWLINE # (H)NEWLINENEWLINE node_keys = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **mock_node_attrs)NEWLINENEWLINE mock_graph.add_edges_from([('A', 'B'), ('B', 'C'), ('B', 'D'), ('C', 'E'), ('E', 'F'),NEWLINE ('D', 'F'), ('F', 'G'), ('G', 'H')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEMOCK_OPERATOR_NAME = "conv_transpose2d"NEWLINENEWLINENEWLINEdef get_mock_nncf_node_attrs(op_name=None):NEWLINE op_name_to_set = op_name if op_name is not None else MOCK_OPERATOR_NAMENEWLINE return {NEWLINE NNCFGraph.OP_EXEC_CONTEXT_NODE_ATTR: OperationExecutionContext(op_name_to_set,NEWLINE Scope(),NEWLINE 0,NEWLINE [None])NEWLINE }NEWLINENEWLINENEWLINEdef get_mock_model_graph_with_mergeable_pattern() -> nx.DiGraph:NEWLINE mock_graph = 
nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (conv2d)NEWLINE # |NEWLINE # (batch_norm)NEWLINE # |NEWLINE # (RELU)NEWLINE # |NEWLINE # (B)NEWLINENEWLINE node_keys = ['conv2d', 'batch_norm', VersionAgnosticNames.RELU, 'A', 'B']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **get_mock_nncf_node_attrs(op_name=node_key))NEWLINENEWLINE mock_graph.add_edges_from([('A', 'conv2d'), ('conv2d', 'batch_norm'),NEWLINE ('batch_norm', VersionAgnosticNames.RELU),NEWLINE (VersionAgnosticNames.RELU, 'B')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEdef get_mock_model_graph_with_no_mergeable_pattern() -> nx.DiGraph:NEWLINE mock_graph = nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (conv2d)NEWLINE # |NEWLINE # (C)NEWLINE # |NEWLINE # (batch_norm)NEWLINE # |NEWLINE # (D)NEWLINE # |NEWLINE # (RELU)NEWLINE # |NEWLINE # (B)NEWLINENEWLINE node_keys = ['conv2d', 'batch_norm', VersionAgnosticNames.RELU, 'A', 'B', 'C', 'D']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **get_mock_nncf_node_attrs(op_name=node_key))NEWLINENEWLINE mock_graph.add_edges_from([('A', 'conv2d'), ('conv2d', 'C'),NEWLINE ('C', 'batch_norm'),NEWLINE ('batch_norm', 'D'),NEWLINE ('D', VersionAgnosticNames.RELU),NEWLINE (VersionAgnosticNames.RELU, 'B')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEdef get_mock_model_graph_with_broken_output_edge_pattern() -> nx.DiGraph:NEWLINE mock_graph = nx.DiGraph()NEWLINENEWLINE # (A)NEWLINE # |NEWLINE # (conv2d)----\NEWLINE # | |NEWLINE # (batch_norm) |NEWLINE # | |NEWLINE # (RELU) |NEWLINE # | |NEWLINE # (C)--------/NEWLINE # |NEWLINE # (B)NEWLINENEWLINE node_keys = ['conv2d', 'batch_norm', VersionAgnosticNames.RELU, 'A', 'B', 'C']NEWLINE for node_key in node_keys:NEWLINE mock_graph.add_node(node_key, **get_mock_nncf_node_attrs(op_name=node_key))NEWLINENEWLINE mock_graph.add_edges_from([('A', 'conv2d'), ('conv2d', 'batch_norm'),NEWLINE ('conv2d', 'C'),NEWLINE ('batch_norm', VersionAgnosticNames.RELU),NEWLINE 
(VersionAgnosticNames.RELU, 'C'),NEWLINE ('C', 'B')])NEWLINE return mock_graphNEWLINENEWLINENEWLINEMERGE_PATTERN_TEST_CASES = (NEWLINE [get_mock_model_graph_with_mergeable_pattern, "basic_pattern"],NEWLINE [get_mock_model_graph_with_no_mergeable_pattern, "no_pattern"],NEWLINE [get_mock_model_graph_with_broken_output_edge_pattern, "broken_output_edges_pattern"]NEWLINE)NEWLINENEWLINENEWLINEclass TestInsertionPointGraph:NEWLINE def test_insertion_point_setup(self):NEWLINE # TODO: Change testing premises when module pre/post-op hooks and input/output nodesNEWLINE # are correctly handledNEWLINE mock_graph = get_two_branch_mock_model_graph()NEWLINENEWLINE ip_graph = InsertionPointGraph(mock_graph)NEWLINENEWLINE ref_node_len = 3 * len(mock_graph.nodes) # 2 additional nodes per each operator nodeNEWLINE ref_edge_len = 3 * len(mock_graph.edges)NEWLINENEWLINE assert len(ip_graph.nodes) == ref_node_lenNEWLINE assert len(ip_graph.edges) == ref_edge_lenNEWLINENEWLINE for node_key, node in mock_graph.nodes.items():NEWLINE ip_graph_op_node = ip_graph.nodes[node_key]NEWLINE assert ip_graph_op_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] == InsertionPointGraphNodeType.OPERATORNEWLINE preds = list(ip_graph.predecessors(node_key))NEWLINE succs = list(ip_graph.successors(node_key))NEWLINE assert len(preds) == 1NEWLINE assert len(succs) == 1NEWLINE pre_hook_ip_node_key = preds[0]NEWLINE post_hook_ip_node_key = succs[0]NEWLINE pre_hook_ip_node = ip_graph.nodes[preds[0]]NEWLINE post_hook_ip_node = ip_graph.nodes[succs[0]]NEWLINE pre_hook_ip_node_type = pre_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR]NEWLINE post_hook_ip_node_type = post_hook_ip_node[InsertionPointGraph.NODE_TYPE_NODE_ATTR]NEWLINE assert pre_hook_ip_node_type == InsertionPointGraphNodeType.INSERTION_POINTNEWLINE assert post_hook_ip_node_type == InsertionPointGraphNodeType.INSERTION_POINTNEWLINE ref_associated_ip_node_keys_set = {pre_hook_ip_node_key, post_hook_ip_node_key}NEWLINE assert 
ref_associated_ip_node_keys_set == ip_graph_op_node[NEWLINE InsertionPointGraph.ASSOCIATED_IP_NODE_KEYS_NODE_ATTR]NEWLINE original_neighbours = mock_graph.neighbors(node_key)NEWLINE for neighbour in original_neighbours:NEWLINE # IP node insertion should not disrupt the graph superstructureNEWLINE ip_graph_paths = list(nx.all_simple_paths(ip_graph, node_key, neighbour))NEWLINE for path in ip_graph_paths:NEWLINE path = path[1:-1]NEWLINE for path_node_key in path:NEWLINE node = ip_graph.nodes[path_node_key]NEWLINE node_type = node[InsertionPointGraph.NODE_TYPE_NODE_ATTR]NEWLINE assert node_type == InsertionPointGraphNodeType.INSERTION_POINTNEWLINENEWLINE for node_key, node in ip_graph.nodes.items():NEWLINE preds = list(ip_graph.predecessors(node_key))NEWLINE succs = list(ip_graph.successors(node_key))NEWLINE assert len(preds) != 0 or len(succs) != 0NEWLINENEWLINE for from_node_key, to_node_key in ip_graph.edges.keys():NEWLINE assert from_node_key in ip_graph.nodesNEWLINE assert to_node_key in ip_graph.nodesNEWLINENEWLINE def test_insertion_point_data_in_ip_nodes(self):NEWLINE # TODO: extend for modulesNEWLINE mock_graph = nx.DiGraph()NEWLINE ref_op_exec_context = OperationExecutionContext("baz",NEWLINE Scope.from_str("Test/Scope[foo]/bar"),NEWLINE 0,NEWLINE [None])NEWLINE node_attrs = {NEWLINE NNCFGraph.OP_EXEC_CONTEXT_NODE_ATTR: ref_op_exec_contextNEWLINE }NEWLINENEWLINE node_key = 0NEWLINE mock_graph.add_node(node_key, **node_attrs)NEWLINENEWLINE ip_graph = InsertionPointGraph(mock_graph)NEWLINENEWLINE for node_key in mock_graph.nodes.keys():NEWLINE preds = list(ip_graph.predecessors(node_key))NEWLINE succs = list(ip_graph.successors(node_key))NEWLINE pre_hook_ip_node = ip_graph.nodes[preds[0]]NEWLINE post_hook_ip_node = ip_graph.nodes[succs[0]]NEWLINENEWLINE pre_hook_ip = pre_hook_ip_node[InsertionPointGraph.INSERTION_POINT_DATA_NODE_ATTR]NEWLINE post_hook_ip = post_hook_ip_node[InsertionPointGraph.INSERTION_POINT_DATA_NODE_ATTR]NEWLINE assert 
pre_hook_ip.insertion_type == InsertionType.OPERATOR_PRE_HOOKNEWLINE assert post_hook_ip.insertion_type == InsertionType.OPERATOR_POST_HOOKNEWLINENEWLINE assert pre_hook_ip.ia_op_exec_context == ref_op_exec_context.input_agnosticNEWLINE assert post_hook_ip.ia_op_exec_context == ref_op_exec_context.input_agnosticNEWLINENEWLINE def test_operator_metatype_marking(self):NEWLINE from nncf.dynamic_graph.operator_metatypes import Conv2dMetatype, BatchNormMetatype, RELUMetatype, \NEWLINE MaxPool2dMetatype, \NEWLINE ConvTranspose2dMetatype, DepthwiseConv2dSubtype, AddMetatype, AvgPool2dMetatype, LinearMetatypeNEWLINE ref_scope_vs_metatype_dict = {NEWLINE "/" + MODEL_INPUT_OP_NAME + "_0": NoopMetatype,NEWLINE "ModelForMetatypeTesting/NNCFConv2d[conv_regular]/conv2d_0": Conv2dMetatype,NEWLINE "ModelForMetatypeTesting/BatchNorm2d[bn]/batch_norm_0": BatchNormMetatype,NEWLINE "ModelForMetatypeTesting/RELU_0": RELUMetatype,NEWLINE "ModelForMetatypeTesting/MaxPool2d[max_pool2d]/max_pool2d_0": MaxPool2dMetatype,NEWLINE "ModelForMetatypeTesting/NNCFConvTranspose2d[conv_transpose]/conv_transpose2d_0": ConvTranspose2dMetatype,NEWLINE "ModelForMetatypeTesting/NNCFConv2d[conv_depthwise]/conv2d_0": DepthwiseConv2dSubtype,NEWLINE "ModelForMetatypeTesting/__iadd___0": AddMetatype,NEWLINE "ModelForMetatypeTesting/AdaptiveAvgPool2d[adaptive_avg_pool]/adaptive_avg_pool2d_0": AvgPool2dMetatype,NEWLINE "ModelForMetatypeTesting/NNCFLinear[linear]/linear_0": LinearMetatypeNEWLINE }NEWLINE class ModelForMetatypeTesting(torch.nn.Module):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.conv_regular = torch.nn.Conv2d(in_channels=3,NEWLINE out_channels=16,NEWLINE kernel_size=3)NEWLINE self.bn = torch.nn.BatchNorm2d(num_features=16)NEWLINE self.max_pool2d = torch.nn.MaxPool2d(kernel_size=2)NEWLINE self.conv_transpose = torch.nn.ConvTranspose2d(in_channels=16,NEWLINE out_channels=8,NEWLINE kernel_size=3)NEWLINE self.conv_depthwise = torch.nn.Conv2d(in_channels=8, out_channels=8,NEWLINE 
kernel_size=5, groups=8)NEWLINE self.adaptive_avg_pool = torch.nn.AdaptiveAvgPool2d(output_size=1)NEWLINE self.linear = torch.nn.Linear(in_features=8, out_features=1)NEWLINENEWLINE def forward(self, input_):NEWLINE x = self.conv_regular(input_)NEWLINE x = self.bn(x)NEWLINE x = torch.nn.functional.relu(x)NEWLINE x.transpose_(2, 3)NEWLINE x = self.max_pool2d(x)NEWLINE x = self.conv_transpose(x)NEWLINE x = self.conv_depthwise(x)NEWLINE x += torch.ones_like(x)NEWLINE x = self.adaptive_avg_pool(x)NEWLINE x = self.linear(x.flatten())NEWLINE return xNEWLINENEWLINE model = ModelForMetatypeTesting()NEWLINE nncf_network = NNCFNetwork(model, [ModelInputInfo([1, 3, 300, 300])])NEWLINE ip_graph = nncf_network.get_insertion_point_graph()NEWLINENEWLINE for node in ip_graph.nodes().values():NEWLINE if node[InsertionPointGraph.NODE_TYPE_NODE_ATTR] == InsertionPointGraphNodeType.OPERATOR:NEWLINE nncf_node_ref = node[InsertionPointGraph.REGULAR_NODE_REF_NODE_ATTR]NEWLINE scope_str = str(nncf_node_ref[NNCFGraph.OP_EXEC_CONTEXT_NODE_ATTR].input_agnostic)NEWLINE assert scope_str in ref_scope_vs_metatype_dictNEWLINE ref_metatype = ref_scope_vs_metatype_dict[scope_str]NEWLINE assert node[InsertionPointGraph.OPERATOR_METATYPE_NODE_ATTR] == ref_metatypeNEWLINENEWLINE @pytest.mark.parametrize(("mock_graph_factory", "dot_file_name"),NEWLINE MERGE_PATTERN_TEST_CASES,NEWLINE ids=[x[1] for x in MERGE_PATTERN_TEST_CASES])NEWLINE def test_get_ip_graph_with_merged_operations(self, mock_graph_factory, dot_file_name):NEWLINE mock_graph = mock_graph_factory()NEWLINE ip_graph = InsertionPointGraph(mock_graph)NEWLINE merged_ip_graph = ip_graph.get_ip_graph_with_merged_hw_optimized_operations()NEWLINENEWLINE data_dir = TEST_ROOT / 'data/reference_graphs/pattern_merging' # type: PathNEWLINENEWLINE path_to_dot_file = data_dir / '{}.dot'.format(dot_file_name)NEWLINENEWLINE # validate .dot file manually!NEWLINE if not path_to_dot_file.exists():NEWLINE if not data_dir.exists():NEWLINE 
data_dir.mkdir(parents=True)NEWLINE nx.drawing.nx_pydot.write_dot(merged_ip_graph, str(path_to_dot_file))NEWLINENEWLINE load_graph = nx.drawing.nx_pydot.read_dot(str(path_to_dot_file))NEWLINENEWLINE for key in load_graph.nodes.keys():NEWLINE key.replace(r'\\n', r'\n') # Somehow pydot mangles the \n characters while writing a .dot fileNEWLINENEWLINE sanitized_loaded_keys = [key.replace('\\n', '\n') for key in load_graph.nodes.keys()]NEWLINE sanitized_loaded_edges = [(u.replace('\\n', '\n'),NEWLINE v.replace('\\n', '\n')) for u, v in nx.DiGraph(load_graph).edges]NEWLINENEWLINE assert Counter(sanitized_loaded_keys) == Counter(list(merged_ip_graph.nodes.keys()))NEWLINE assert Counter(sanitized_loaded_edges) == Counter(list(merged_ip_graph.edges))NEWLINE |
import os.pathNEWLINENEWLINEimport torchNEWLINEimport seaborn as snsNEWLINEfrom pandas import DataFrameNEWLINEfrom torch.utils.data import DataLoaderNEWLINEfrom transformers import RobertaTokenizerNEWLINENEWLINEfrom bond.data import DatasetName, DatasetType, SubTokenDataset, load_dataset, load_tags_dictNEWLINEfrom bond.utils import ner_scoresNEWLINENEWLINENEWLINEdef plot_distant_dataset_stats(dataset_name: DatasetName) -> None:NEWLINE tokenizer = RobertaTokenizer.from_pretrained('roberta-base') # for loading datasets - doesn't really matter what tokenizer to useNEWLINENEWLINE distant_dataset = load_dataset(dataset_name, DatasetType.DISTANT, tokenizer, 'roberta-base', 128)NEWLINE gold_dataset = load_dataset(dataset_name, DatasetType.TRAIN, tokenizer, 'roberta-base', 128)NEWLINENEWLINE distant_labels = []NEWLINE for _, labels, mask, _ in DataLoader(distant_dataset, batch_size=1):NEWLINE distant_labels.extend(labels.masked_select(mask > 0).tolist())NEWLINENEWLINE gold_labels = []NEWLINE for _, labels, mask, _ in DataLoader(gold_dataset, batch_size=1):NEWLINE gold_labels.extend(labels.masked_select(mask > 0).tolist())NEWLINENEWLINE stats = ner_scores(gold_labels, distant_labels, load_tags_dict(dataset_name))NEWLINE print(stats) # TODO: do actual stats visuallizationNEWLINENEWLINENEWLINEdef score_cached_dataset(dataset_path: str) -> None:NEWLINE cached_name = os.path.basename(dataset_path)NEWLINE info = cached_name.split('_')NEWLINE tokenizer = RobertaTokenizer.from_pretrained(info[-2])NEWLINE dataset_name = DatasetName(info[0])NEWLINE max_seq_len = int(info[-1][3:])NEWLINENEWLINE distant_dataset: SubTokenDataset = torch.load(dataset_path)NEWLINE gold_dataset = load_dataset(dataset_name, DatasetType.TRAIN, tokenizer, 'roberta-base', max_seq_len)NEWLINENEWLINE distant_labels = []NEWLINE for _, _, _, labels, mask, _, _ in DataLoader(distant_dataset, batch_size=1, collate_fn=distant_dataset.collate_fn):NEWLINE 
distant_labels.extend(labels.masked_select(mask).tolist())NEWLINENEWLINE gold_labels = []NEWLINE for _, _, _, labels, mask, _, _ in DataLoader(gold_dataset, batch_size=1, collate_fn=gold_dataset.collate_fn):NEWLINE gold_labels.extend(labels.masked_select(mask).tolist())NEWLINENEWLINE assert len(gold_labels) == len(distant_labels)NEWLINE stats = ner_scores(gold_labels, distant_labels, load_tags_dict(dataset_name))NEWLINE print(stats)NEWLINE |
from game.combat.effects.moveeffect.basemoveeffect import BaseMoveEffectNEWLINEfrom game.combat.effects.partialeffect.applystatuseffect import ApplyStatusNEWLINEfrom game.combat.effects import statuseffectNEWLINENEWLINENEWLINEclass Confusion(BaseMoveEffect):NEWLINE def after_action(self):NEWLINE if self.scene.board.random_roll(self.move.chance):NEWLINE ApplyStatus(self.scene, statuseffect.CONFUSION, self.move.user, self.move.target).apply()NEWLINE return True, False, FalseNEWLINE |
"""NEWLINEThe MIT License (MIT)NEWLINENEWLINECopyright (c) 2015-present RapptzNEWLINENEWLINEPermission is hereby granted, free of charge, to any person obtaining aNEWLINEcopy of this software and associated documentation files (the "Software"),NEWLINEto deal in the Software without restriction, including without limitationNEWLINEthe rights to use, copy, modify, merge, publish, distribute, sublicense,NEWLINEand/or sell copies of the Software, and to permit persons to whom theNEWLINESoftware is furnished to do so, subject to the following conditions:NEWLINENEWLINEThe above copyright notice and this permission notice shall be included inNEWLINEall copies or substantial portions of the Software.NEWLINENEWLINETHE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESSNEWLINEOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,NEWLINEFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THENEWLINEAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHERNEWLINELIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISINGNEWLINEFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHERNEWLINEDEALINGS IN THE SOFTWARE.NEWLINE"""NEWLINENEWLINEfrom __future__ import annotationsNEWLINENEWLINEimport asyncioNEWLINEimport datetimeNEWLINEimport loggingNEWLINEimport sysNEWLINEimport tracebackNEWLINEfrom typing import (NEWLINE Any,NEWLINE AsyncIterator,NEWLINE Callable,NEWLINE Coroutine,NEWLINE Dict,NEWLINE Generator,NEWLINE List,NEWLINE Optional,NEWLINE Sequence,NEWLINE TYPE_CHECKING,NEWLINE Tuple,NEWLINE Type,NEWLINE TypeVar,NEWLINE Union,NEWLINE)NEWLINENEWLINEimport aiohttpNEWLINENEWLINEfrom .user import User, ClientUserNEWLINEfrom .invite import InviteNEWLINEfrom .template import TemplateNEWLINEfrom .widget import WidgetNEWLINEfrom .guild import GuildNEWLINEfrom .emoji import EmojiNEWLINEfrom .channel import _threaded_channel_factory, PartialMessageableNEWLINEfrom .enums import 
ChannelTypeNEWLINEfrom .mentions import AllowedMentionsNEWLINEfrom .errors import *NEWLINEfrom .enums import StatusNEWLINEfrom .flags import ApplicationFlags, IntentsNEWLINEfrom .gateway import *NEWLINEfrom .activity import ActivityTypes, BaseActivity, create_activityNEWLINEfrom .voice_client import VoiceClientNEWLINEfrom .http import HTTPClientNEWLINEfrom .state import ConnectionStateNEWLINEfrom . import utilsNEWLINEfrom .utils import MISSING, time_snowflakeNEWLINEfrom .object import ObjectNEWLINEfrom .backoff import ExponentialBackoffNEWLINEfrom .webhook import WebhookNEWLINEfrom .appinfo import AppInfoNEWLINEfrom .ui.view import ViewNEWLINEfrom .stage_instance import StageInstanceNEWLINEfrom .threads import ThreadNEWLINEfrom .sticker import GuildSticker, StandardSticker, StickerPack, _sticker_factoryNEWLINENEWLINEif TYPE_CHECKING:NEWLINE from typing_extensions import SelfNEWLINE from types import TracebackTypeNEWLINE from .types.guild import Guild as GuildPayloadNEWLINE from .abc import SnowflakeTime, Snowflake, PrivateChannelNEWLINE from .guild import GuildChannelNEWLINE from .channel import DMChannelNEWLINE from .message import MessageNEWLINE from .member import MemberNEWLINE from .voice_client import VoiceProtocolNEWLINENEWLINE# fmt: offNEWLINE__all__ = (NEWLINE 'Client',NEWLINE)NEWLINE# fmt: onNEWLINENEWLINECoro = TypeVar('Coro', bound=Callable[..., Coroutine[Any, Any, Any]])NEWLINENEWLINE_log = logging.getLogger(__name__)NEWLINENEWLINENEWLINEclass _LoopSentinel:NEWLINE __slots__ = ()NEWLINENEWLINE def __getattr__(self, attr: str) -> None:NEWLINE msg = (NEWLINE 'loop attribute cannot be accessed in non-async contexts. 
'NEWLINE 'Consider using either an asynchronous main function and passing it to asyncio.run or 'NEWLINE 'using asynchronous initialisation hooks such as Client.setup_hook'NEWLINE )NEWLINE raise AttributeError(msg)NEWLINENEWLINENEWLINE_loop: Any = _LoopSentinel()NEWLINENEWLINENEWLINEclass Client:NEWLINE r"""Represents a client connection that connects to Discord.NEWLINE This class is used to interact with the Discord WebSocket and API.NEWLINENEWLINE A number of options can be passed to the :class:`Client`.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE max_messages: Optional[:class:`int`]NEWLINE The maximum number of messages to store in the internal message cache.NEWLINE This defaults to ``1000``. Passing in ``None`` disables the message cache.NEWLINENEWLINE .. versionchanged:: 1.3NEWLINE Allow disabling the message cache and change the default size to ``1000``.NEWLINE proxy: Optional[:class:`str`]NEWLINE Proxy URL.NEWLINE proxy_auth: Optional[:class:`aiohttp.BasicAuth`]NEWLINE An object that represents proxy HTTP Basic Authorization.NEWLINE shard_id: Optional[:class:`int`]NEWLINE Integer starting at ``0`` and less than :attr:`.shard_count`.NEWLINE shard_count: Optional[:class:`int`]NEWLINE The total number of shards.NEWLINE application_id: :class:`int`NEWLINE The client's application ID.NEWLINE intents: :class:`Intents`NEWLINE The intents that you want to enable for the session. This is a way ofNEWLINE disabling and enabling certain gateway events from triggering and being sent.NEWLINE If not given, defaults to a regularly constructed :class:`Intents` class.NEWLINENEWLINE .. versionadded:: 1.5NEWLINE member_cache_flags: :class:`MemberCacheFlags`NEWLINE Allows for finer control over how the library caches members.NEWLINE If not given, defaults to cache as much as possible with theNEWLINE currently selected intents.NEWLINENEWLINE .. 
versionadded:: 1.5NEWLINE chunk_guilds_at_startup: :class:`bool`NEWLINE Indicates if :func:`.on_ready` should be delayed to chunk all guildsNEWLINE at start-up if necessary. This operation is incredibly slow for largeNEWLINE amounts of guilds. The default is ``True`` if :attr:`Intents.members`NEWLINE is ``True``.NEWLINENEWLINE .. versionadded:: 1.5NEWLINE status: Optional[:class:`.Status`]NEWLINE A status to start your presence with upon logging on to Discord.NEWLINE activity: Optional[:class:`.BaseActivity`]NEWLINE An activity to start your presence with upon logging on to Discord.NEWLINE allowed_mentions: Optional[:class:`AllowedMentions`]NEWLINE Control how the client handles mentions by default on every message sent.NEWLINENEWLINE .. versionadded:: 1.4NEWLINE heartbeat_timeout: :class:`float`NEWLINE The maximum numbers of seconds before timing out and restarting theNEWLINE WebSocket in the case of not receiving a HEARTBEAT_ACK. Useful ifNEWLINE processing the initial packets take too long to the point of disconnectingNEWLINE you. The default timeout is 60 seconds.NEWLINE guild_ready_timeout: :class:`float`NEWLINE The maximum number of seconds to wait for the GUILD_CREATE stream to end beforeNEWLINE preparing the member cache and firing READY. The default timeout is 2 seconds.NEWLINENEWLINE .. versionadded:: 1.4NEWLINE assume_unsync_clock: :class:`bool`NEWLINE Whether to assume the system clock is unsynced. This applies to the ratelimit handlingNEWLINE code. If this is set to ``True``, the default, then the library uses the time to resetNEWLINE a rate limit bucket given by Discord. If this is ``False`` then your system clock isNEWLINE used to calculate how long to sleep for. If this is set to ``False`` it is recommended toNEWLINE sync your system clock to Google's NTP server.NEWLINENEWLINE .. 
versionadded:: 1.3NEWLINE enable_debug_events: :class:`bool`NEWLINE Whether to enable events that are useful only for debugging gateway related information.NEWLINENEWLINE Right now this involves :func:`on_socket_raw_receive` and :func:`on_socket_raw_send`. IfNEWLINE this is ``False`` then those events will not be dispatched (due to performance considerations).NEWLINE To enable these events, this must be set to ``True``. Defaults to ``False``.NEWLINENEWLINE .. versionadded:: 2.0NEWLINE http_trace: :class:`aiohttp.TraceConfig`NEWLINE The trace configuration to use for tracking HTTP requests the library does using ``aiohttp``.NEWLINE This allows you to check requests the library is using. For more information, check theNEWLINE `aiohttp documentation <https://docs.aiohttp.org/en/stable/client_advanced.html#client-tracing>`_.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE AttributesNEWLINE -----------NEWLINE wsNEWLINE The websocket gateway the client is currently connected to. Could be ``None``.NEWLINE """NEWLINENEWLINE def __init__(self, **options: Any) -> None:NEWLINE self.loop: asyncio.AbstractEventLoop = _loopNEWLINE # self.ws is set in the connect methodNEWLINE self.ws: DiscordWebSocket = None # type: ignoreNEWLINE self._listeners: Dict[str, List[Tuple[asyncio.Future, Callable[..., bool]]]] = {}NEWLINE self.shard_id: Optional[int] = options.get('shard_id')NEWLINE self.shard_count: Optional[int] = options.get('shard_count')NEWLINENEWLINE proxy: Optional[str] = options.pop('proxy', None)NEWLINE proxy_auth: Optional[aiohttp.BasicAuth] = options.pop('proxy_auth', None)NEWLINE unsync_clock: bool = options.pop('assume_unsync_clock', True)NEWLINE http_trace: Optional[aiohttp.TraceConfig] = options.pop('http_trace', None)NEWLINE self.http: HTTPClient = HTTPClient(NEWLINE self.loop,NEWLINE proxy=proxy,NEWLINE proxy_auth=proxy_auth,NEWLINE unsync_clock=unsync_clock,NEWLINE http_trace=http_trace,NEWLINE )NEWLINENEWLINE self._handlers: Dict[str, Callable[..., None]] = 
{NEWLINE 'ready': self._handle_ready,NEWLINE }NEWLINENEWLINE self._hooks: Dict[str, Callable[..., Coroutine[Any, Any, Any]]] = {NEWLINE 'before_identify': self._call_before_identify_hook,NEWLINE }NEWLINENEWLINE self._enable_debug_events: bool = options.pop('enable_debug_events', False)NEWLINE self._connection: ConnectionState = self._get_state(**options)NEWLINE self._connection.shard_count = self.shard_countNEWLINE self._closed: bool = FalseNEWLINE self._ready: asyncio.Event = MISSINGNEWLINE self._connection._get_websocket = self._get_websocketNEWLINE self._connection._get_client = lambda: selfNEWLINENEWLINE if VoiceClient.warn_nacl:NEWLINE VoiceClient.warn_nacl = FalseNEWLINE _log.warning("PyNaCl is not installed, voice will NOT be supported")NEWLINENEWLINE async def __aenter__(self) -> Self:NEWLINE await self._async_setup_hook()NEWLINE return selfNEWLINENEWLINE async def __aexit__(NEWLINE self,NEWLINE exc_type: Optional[Type[BaseException]],NEWLINE exc_value: Optional[BaseException],NEWLINE traceback: Optional[TracebackType],NEWLINE ) -> None:NEWLINE if not self.is_closed():NEWLINE await self.close()NEWLINENEWLINE # internalsNEWLINENEWLINE def _get_websocket(self, guild_id: Optional[int] = None, *, shard_id: Optional[int] = None) -> DiscordWebSocket:NEWLINE return self.wsNEWLINENEWLINE def _get_state(self, **options: Any) -> ConnectionState:NEWLINE return ConnectionState(dispatch=self.dispatch, handlers=self._handlers, hooks=self._hooks, http=self.http, **options)NEWLINENEWLINE def _handle_ready(self) -> None:NEWLINE self._ready.set()NEWLINENEWLINE @propertyNEWLINE def latency(self) -> float:NEWLINE """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.NEWLINENEWLINE This could be referred to as the Discord WebSocket protocol latency.NEWLINE """NEWLINE ws = self.wsNEWLINE return float('nan') if not ws else ws.latencyNEWLINENEWLINE def is_ws_ratelimited(self) -> bool:NEWLINE """:class:`bool`: Whether the websocket is currently rate 
limited.NEWLINENEWLINE This can be useful to know when deciding whether you should query membersNEWLINE using HTTP or via the gateway.NEWLINENEWLINE .. versionadded:: 1.6NEWLINE """NEWLINE return FalseNEWLINENEWLINE @propertyNEWLINE def user(self) -> Optional[ClientUser]:NEWLINE """Optional[:class:`.ClientUser`]: Represents the connected client. ``None`` if not logged in."""NEWLINE return self._connection.userNEWLINENEWLINE @propertyNEWLINE def guilds(self) -> List[Guild]:NEWLINE """List[:class:`.Guild`]: The guilds that the connected client is a member of."""NEWLINE return self._connection.guildsNEWLINENEWLINE @propertyNEWLINE def emojis(self) -> List[Emoji]:NEWLINE """List[:class:`.Emoji`]: The emojis that the connected client has."""NEWLINE return self._connection.emojisNEWLINENEWLINE @propertyNEWLINE def stickers(self) -> List[GuildSticker]:NEWLINE """List[:class:`.GuildSticker`]: The stickers that the connected client has.NEWLINENEWLINE .. versionadded:: 2.0NEWLINE """NEWLINE return self._connection.stickersNEWLINENEWLINE @propertyNEWLINE def cached_messages(self) -> Sequence[Message]:NEWLINE """Sequence[:class:`.Message`]: Read-only list of messages the connected client has cached.NEWLINENEWLINE .. versionadded:: 1.1NEWLINE """NEWLINE return utils.SequenceProxy(self._connection._messages or [])NEWLINENEWLINE @propertyNEWLINE def private_channels(self) -> List[PrivateChannel]:NEWLINE """List[:class:`.abc.PrivateChannel`]: The private channels that the connected client is participating on.NEWLINENEWLINE .. 
note::NEWLINENEWLINE This returns only up to 128 most recent private channels due to an internal workingNEWLINE on how Discord deals with private channels.NEWLINE """NEWLINE return self._connection.private_channelsNEWLINENEWLINE @propertyNEWLINE def voice_clients(self) -> List[VoiceProtocol]:NEWLINE """List[:class:`.VoiceProtocol`]: Represents a list of voice connections.NEWLINENEWLINE These are usually :class:`.VoiceClient` instances.NEWLINE """NEWLINE return self._connection.voice_clientsNEWLINENEWLINE @propertyNEWLINE def application_id(self) -> Optional[int]:NEWLINE """Optional[:class:`int`]: The client's application ID.NEWLINENEWLINE If this is not passed via ``__init__`` then this is retrievedNEWLINE through the gateway when an event contains the data. UsuallyNEWLINE after :func:`~discord.on_connect` is called.NEWLINENEWLINE .. versionadded:: 2.0NEWLINE """NEWLINE return self._connection.application_idNEWLINENEWLINE @propertyNEWLINE def application_flags(self) -> ApplicationFlags:NEWLINE """:class:`~discord.ApplicationFlags`: The client's application flags.NEWLINENEWLINE .. 
versionadded:: 2.0
        """
        return self._connection.application_flags

    def is_ready(self) -> bool:
        """:class:`bool`: Specifies if the client's internal cache is ready for use."""
        # _ready stays MISSING until _async_setup_hook() creates it under a running loop.
        return self._ready is not MISSING and self._ready.is_set()

    async def _run_event(
        self,
        coro: Callable[..., Coroutine[Any, Any, Any]],
        event_name: str,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        # Await a single event handler, routing any failure into on_error so a
        # misbehaving listener can never crash the dispatch machinery.
        try:
            await coro(*args, **kwargs)
        except asyncio.CancelledError:
            pass
        except Exception:
            try:
                await self.on_error(event_name, *args, **kwargs)
            except asyncio.CancelledError:
                pass

    def _schedule_event(
        self,
        coro: Callable[..., Coroutine[Any, Any, Any]],
        event_name: str,
        *args: Any,
        **kwargs: Any,
    ) -> asyncio.Task:
        # Wrap the handler so its exceptions are funneled through _run_event.
        wrapped = self._run_event(coro, event_name, *args, **kwargs)
        # Schedules the task
        return self.loop.create_task(wrapped, name=f'discord.py: {event_name}')

    def dispatch(self, event: str, *args: Any, **kwargs: Any) -> None:
        # Fan an event out to (a) one-shot wait_for() futures registered under
        # this event name and (b) the on_<event> coroutine, if one exists.
        _log.debug('Dispatching event %s', event)
        method = 'on_' + event

        listeners = self._listeners.get(event)
        if listeners:
            removed = []
            for i, (future, condition) in enumerate(listeners):
                if future.cancelled():
                    removed.append(i)
                    continue

                try:
                    result = condition(*args)
                except Exception as exc:
                    # A failing check wakes the waiter with the exception.
                    future.set_exception(exc)
                    removed.append(i)
                else:
                    if result:
                        # Mirror the event signature for the waiter: no args
                        # -> None, one arg -> the arg itself, many -> a tuple.
                        if len(args) == 0:
                            future.set_result(None)
                        elif len(args) == 1:
                            future.set_result(args[0])
                        else:
                            future.set_result(args)
                        removed.append(i)

            if len(removed) == len(listeners):
                self._listeners.pop(event)
            else:
                # Delete back-to-front so earlier indices remain valid.
                for idx in reversed(removed):
                    del listeners[idx]

        try:
            coro = getattr(self, method)
        except AttributeError:
            pass
        else:
            self._schedule_event(coro, method, *args, **kwargs)

    async def on_error(self, event_method: str, *args: Any, **kwargs: Any) -> None:
        """|coro|

        The default error handler provided by the client.

        By default this prints to :data:`sys.stderr` however it could be
        overridden to have a different implementation.
        Check :func:`~discord.on_error` for more details.
        """
        print(f'Ignoring exception in {event_method}', file=sys.stderr)
        traceback.print_exc()

    # hooks

    async def _call_before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:
        # This hook is an internal hook that actually calls the public one.
        # It allows the library to have its own hook without stepping on the
        # toes of those who need to override their own hook.
        await self.before_identify_hook(shard_id, initial=initial)

    async def before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:
        """|coro|

        A hook that is called before IDENTIFYing a session. This is useful
        if you wish to have more control over the synchronization of multiple
        IDENTIFYing clients.

        The default implementation does nothing.

        .. 
versionadded:: 1.4

        Parameters
        ------------
        shard_id: :class:`int`
            The shard ID that requested being IDENTIFY'd
        initial: :class:`bool`
            Whether this IDENTIFY is the first initial IDENTIFY.
        """

        pass

    async def _async_setup_hook(self) -> None:
        # Called whenever the client needs to initialise asyncio objects with a running loop
        loop = asyncio.get_running_loop()
        self.loop = loop
        self.http.loop = loop
        self._connection.loop = loop
        await self._connection.async_setup()

        # _ready was MISSING until now; is_ready()/wait_until_ready() rely on this.
        self._ready = asyncio.Event()

    async def setup_hook(self) -> None:
        """|coro|

        A coroutine to be called to setup the bot, by default this is blank.

        To perform asynchronous setup after the bot is logged in but before
        it has connected to the Websocket, overwrite this coroutine.

        This is only called once, in :meth:`login`, and will be called before
        any events are dispatched, making it a better solution than doing such
        setup in the :func:`~discord.on_ready` event.

        .. warning::

            Since this is called *before* the websocket connection is made therefore
            anything that waits for the websocket will deadlock, this includes things
            like :meth:`wait_for` and :meth:`wait_until_ready`.

        .. versionadded:: 2.0
        """
        pass

    # login state management

    async def login(self, token: str) -> None:
        """|coro|

        Logs in the client with the specified credentials and
        calls the :meth:`setup_hook`.


        Parameters
        -----------
        token: :class:`str`
            The authentication token. Do not prefix this token with
            anything as the library will do it for you.

        Raises
        ------
        LoginFailure
            The wrong credentials are passed.
        HTTPException
            An unknown HTTP related error occurred,
            usually when it isn't 200 or the known incorrect credentials
            passing status code.
        """

        _log.info('logging in using static token')

        # Binds self.loop/self.http.loop and creates self._ready.
        await self._async_setup_hook()

        data = await self.http.static_login(token.strip())
        self._connection.user = ClientUser(state=self._connection, data=data)
        await self.setup_hook()

    async def connect(self, *, reconnect: bool = True) -> None:
        """|coro|

        Creates a websocket connection and lets the websocket listen
        to messages from Discord. This is a loop that runs the entire
        event system and miscellaneous aspects of the library. Control
        is not resumed until the WebSocket connection is terminated.

        Parameters
        -----------
        reconnect: :class:`bool`
            If we should attempt reconnecting, either due to internet
            failure or a specific failure on Discord's part. Certain
            disconnects that lead to bad state will not be handled (such as
            invalid sharding payloads or bad tokens).

        Raises
        -------
        GatewayNotFound
            If the gateway to connect to Discord is not found. 
Usually if thisNEWLINE is thrown then there is a Discord API outage.NEWLINE ConnectionClosedNEWLINE The websocket connection has been terminated.NEWLINE """NEWLINENEWLINE backoff = ExponentialBackoff()NEWLINE ws_params = {NEWLINE 'initial': True,NEWLINE 'shard_id': self.shard_id,NEWLINE }NEWLINE while not self.is_closed():NEWLINE try:NEWLINE coro = DiscordWebSocket.from_client(self, **ws_params)NEWLINE self.ws = await asyncio.wait_for(coro, timeout=60.0)NEWLINE ws_params['initial'] = FalseNEWLINE while True:NEWLINE await self.ws.poll_event()NEWLINE except ReconnectWebSocket as e:NEWLINE _log.info('Got a request to %s the websocket.', e.op)NEWLINE self.dispatch('disconnect')NEWLINE ws_params.update(sequence=self.ws.sequence, resume=e.resume, session=self.ws.session_id)NEWLINE continueNEWLINE except (NEWLINE OSError,NEWLINE HTTPException,NEWLINE GatewayNotFound,NEWLINE ConnectionClosed,NEWLINE aiohttp.ClientError,NEWLINE asyncio.TimeoutError,NEWLINE ) as exc:NEWLINENEWLINE self.dispatch('disconnect')NEWLINE if not reconnect:NEWLINE await self.close()NEWLINE if isinstance(exc, ConnectionClosed) and exc.code == 1000:NEWLINE # clean close, don't re-raise thisNEWLINE returnNEWLINE raiseNEWLINENEWLINE if self.is_closed():NEWLINE returnNEWLINENEWLINE # If we get connection reset by peer then try to RESUMENEWLINE if isinstance(exc, OSError) and exc.errno in (54, 10054):NEWLINE ws_params.update(sequence=self.ws.sequence, initial=False, resume=True, session=self.ws.session_id)NEWLINE continueNEWLINENEWLINE # We should only get this when an unhandled close code happens,NEWLINE # such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc)NEWLINE # sometimes, discord sends us 1000 for unknown reasons so we should reconnectNEWLINE # regardless and rely on is_closed insteadNEWLINE if isinstance(exc, ConnectionClosed):NEWLINE if exc.code == 4014:NEWLINE raise PrivilegedIntentsRequired(exc.shard_id) from NoneNEWLINE if exc.code != 1000:NEWLINE await 
self.close()NEWLINE raiseNEWLINENEWLINE retry = backoff.delay()NEWLINE _log.exception("Attempting a reconnect in %.2fs", retry)NEWLINE await asyncio.sleep(retry)NEWLINE # Always try to RESUME the connectionNEWLINE # If the connection is not RESUME-able then the gateway will invalidate the session.NEWLINE # This is apparently what the official Discord client does.NEWLINE ws_params.update(sequence=self.ws.sequence, resume=True, session=self.ws.session_id)NEWLINENEWLINE async def close(self) -> None:NEWLINE """|coro|NEWLINENEWLINE Closes the connection to Discord.NEWLINE """NEWLINE if self._closed:NEWLINE returnNEWLINENEWLINE self._closed = TrueNEWLINENEWLINE for voice in self.voice_clients:NEWLINE try:NEWLINE await voice.disconnect(force=True)NEWLINE except Exception:NEWLINE # if an error happens during disconnects, disregard it.NEWLINE passNEWLINENEWLINE if self.ws is not None and self.ws.open:NEWLINE await self.ws.close(code=1000)NEWLINENEWLINE await self.http.close()NEWLINENEWLINE if self._ready is not MISSING:NEWLINE self._ready.clear()NEWLINENEWLINE self.loop = MISSINGNEWLINENEWLINE def clear(self) -> None:NEWLINE """Clears the internal state of the bot.NEWLINENEWLINE After this, the bot can be considered "re-opened", i.e. 
:meth:`is_closed`NEWLINE and :meth:`is_ready` both return ``False`` along with the bot's internalNEWLINE cache cleared.NEWLINE """NEWLINE self._closed = FalseNEWLINE self._ready.clear()NEWLINE self._connection.clear()NEWLINE self.http.recreate()NEWLINENEWLINE async def start(self, token: str, *, reconnect: bool = True) -> None:NEWLINE """|coro|NEWLINENEWLINE A shorthand coroutine for :meth:`login` + :meth:`connect`.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE TypeErrorNEWLINE An unexpected keyword argument was received.NEWLINE """NEWLINE await self.login(token)NEWLINE await self.connect(reconnect=reconnect)NEWLINENEWLINE def run(self, *args: Any, **kwargs: Any) -> None:NEWLINE """A blocking call that abstracts away the event loopNEWLINE initialisation from you.NEWLINENEWLINE If you want more control over the event loop then thisNEWLINE function should not be used. Use :meth:`start` coroutineNEWLINE or :meth:`connect` + :meth:`login`.NEWLINENEWLINE Roughly Equivalent to: ::NEWLINENEWLINE try:NEWLINE asyncio.run(self.start(*args, **kwargs))NEWLINE except KeyboardInterrupt:NEWLINE returnNEWLINENEWLINE .. warning::NEWLINENEWLINE This function must be the last function to call due to the fact that itNEWLINE is blocking. 
That means that registration of events or anything being
            called after this function call will not execute until it returns.
        """

        async def runner():
            async with self:
                await self.start(*args, **kwargs)

        try:
            asyncio.run(runner())
        except KeyboardInterrupt:
            # nothing to do here
            # `asyncio.run` handles the loop cleanup
            # and `self.start` closes all sockets and the HTTPClient instance.
            return

    # properties

    def is_closed(self) -> bool:
        """:class:`bool`: Indicates if the websocket connection is closed."""
        return self._closed

    @property
    def activity(self) -> Optional[ActivityTypes]:
        """Optional[:class:`.BaseActivity`]: The activity being used upon
        logging in.
        """
        return create_activity(self._connection._activity, self._connection)

    @activity.setter
    def activity(self, value: Optional[ActivityTypes]) -> None:
        if value is None:
            self._connection._activity = None
        elif isinstance(value, BaseActivity):
            # ConnectionState._activity is typehinted as ActivityPayload, we're passing Dict[str, Any]
            self._connection._activity = value.to_dict()  # type: ignore
        else:
            raise TypeError('activity must derive from BaseActivity.')

    @property
    def status(self) -> Status:
        """:class:`.Status`:
        The status being used upon logging on to Discord.

        .. versionadded:: 2.0
        """
        # Fall back to online when the raw status string is not a known Status value.
        if self._connection._status in set(state.value for state in Status):
            return Status(self._connection._status)
        return Status.online

    @status.setter
    def status(self, value: Status) -> None:
        # The gateway represents offline as 'invisible'.
        if value is Status.offline:
            self._connection._status = 'invisible'
        elif isinstance(value, Status):
            self._connection._status = str(value)
        else:
            raise TypeError('status must derive from Status.')

    @property
    def allowed_mentions(self) -> Optional[AllowedMentions]:
        """Optional[:class:`~discord.AllowedMentions`]: The allowed mention configuration.

        .. versionadded:: 1.4
        """
        return self._connection.allowed_mentions

    @allowed_mentions.setter
    def allowed_mentions(self, value: Optional[AllowedMentions]) -> None:
        if value is None or isinstance(value, AllowedMentions):
            self._connection.allowed_mentions = value
        else:
            raise TypeError(f'allowed_mentions must be AllowedMentions not {value.__class__!r}')

    @property
    def intents(self) -> Intents:
        """:class:`~discord.Intents`: The intents configured for this connection.

        .. versionadded:: 1.5
        """
        return self._connection.intents

    # helpers/getters

    @property
    def users(self) -> List[User]:
        """List[:class:`~discord.User`]: Returns a list of all the users the bot can see."""
        return list(self._connection._users.values())

    def get_channel(self, id: int, /) -> Optional[Union[GuildChannel, Thread, PrivateChannel]]:
        """Returns a channel or thread with the given ID.

        .. 
versionchanged:: 2.0NEWLINENEWLINE ``id`` parameter is now positional-only.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE id: :class:`int`NEWLINE The ID to search for.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Optional[Union[:class:`.abc.GuildChannel`, :class:`.Thread`, :class:`.abc.PrivateChannel`]]NEWLINE The returned channel or ``None`` if not found.NEWLINE """NEWLINE return self._connection.get_channel(id) # type: ignore - The cache contains all channel typesNEWLINENEWLINE def get_partial_messageable(self, id: int, *, type: Optional[ChannelType] = None) -> PartialMessageable:NEWLINE """Returns a partial messageable with the given channel ID.NEWLINENEWLINE This is useful if you have a channel_id but don't want to do an API callNEWLINE to send messages to it.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE id: :class:`int`NEWLINE The channel ID to create a partial messageable for.NEWLINE type: Optional[:class:`.ChannelType`]NEWLINE The underlying channel type for the partial messageable.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`.PartialMessageable`NEWLINE The partial messageableNEWLINE """NEWLINE return PartialMessageable(state=self._connection, id=id, type=type)NEWLINENEWLINE def get_stage_instance(self, id: int, /) -> Optional[StageInstance]:NEWLINE """Returns a stage instance with the given stage channel ID.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE id: :class:`int`NEWLINE The ID to search for.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Optional[:class:`.StageInstance`]NEWLINE The stage instance or ``None`` if not found.NEWLINE """NEWLINE from .channel import StageChannelNEWLINENEWLINE channel = self._connection.get_channel(id)NEWLINENEWLINE if isinstance(channel, StageChannel):NEWLINE return channel.instanceNEWLINENEWLINE def get_guild(self, id: int, /) -> Optional[Guild]:NEWLINE """Returns a guild with the given ID.NEWLINENEWLINE .. 
versionchanged:: 2.0NEWLINENEWLINE ``id`` parameter is now positional-only.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE id: :class:`int`NEWLINE The ID to search for.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Optional[:class:`.Guild`]NEWLINE The guild or ``None`` if not found.NEWLINE """NEWLINE return self._connection._get_guild(id)NEWLINENEWLINE def get_user(self, id: int, /) -> Optional[User]:NEWLINE """Returns a user with the given ID.NEWLINENEWLINE .. versionchanged:: 2.0NEWLINENEWLINE ``id`` parameter is now positional-only.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE id: :class:`int`NEWLINE The ID to search for.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Optional[:class:`~discord.User`]NEWLINE The user or ``None`` if not found.NEWLINE """NEWLINE return self._connection.get_user(id)NEWLINENEWLINE def get_emoji(self, id: int, /) -> Optional[Emoji]:NEWLINE """Returns an emoji with the given ID.NEWLINENEWLINE .. versionchanged:: 2.0NEWLINENEWLINE ``id`` parameter is now positional-only.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE id: :class:`int`NEWLINE The ID to search for.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Optional[:class:`.Emoji`]NEWLINE The custom emoji or ``None`` if not found.NEWLINE """NEWLINE return self._connection.get_emoji(id)NEWLINENEWLINE def get_sticker(self, id: int, /) -> Optional[GuildSticker]:NEWLINE """Returns a guild sticker with the given ID.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE .. 
note::NEWLINENEWLINE To retrieve standard stickers, use :meth:`.fetch_sticker`.NEWLINE or :meth:`.fetch_premium_sticker_packs`.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Optional[:class:`.GuildSticker`]NEWLINE The sticker or ``None`` if not found.NEWLINE """NEWLINE return self._connection.get_sticker(id)NEWLINENEWLINE def get_all_channels(self) -> Generator[GuildChannel, None, None]:NEWLINE """A generator that retrieves every :class:`.abc.GuildChannel` the client can 'access'.NEWLINENEWLINE This is equivalent to: ::NEWLINENEWLINE for guild in client.guilds:NEWLINE for channel in guild.channels:NEWLINE yield channelNEWLINENEWLINE .. note::NEWLINENEWLINE Just because you receive a :class:`.abc.GuildChannel` does not mean thatNEWLINE you can communicate in said channel. :meth:`.abc.GuildChannel.permissions_for` shouldNEWLINE be used for that.NEWLINENEWLINE YieldsNEWLINE ------NEWLINE :class:`.abc.GuildChannel`NEWLINE A channel the client can 'access'.NEWLINE """NEWLINENEWLINE for guild in self.guilds:NEWLINE yield from guild.channelsNEWLINENEWLINE def get_all_members(self) -> Generator[Member, None, None]:NEWLINE """Returns a generator with every :class:`.Member` the client can see.NEWLINENEWLINE This is equivalent to: ::NEWLINENEWLINE for guild in client.guilds:NEWLINE for member in guild.members:NEWLINE yield memberNEWLINENEWLINE YieldsNEWLINE ------NEWLINE :class:`.Member`NEWLINE A member the client can see.NEWLINE """NEWLINE for guild in self.guilds:NEWLINE yield from guild.membersNEWLINENEWLINE # listeners/waitersNEWLINENEWLINE async def wait_until_ready(self) -> None:NEWLINE """|coro|NEWLINENEWLINE Waits until the client's internal cache is all ready.NEWLINENEWLINE .. 
warning::NEWLINENEWLINE Calling this inside :meth:`setup_hook` can lead to a deadlock.NEWLINE """NEWLINE if self._ready is not MISSING:NEWLINE await self._ready.wait()NEWLINENEWLINE def wait_for(NEWLINE self,NEWLINE event: str,NEWLINE *,NEWLINE check: Optional[Callable[..., bool]] = None,NEWLINE timeout: Optional[float] = None,NEWLINE ) -> Any:NEWLINE """|coro|NEWLINENEWLINE Waits for a WebSocket event to be dispatched.NEWLINENEWLINE This could be used to wait for a user to reply to a message,NEWLINE or to react to a message, or to edit a message in a self-containedNEWLINE way.NEWLINENEWLINE The ``timeout`` parameter is passed onto :func:`asyncio.wait_for`. By default,NEWLINE it does not timeout. Note that this does propagate theNEWLINE :exc:`asyncio.TimeoutError` for you in case of timeout and is provided forNEWLINE ease of use.NEWLINENEWLINE In case the event returns multiple arguments, a :class:`tuple` containing thoseNEWLINE arguments is returned instead. Please check theNEWLINE :ref:`documentation <discord-api-events>` for a list of events and theirNEWLINE parameters.NEWLINENEWLINE This function returns the **first event that meets the requirements**.NEWLINENEWLINE ExamplesNEWLINE ---------NEWLINENEWLINE Waiting for a user reply: ::NEWLINENEWLINE @client.eventNEWLINE async def on_message(message):NEWLINE if message.content.startswith('$greet'):NEWLINE channel = message.channelNEWLINE await channel.send('Say hello!')NEWLINENEWLINE def check(m):NEWLINE return m.content == 'hello' and m.channel == channelNEWLINENEWLINE msg = await client.wait_for('message', check=check)NEWLINE await channel.send(f'Hello {msg.author}!')NEWLINENEWLINE Waiting for a thumbs up reaction from the message author: ::NEWLINENEWLINE @client.eventNEWLINE async def on_message(message):NEWLINE if message.content.startswith('$thumb'):NEWLINE channel = message.channelNEWLINE await channel.send('Send me that \N{THUMBS UP SIGN} reaction, mate')NEWLINENEWLINE def check(reaction, user):NEWLINE 
return user == message.author and str(reaction.emoji) == '\N{THUMBS UP SIGN}'

                    try:
                        reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check)
                    except asyncio.TimeoutError:
                        await channel.send('\N{THUMBS DOWN SIGN}')
                    else:
                        await channel.send('\N{THUMBS UP SIGN}')


        Parameters
        ------------
        event: :class:`str`
            The event name, similar to the :ref:`event reference <discord-api-events>`,
            but without the ``on_`` prefix, to wait for.
        check: Optional[Callable[..., :class:`bool`]]
            A predicate to check what to wait for. The arguments must meet the
            parameters of the event being waited for.
        timeout: Optional[:class:`float`]
            The number of seconds to wait before timing out and raising
            :exc:`asyncio.TimeoutError`.

        Raises
        -------
        asyncio.TimeoutError
            If a timeout is provided and it was reached.

        Returns
        --------
        Any
            Returns no arguments, a single argument, or a :class:`tuple` of multiple
            arguments that mirrors the parameters passed in the
            :ref:`event reference <discord-api-events>`.
        """

        future = self.loop.create_future()
        if check is None:
            # With no predicate supplied, the first dispatched event wins.

            def _check(*args):
                return True

            check = _check

        ev = event.lower()
        try:
            listeners = self._listeners[ev]
        except KeyError:
            listeners = []
            self._listeners[ev] = listeners

        # dispatch() resolves (or removes) this future when the event fires.
        listeners.append((future, check))
        return asyncio.wait_for(future, timeout)

    # event registration

    def event(self, coro: Coro) -> Coro:
        """A decorator that registers an event to listen to.

        You can find more info about the events on the :ref:`documentation below <discord-api-events>`.

        The events must be a :ref:`coroutine <coroutine>`, if not, :exc:`TypeError` is raised.

        Example
        ---------

        .. code-block:: python3

            @client.event
            async def on_ready():
                print('Ready!')

        Raises
        --------
        TypeError
            The coroutine passed is not actually a coroutine.
        """

        if not asyncio.iscoroutinefunction(coro):
            raise TypeError('event registered must be a coroutine function')

        # Bind the coroutine on the instance under its own name so dispatch()
        # finds it via getattr(self, 'on_<event>').
        setattr(self, coro.__name__, coro)
        _log.debug('%s has successfully been registered as an event', coro.__name__)
        return coro

    async def change_presence(
        self,
        *,
        activity: Optional[BaseActivity] = None,
        status: Optional[Status] = None,
    ) -> None:
        """|coro|

        Changes the client's presence.

        Example
        ---------

        .. code-block:: python3

            game = discord.Game("with the API")
            await client.change_presence(status=discord.Status.idle, activity=game)

        .. versionchanged:: 2.0
            Removed the ``afk`` keyword-only parameter.

        .. versionchanged:: 2.0
            This function will now raise :exc:`TypeError` instead of
            ``InvalidArgument``.

        Parameters
        ----------
        activity: Optional[:class:`.BaseActivity`]
            The activity being done. ``None`` if no currently active activity is done.
        status: Optional[:class:`.Status`]
            Indicates what status to change to. 
If ``None``, thenNEWLINE :attr:`.Status.online` is used.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE TypeErrorNEWLINE If the ``activity`` parameter is not the proper type.NEWLINE """NEWLINENEWLINE if status is None:NEWLINE status_str = 'online'NEWLINE status = Status.onlineNEWLINE elif status is Status.offline:NEWLINE status_str = 'invisible'NEWLINE status = Status.offlineNEWLINE else:NEWLINE status_str = str(status)NEWLINENEWLINE await self.ws.change_presence(activity=activity, status=status_str)NEWLINENEWLINE for guild in self._connection.guilds:NEWLINE me = guild.meNEWLINE if me is None:NEWLINE continueNEWLINENEWLINE if activity is not None:NEWLINE me.activities = (activity,) # type: ignore - Type checker does not understand the downcast hereNEWLINE else:NEWLINE me.activities = ()NEWLINENEWLINE me.status = statusNEWLINENEWLINE # Guild stuffNEWLINENEWLINE async def fetch_guilds(NEWLINE self,NEWLINE *,NEWLINE limit: Optional[int] = 100,NEWLINE before: Optional[SnowflakeTime] = None,NEWLINE after: Optional[SnowflakeTime] = None,NEWLINE ) -> AsyncIterator[Guild]:NEWLINE """Retrieves an :term:`asynchronous iterator` that enables receiving your guilds.NEWLINENEWLINE .. note::NEWLINENEWLINE Using this, you will only receive :attr:`.Guild.owner`, :attr:`.Guild.icon`,NEWLINE :attr:`.Guild.id`, and :attr:`.Guild.name` per :class:`.Guild`.NEWLINENEWLINE .. note::NEWLINENEWLINE This method is an API call. 
For general usage, consider :attr:`guilds` instead.NEWLINENEWLINE ExamplesNEWLINE ---------NEWLINENEWLINE Usage ::NEWLINENEWLINE async for guild in client.fetch_guilds(limit=150):NEWLINE print(guild.name)NEWLINENEWLINE Flattening into a list ::NEWLINENEWLINE guilds = [guild async for guild in client.fetch_guilds(limit=150)]NEWLINE # guilds is now a list of Guild...NEWLINENEWLINE All parameters are optional.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE limit: Optional[:class:`int`]NEWLINE The number of guilds to retrieve.NEWLINE If ``None``, it retrieves every guild you have access to. Note, however,NEWLINE that this would make it a slow operation.NEWLINE Defaults to ``100``.NEWLINE before: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]NEWLINE Retrieves guilds before this date or object.NEWLINE If a datetime is provided, it is recommended to use a UTC aware datetime.NEWLINE If the datetime is naive, it is assumed to be local time.NEWLINE after: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]NEWLINE Retrieve guilds after this date or object.NEWLINE If a datetime is provided, it is recommended to use a UTC aware datetime.NEWLINE If the datetime is naive, it is assumed to be local time.NEWLINENEWLINE RaisesNEWLINE ------NEWLINE HTTPExceptionNEWLINE Getting the guilds failed.NEWLINENEWLINE YieldsNEWLINE --------NEWLINE :class:`.Guild`NEWLINE The guild with the guild data parsed.NEWLINE """NEWLINENEWLINE async def _before_strategy(retrieve, before, limit):NEWLINE before_id = before.id if before else NoneNEWLINE data = await self.http.get_guilds(retrieve, before=before_id)NEWLINENEWLINE if data:NEWLINE if limit is not None:NEWLINE limit -= len(data)NEWLINENEWLINE before = Object(id=int(data[-1]['id']))NEWLINENEWLINE return data, before, limitNEWLINENEWLINE async def _after_strategy(retrieve, after, limit):NEWLINE after_id = after.id if after else NoneNEWLINE data = await self.http.get_guilds(retrieve, after=after_id)NEWLINENEWLINE if 
data:NEWLINE if limit is not None:NEWLINE limit -= len(data)NEWLINENEWLINE after = Object(id=int(data[0]['id']))NEWLINENEWLINE return data, after, limitNEWLINENEWLINE if isinstance(before, datetime.datetime):NEWLINE before = Object(id=time_snowflake(before, high=False))NEWLINE if isinstance(after, datetime.datetime):NEWLINE after = Object(id=time_snowflake(after, high=True))NEWLINENEWLINE predicate: Optional[Callable[[GuildPayload], bool]] = NoneNEWLINE strategy, state = _before_strategy, beforeNEWLINENEWLINE if before and after:NEWLINE predicate = lambda m: int(m['id']) > after.idNEWLINE elif after:NEWLINE strategy, state = _after_strategy, afterNEWLINENEWLINE while True:NEWLINE retrieve = min(100 if limit is None else limit, 100)NEWLINE if retrieve < 1:NEWLINE returnNEWLINENEWLINE data, state, limit = await strategy(retrieve, state, limit)NEWLINENEWLINE # Terminate loop on next iteration; there's no data left after thisNEWLINE if len(data) < 100:NEWLINE limit = 0NEWLINENEWLINE if predicate:NEWLINE data = filter(predicate, data)NEWLINENEWLINE for raw_guild in data:NEWLINE yield Guild(state=self._connection, data=raw_guild)NEWLINENEWLINE async def fetch_template(self, code: Union[Template, str]) -> Template:NEWLINE """|coro|NEWLINENEWLINE Gets a :class:`.Template` from a discord.new URL or code.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE code: Union[:class:`.Template`, :class:`str`]NEWLINE The Discord Template Code or URL (must be a discord.new URL).NEWLINENEWLINE RaisesNEWLINE -------NEWLINE NotFoundNEWLINE The template is invalid.NEWLINE HTTPExceptionNEWLINE Getting the template failed.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`.Template`NEWLINE The template from the URL/code.NEWLINE """NEWLINE code = utils.resolve_template(code)NEWLINE data = await self.http.get_template(code)NEWLINE return Template(data=data, state=self._connection)NEWLINENEWLINE async def fetch_guild(self, guild_id: int, /, *, with_counts: bool = True) -> Guild:NEWLINE 
"""|coro|NEWLINENEWLINE Retrieves a :class:`.Guild` from an ID.NEWLINENEWLINE .. note::NEWLINENEWLINE Using this, you will **not** receive :attr:`.Guild.channels`, :attr:`.Guild.members`,NEWLINE :attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.NEWLINENEWLINE .. note::NEWLINENEWLINE This method is an API call. For general usage, consider :meth:`get_guild` instead.NEWLINENEWLINE .. versionchanged:: 2.0NEWLINENEWLINE ``guild_id`` parameter is now positional-only.NEWLINENEWLINENEWLINE ParametersNEWLINE -----------NEWLINE guild_id: :class:`int`NEWLINE The guild's ID to fetch from.NEWLINE with_counts: :class:`bool`NEWLINE Whether to include count information in the guild. This fills theNEWLINE :attr:`.Guild.approximate_member_count` and :attr:`.Guild.approximate_presence_count`NEWLINE attributes without needing any privileged intents. Defaults to ``True``.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE RaisesNEWLINE ------NEWLINE ForbiddenNEWLINE You do not have access to the guild.NEWLINE HTTPExceptionNEWLINE Getting the guild failed.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`.Guild`NEWLINE The guild from the ID.NEWLINE """NEWLINE data = await self.http.get_guild(guild_id, with_counts=with_counts)NEWLINE return Guild(data=data, state=self._connection)NEWLINENEWLINE async def create_guild(NEWLINE self,NEWLINE *,NEWLINE name: str,NEWLINE icon: bytes = MISSING,NEWLINE code: str = MISSING,NEWLINE ) -> Guild:NEWLINE """|coro|NEWLINENEWLINE Creates a :class:`.Guild`.NEWLINENEWLINE Bot accounts in more than 10 guilds are not allowed to create guilds.NEWLINENEWLINE .. versionchanged:: 2.0NEWLINE ``name`` and ``icon`` parameters are now keyword-only. The `region`` parameter has been removed.NEWLINENEWLINE .. 
versionchanged:: 2.0NEWLINE This function will now raise :exc:`ValueError` instead ofNEWLINE ``InvalidArgument``.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE name: :class:`str`NEWLINE The name of the guild.NEWLINE icon: Optional[:class:`bytes`]NEWLINE The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit`NEWLINE for more details on what is expected.NEWLINE code: :class:`str`NEWLINE The code for a template to create the guild with.NEWLINENEWLINE .. versionadded:: 1.4NEWLINENEWLINE RaisesNEWLINE ------NEWLINE HTTPExceptionNEWLINE Guild creation failed.NEWLINE ValueErrorNEWLINE Invalid icon image format given. Must be PNG or JPG.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE :class:`.Guild`NEWLINE The guild created. This is not the same guild that isNEWLINE added to cache.NEWLINE """NEWLINE if icon is not MISSING:NEWLINE icon_base64 = utils._bytes_to_base64_data(icon)NEWLINE else:NEWLINE icon_base64 = NoneNEWLINENEWLINE if code:NEWLINE data = await self.http.create_from_template(code, name, icon_base64)NEWLINE else:NEWLINE data = await self.http.create_guild(name, icon_base64)NEWLINE return Guild(data=data, state=self._connection)NEWLINENEWLINE async def fetch_stage_instance(self, channel_id: int, /) -> StageInstance:NEWLINE """|coro|NEWLINENEWLINE Gets a :class:`.StageInstance` for a stage channel id.NEWLINENEWLINE .. 
versionadded:: 2.0NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE channel_id: :class:`int`NEWLINE The stage channel ID.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE NotFoundNEWLINE The stage instance or channel could not be found.NEWLINE HTTPExceptionNEWLINE Getting the stage instance failed.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`.StageInstance`NEWLINE The stage instance from the stage channel ID.NEWLINE """NEWLINE data = await self.http.get_stage_instance(channel_id)NEWLINE guild = self.get_guild(int(data['guild_id']))NEWLINE # Guild can technically be None here but this is being explicitly silenced right now.NEWLINE return StageInstance(guild=guild, state=self._connection, data=data) # type: ignoreNEWLINENEWLINE # Invite managementNEWLINENEWLINE async def fetch_invite(NEWLINE self,NEWLINE url: Union[Invite, str],NEWLINE *,NEWLINE with_counts: bool = True,NEWLINE with_expiration: bool = True,NEWLINE scheduled_event_id: Optional[int] = None,NEWLINE ) -> Invite:NEWLINE """|coro|NEWLINENEWLINE Gets an :class:`.Invite` from a discord.gg URL or ID.NEWLINENEWLINE .. note::NEWLINENEWLINE If the invite is for a guild you have not joined, the guild and channelNEWLINE attributes of the returned :class:`.Invite` will be :class:`.PartialInviteGuild` andNEWLINE :class:`.PartialInviteChannel` respectively.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE url: Union[:class:`.Invite`, :class:`str`]NEWLINE The Discord invite ID or URL (must be a discord.gg URL).NEWLINE with_counts: :class:`bool`NEWLINE Whether to include count information in the invite. This fills theNEWLINE :attr:`.Invite.approximate_member_count` and :attr:`.Invite.approximate_presence_count`NEWLINE fields.NEWLINE with_expiration: :class:`bool`NEWLINE Whether to include the expiration date of the invite. This fills theNEWLINE :attr:`.Invite.expires_at` field.NEWLINENEWLINE .. 
versionadded:: 2.0NEWLINE scheduled_event_id: Optional[:class:`int`]NEWLINE The ID of the scheduled event this invite is for.NEWLINENEWLINE .. note::NEWLINENEWLINE It is not possible to provide a url that contains an ``event_id`` parameterNEWLINE when using this parameter.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE RaisesNEWLINE -------NEWLINE ValueErrorNEWLINE The url contains an ``event_id``, but ``scheduled_event_id`` has also been provided.NEWLINE NotFoundNEWLINE The invite has expired or is invalid.NEWLINE HTTPExceptionNEWLINE Getting the invite failed.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`.Invite`NEWLINE The invite from the URL/ID.NEWLINE """NEWLINENEWLINE resolved = utils.resolve_invite(url)NEWLINENEWLINE if scheduled_event_id and resolved.event:NEWLINE raise ValueError('Cannot specify scheduled_event_id and contain an event_id in the url.')NEWLINENEWLINE scheduled_event_id = scheduled_event_id or resolved.eventNEWLINENEWLINE data = await self.http.get_invite(NEWLINE resolved.code,NEWLINE with_counts=with_counts,NEWLINE with_expiration=with_expiration,NEWLINE guild_scheduled_event_id=scheduled_event_id,NEWLINE )NEWLINE return Invite.from_incomplete(state=self._connection, data=data)NEWLINENEWLINE async def delete_invite(self, invite: Union[Invite, str], /) -> None:NEWLINE """|coro|NEWLINENEWLINE Revokes an :class:`.Invite`, URL, or ID to an invite.NEWLINENEWLINE You must have the :attr:`~.Permissions.manage_channels` permission inNEWLINE the associated guild to do this.NEWLINENEWLINE .. 
versionchanged:: 2.0NEWLINENEWLINE ``invite`` parameter is now positional-only.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE invite: Union[:class:`.Invite`, :class:`str`]NEWLINE The invite to revoke.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE ForbiddenNEWLINE You do not have permissions to revoke invites.NEWLINE NotFoundNEWLINE The invite is invalid or expired.NEWLINE HTTPExceptionNEWLINE Revoking the invite failed.NEWLINE """NEWLINENEWLINE resolved = utils.resolve_invite(invite)NEWLINE await self.http.delete_invite(resolved.code)NEWLINENEWLINE # Miscellaneous stuffNEWLINENEWLINE async def fetch_widget(self, guild_id: int, /) -> Widget:NEWLINE """|coro|NEWLINENEWLINE Gets a :class:`.Widget` from a guild ID.NEWLINENEWLINE .. note::NEWLINENEWLINE The guild must have the widget enabled to get this information.NEWLINENEWLINE .. versionchanged:: 2.0NEWLINENEWLINE ``guild_id`` parameter is now positional-only.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE guild_id: :class:`int`NEWLINE The ID of the guild.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE ForbiddenNEWLINE The widget for this guild is disabled.NEWLINE HTTPExceptionNEWLINE Retrieving the widget failed.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`.Widget`NEWLINE The guild's widget.NEWLINE """NEWLINE data = await self.http.get_widget(guild_id)NEWLINENEWLINE return Widget(state=self._connection, data=data)NEWLINENEWLINE async def application_info(self) -> AppInfo:NEWLINE """|coro|NEWLINENEWLINE Retrieves the bot's application information.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE HTTPExceptionNEWLINE Retrieving the information failed somehow.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`.AppInfo`NEWLINE The bot's application information.NEWLINE """NEWLINE data = await self.http.application_info()NEWLINE if 'rpc_origins' not in data:NEWLINE data['rpc_origins'] = NoneNEWLINE return AppInfo(self._connection, data)NEWLINENEWLINE async def fetch_user(self, user_id: int, /) -> User:NEWLINE 
"""|coro|NEWLINENEWLINE Retrieves a :class:`~discord.User` based on their ID.NEWLINE You do not have to share any guilds with the user to get this information,NEWLINE however many operations do require that you do.NEWLINENEWLINE .. note::NEWLINENEWLINE This method is an API call. If you have :attr:`discord.Intents.members` and member cache enabled, consider :meth:`get_user` instead.NEWLINENEWLINE .. versionchanged:: 2.0NEWLINENEWLINE ``user_id`` parameter is now positional-only.NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE user_id: :class:`int`NEWLINE The user's ID to fetch from.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE NotFoundNEWLINE A user with this ID does not exist.NEWLINE HTTPExceptionNEWLINE Fetching the user failed.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE :class:`~discord.User`NEWLINE The user you requested.NEWLINE """NEWLINE data = await self.http.get_user(user_id)NEWLINE return User(state=self._connection, data=data)NEWLINENEWLINE async def fetch_channel(self, channel_id: int, /) -> Union[GuildChannel, PrivateChannel, Thread]:NEWLINE """|coro|NEWLINENEWLINE Retrieves a :class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, or :class:`.Thread` with the specified ID.NEWLINENEWLINE .. note::NEWLINENEWLINE This method is an API call. For general usage, consider :meth:`get_channel` instead.NEWLINENEWLINE .. versionadded:: 1.2NEWLINENEWLINE .. 
versionchanged:: 2.0NEWLINENEWLINE ``channel_id`` parameter is now positional-only.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE InvalidDataNEWLINE An unknown channel type was received from Discord.NEWLINE HTTPExceptionNEWLINE Retrieving the channel failed.NEWLINE NotFoundNEWLINE Invalid Channel ID.NEWLINE ForbiddenNEWLINE You do not have permission to fetch this channel.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, :class:`.Thread`]NEWLINE The channel from the ID.NEWLINE """NEWLINE data = await self.http.get_channel(channel_id)NEWLINENEWLINE factory, ch_type = _threaded_channel_factory(data['type'])NEWLINE if factory is None:NEWLINE raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data))NEWLINENEWLINE if ch_type in (ChannelType.group, ChannelType.private):NEWLINE # the factory will be a DMChannel or GroupChannel hereNEWLINE channel = factory(me=self.user, data=data, state=self._connection) # type: ignoreNEWLINE else:NEWLINE # the factory can't be a DMChannel or GroupChannel hereNEWLINE guild_id = int(data['guild_id']) # type: ignoreNEWLINE guild = self.get_guild(guild_id) or Object(id=guild_id)NEWLINE # GuildChannels expect a Guild, we may be passing an ObjectNEWLINE channel = factory(guild=guild, state=self._connection, data=data) # type: ignoreNEWLINENEWLINE return channelNEWLINENEWLINE async def fetch_webhook(self, webhook_id: int, /) -> Webhook:NEWLINE """|coro|NEWLINENEWLINE Retrieves a :class:`.Webhook` with the specified ID.NEWLINENEWLINE .. 
versionchanged:: 2.0NEWLINENEWLINE ``webhook_id`` parameter is now positional-only.NEWLINENEWLINE RaisesNEWLINE --------NEWLINE HTTPExceptionNEWLINE Retrieving the webhook failed.NEWLINE NotFoundNEWLINE Invalid webhook ID.NEWLINE ForbiddenNEWLINE You do not have permission to fetch this webhook.NEWLINENEWLINE ReturnsNEWLINE ---------NEWLINE :class:`.Webhook`NEWLINE The webhook you requested.NEWLINE """NEWLINE data = await self.http.get_webhook(webhook_id)NEWLINE return Webhook.from_state(data, state=self._connection)NEWLINENEWLINE async def fetch_sticker(self, sticker_id: int, /) -> Union[StandardSticker, GuildSticker]:NEWLINE """|coro|NEWLINENEWLINE Retrieves a :class:`.Sticker` with the specified ID.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE RaisesNEWLINE --------NEWLINE HTTPExceptionNEWLINE Retrieving the sticker failed.NEWLINE NotFoundNEWLINE Invalid sticker ID.NEWLINENEWLINE ReturnsNEWLINE --------NEWLINE Union[:class:`.StandardSticker`, :class:`.GuildSticker`]NEWLINE The sticker you requested.NEWLINE """NEWLINE data = await self.http.get_sticker(sticker_id)NEWLINE cls, _ = _sticker_factory(data['type'])NEWLINE # The type checker is not smart enough to figure out the constructor is correctNEWLINE return cls(state=self._connection, data=data) # type: ignoreNEWLINENEWLINE async def fetch_premium_sticker_packs(self) -> List[StickerPack]:NEWLINE """|coro|NEWLINENEWLINE Retrieves all available premium sticker packs.NEWLINENEWLINE .. 
versionadded:: 2.0NEWLINENEWLINE RaisesNEWLINE -------NEWLINE HTTPExceptionNEWLINE Retrieving the sticker packs failed.NEWLINENEWLINE ReturnsNEWLINE ---------NEWLINE List[:class:`.StickerPack`]NEWLINE All available premium sticker packs.NEWLINE """NEWLINE data = await self.http.list_premium_sticker_packs()NEWLINE return [StickerPack(state=self._connection, data=pack) for pack in data['sticker_packs']]NEWLINENEWLINE async def create_dm(self, user: Snowflake) -> DMChannel:NEWLINE """|coro|NEWLINENEWLINE Creates a :class:`.DMChannel` with this user.NEWLINENEWLINE This should be rarely called, as this is done transparently for mostNEWLINE people.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE ParametersNEWLINE -----------NEWLINE user: :class:`~discord.abc.Snowflake`NEWLINE The user to create a DM with.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE :class:`.DMChannel`NEWLINE The channel that was created.NEWLINE """NEWLINE state = self._connectionNEWLINE found = state._get_private_channel_by_user(user.id)NEWLINE if found:NEWLINE return foundNEWLINENEWLINE data = await state.http.start_private_message(user.id)NEWLINE return state.add_dm_channel(data)NEWLINENEWLINE def add_view(self, view: View, *, message_id: Optional[int] = None) -> None:NEWLINE """Registers a :class:`~discord.ui.View` for persistent listening.NEWLINENEWLINE This method should be used for when a view is comprised of componentsNEWLINE that last longer than the lifecycle of the program.NEWLINENEWLINE .. versionadded:: 2.0NEWLINENEWLINE ParametersNEWLINE ------------NEWLINE view: :class:`discord.ui.View`NEWLINE The view to register for dispatching.NEWLINE message_id: Optional[:class:`int`]NEWLINE The message ID that the view is attached to. This is currently used toNEWLINE refresh the view's state during message update events. 
If not givenNEWLINE then message update events are not propagated for the view.NEWLINENEWLINE RaisesNEWLINE -------NEWLINE TypeErrorNEWLINE A view was not passed.NEWLINE ValueErrorNEWLINE The view is not persistent. A persistent view has no timeoutNEWLINE and all their components have an explicitly provided custom_id.NEWLINE """NEWLINENEWLINE if not isinstance(view, View):NEWLINE raise TypeError(f'expected an instance of View not {view.__class__!r}')NEWLINENEWLINE if not view.is_persistent():NEWLINE raise ValueError('View is not persistent. Items need to have a custom_id set and View must have no timeout')NEWLINENEWLINE self._connection.store_view(view, message_id)NEWLINENEWLINE @propertyNEWLINE def persistent_views(self) -> Sequence[View]:NEWLINE """Sequence[:class:`.View`]: A sequence of persistent views added to the client.NEWLINENEWLINE .. versionadded:: 2.0NEWLINE """NEWLINE return self._connection.persistent_viewsNEWLINE |
#!/usr/bin/env pythonNEWLINE# -*- coding: utf-8 -*-NEWLINE"""NEWLINEIn this example, we are going to make a dark code editor widget and make it show visualNEWLINEwhitespaces.NEWLINENEWLINE"""NEWLINEimport sysNEWLINEimport osNEWLINEos.environ['QT_API'] = 'pyside2'NEWLINE# os.environ['QT_API'] = 'pyqt5'NEWLINEfrom pyqode.qt import QtWidgets, QtGuiNEWLINEfrom pyqode.core import apiNEWLINEfrom pyqode.core import modesNEWLINEfrom pyqode.core import panelsNEWLINENEWLINENEWLINEdef main():NEWLINE app = QtWidgets.QApplication(sys.argv)NEWLINE window = QtWidgets.QMainWindow()NEWLINENEWLINE # code from the simple exampleNEWLINE editor = api.CodeEdit()NEWLINE editor.file.open(__file__)NEWLINE editor.modes.append(modes.CaretLineHighlighterMode())NEWLINE sh = modes.PygmentsSyntaxHighlighter(editor.document())NEWLINE editor.modes.append(sh)NEWLINE editor.panels.append(panels.SearchAndReplacePanel(),NEWLINE api.Panel.Position.TOP)NEWLINE # make the code edit show whitespaces in dark grayNEWLINE editor.show_white_spaces = TrueNEWLINE editor.whitespaces_foreground = QtGui.QColor('#606020')NEWLINENEWLINE # make a dark editor using the monokai themeNEWLINE sh.pygments_style = 'monokai'NEWLINENEWLINE window.setCentralWidget(editor)NEWLINE window.show()NEWLINENEWLINE app.exec_()NEWLINENEWLINE editor.file.close()NEWLINE del editorNEWLINE del windowNEWLINE del appNEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE main()NEWLINENEWLINE |
#!/usr/bin/env pythonNEWLINEfrom __future__ import division, print_function, absolute_importNEWLINENEWLINEimport numpy as npNEWLINEfrom numpy.testing import (run_module_suite, assert_allclose, assert_,NEWLINE assert_raises)NEWLINENEWLINEimport pywtNEWLINENEWLINENEWLINEdef test_dwt_idwt_basic():NEWLINE x = [3, 7, 1, 1, -2, 5, 4, 6]NEWLINE cA, cD = pywt.dwt(x, 'db2')NEWLINE cA_expect = [5.65685425, 7.39923721, 0.22414387, 3.33677403, 7.77817459]NEWLINE cD_expect = [-2.44948974, -1.60368225, -4.44140056, -0.41361256,NEWLINE 1.22474487]NEWLINE assert_allclose(cA, cA_expect)NEWLINE assert_allclose(cD, cD_expect)NEWLINENEWLINE x_roundtrip = pywt.idwt(cA, cD, 'db2')NEWLINE assert_allclose(x_roundtrip, x, rtol=1e-10)NEWLINENEWLINENEWLINEdef test_dwt_wavelet_kwd():NEWLINE x = np.array([3, 7, 1, 1, -2, 5, 4, 6])NEWLINE w = pywt.Wavelet('sym3')NEWLINE cA, cD = pywt.dwt(x, wavelet=w, mode='cpd')NEWLINE cA_expect = [4.38354585, 3.80302657, 7.31813271, -0.58565539, 4.09727044,NEWLINE 7.81994027]NEWLINE cD_expect = [-1.33068221, -2.78795192, -3.16825651, -0.67715519,NEWLINE -0.09722957, -0.07045258]NEWLINE assert_allclose(cA, cA_expect)NEWLINE assert_allclose(cD, cD_expect)NEWLINENEWLINENEWLINEdef test_dwt_coeff_len():NEWLINE x = np.array([3, 7, 1, 1, -2, 5, 4, 6])NEWLINE w = pywt.Wavelet('sym3')NEWLINE ln = pywt.dwt_coeff_len(data_len=len(x), filter_len=w.dec_len, mode='sym')NEWLINE assert_(ln == 6)NEWLINE ln_modes = [pywt.dwt_coeff_len(len(x), w.dec_len, mode) for mode inNEWLINE pywt.MODES.modes]NEWLINE assert_allclose(ln_modes, [6, 6, 6, 6, 6, 4])NEWLINENEWLINENEWLINEdef test_idwt_none_input():NEWLINE # None input equals arrays of zeros of the right lengthNEWLINE res1 = pywt.idwt([1,2,0,1], None, 'db2', 'sym')NEWLINE res2 = pywt.idwt([1, 2, 0, 1], [0, 0, 0, 0], 'db2', 'sym')NEWLINE assert_allclose(res1, res2, rtol=1e-15, atol=1e-15)NEWLINENEWLINE res1 = pywt.idwt(None, [1, 2, 0, 1], 'db2', 'sym')NEWLINE res2 = pywt.idwt([0, 0, 0, 0], [1, 2, 0, 1], 'db2', 'sym')NEWLINE 
assert_allclose(res1, res2, rtol=1e-15, atol=1e-15)NEWLINENEWLINE # Only one argument at a time can be NoneNEWLINE assert_raises(ValueError, pywt.idwt, None, None, 'db2', 'sym')NEWLINENEWLINENEWLINEdef test_idwt_correct_size_kw():NEWLINE res = pywt.idwt([1, 2, 3, 4, 5], [1, 2, 3, 4], 'db2', 'sym',NEWLINE correct_size=True)NEWLINE expected = [1.76776695, 0.61237244, 3.18198052, 0.61237244, 4.59619408,NEWLINE 0.61237244]NEWLINE assert_allclose(res, expected)NEWLINENEWLINE assert_raises(ValueError, pywt.idwt,NEWLINE [1, 2, 3, 4, 5], [1, 2, 3, 4], 'db2', 'sym')NEWLINE assert_raises(ValueError, pywt.idwt, [1, 2, 3, 4], [1, 2, 3, 4, 5], 'db2',NEWLINE 'sym', correct_size=True)NEWLINENEWLINENEWLINEdef test_idwt_invalid_input():NEWLINE # Too short, min length is 4 for 'db4':NEWLINE assert_raises(ValueError, pywt.idwt, [1,2,4], [4,1,3], 'db4', 'sym')NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE run_module_suite()NEWLINE |
from neo.Prompt.Commands.Invoke import InvokeContract, InvokeWithTokenVerificationScriptNEWLINEfrom neo.Core.Fixed8 import Fixed8NEWLINEfrom neo.Core.UInt160 import UInt160NEWLINEfrom neo.Network.common import blocking_prompt as promptNEWLINEfrom decimal import DecimalNEWLINEfrom neo.Core.TX.TransactionAttribute import TransactionAttributeNEWLINEimport binasciiNEWLINEfrom neo.Prompt.CommandBase import CommandBase, CommandDesc, ParameterDescNEWLINEfrom neo.Prompt.PromptData import PromptDataNEWLINEfrom neo.Prompt import Utils as PromptUtilsNEWLINEfrom neo.Implementations.Wallets.peewee.Models import NEP5Token as ModelNEP5TokenNEWLINEfrom neo.Implementations.Notifications.NotificationDB import NotificationDBNEWLINEfrom neo.Core.TX.TransactionAttribute import TransactionAttributeUsageNEWLINEfrom neo.Core.Utils import isValidPublicAddressNEWLINEimport peeweeNEWLINEimport tracebackNEWLINEfrom neo.Prompt.PromptPrinter import prompt_print as printNEWLINEfrom neo.logging import log_managerNEWLINENEWLINElogger = log_manager.getLogger()NEWLINENEWLINENEWLINEclass CommandWalletToken(CommandBase):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINE self.register_sub_command(CommandTokenDelete())NEWLINE self.register_sub_command(CommandTokenSend())NEWLINE self.register_sub_command(CommandTokenSendFrom())NEWLINE self.register_sub_command(CommandTokenHistory())NEWLINE self.register_sub_command(CommandTokenApprove())NEWLINE self.register_sub_command(CommandTokenAllowance())NEWLINE self.register_sub_command(CommandTokenMint())NEWLINE self.register_sub_command(CommandTokenRegister())NEWLINENEWLINE def command_desc(self):NEWLINE return CommandDesc('token', 'various token operations')NEWLINENEWLINE def execute(self, arguments):NEWLINE item = PromptUtils.get_arg(arguments)NEWLINENEWLINE if not item:NEWLINE print(f"run `{self.command_desc().command} help` to see supported queries")NEWLINE return FalseNEWLINENEWLINE try:NEWLINE return self.execute_sub_command(item, 
arguments[1:])NEWLINE except KeyError:NEWLINE print(f"{item} is an invalid parameter")NEWLINE return FalseNEWLINENEWLINENEWLINEclass CommandTokenDelete(CommandBase):NEWLINENEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) != 1:NEWLINE print("Please specify the required parameter")NEWLINE return FalseNEWLINENEWLINE hash_string = arguments[0]NEWLINE try:NEWLINE script_hash = UInt160.ParseString(hash_string)NEWLINE except Exception:NEWLINE # because UInt160 throws a generic exception. Should be fixed in the futureNEWLINE print("Invalid script hash")NEWLINE return FalseNEWLINENEWLINE # try to find token and collect some dataNEWLINE try:NEWLINE token = ModelNEP5Token.get(ContractHash=script_hash)NEWLINE except peewee.DoesNotExist:NEWLINE print(f"Could not find a token with script_hash {arguments[0]}")NEWLINE return FalseNEWLINENEWLINE success = wallet.DeleteNEP5Token(script_hash)NEWLINE if success:NEWLINE print(f"Token {token.Symbol} with script_hash {arguments[0]} deleted")NEWLINE else:NEWLINE # probably unreachable to due token check earlier. Better safe than sorrowNEWLINE print(f"Could not find a token with script_hash {arguments[0]}")NEWLINENEWLINE return successNEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('contract', 'token contract hash (script hash)')NEWLINE return CommandDesc('delete', 'remove a token from the wallet', [p1])NEWLINENEWLINENEWLINEclass CommandTokenSend(CommandBase):NEWLINENEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) < 4:NEWLINE print("Please specify the required parameters")NEWLINE return FalseNEWLINENEWLINE if len(arguments) > 6:NEWLINE # the 5th and 6th arguments are optionalNEWLINE print("Too many parameters supplied. 
Please check your command")NEWLINE return FalseNEWLINENEWLINE arguments, priority_fee = PromptUtils.get_fee(arguments)NEWLINE arguments, user_tx_attributes = PromptUtils.get_tx_attr_from_args(arguments)NEWLINENEWLINE token = arguments[0]NEWLINE from_addr = arguments[1]NEWLINE to_addr = arguments[2]NEWLINE try:NEWLINE amount = float(arguments[3])NEWLINE except ValueError:NEWLINE print(f"{arguments[3]} is not a valid amount")NEWLINE return FalseNEWLINENEWLINE fee = Fixed8.Zero()NEWLINE if priority_fee is not None:NEWLINE fee = priority_feeNEWLINE if fee is False:NEWLINE logger.debug("invalid fee")NEWLINE return FalseNEWLINENEWLINE try:NEWLINE success = token_send(wallet, token, from_addr, to_addr, amount, fee=fee, user_tx_attributes=user_tx_attributes)NEWLINE except ValueError as e:NEWLINE # occurs if arguments are invalidNEWLINE print(str(e))NEWLINE success = FalseNEWLINENEWLINE return successNEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('token', 'token symbol or script hash')NEWLINE p2 = ParameterDesc('from_addr', 'address to send token from')NEWLINE p3 = ParameterDesc('to_addr', 'address to send token to')NEWLINE p4 = ParameterDesc('amount', 'number of tokens to send')NEWLINE p5 = ParameterDesc('--fee', 'Attach GAS amount to give your transaction priority (> 0.001) e.g. 
--fee=0.01', optional=True)NEWLINE p6 = ParameterDesc('--tx-attr', f"a list of transaction attributes to attach to the transaction\n\n"NEWLINE f"{' ':>17} See: http://docs.neo.org/en-us/network/network-protocol.html section 4 for a description of possible attributes\n\n" # noqa: E128 ignore indentationNEWLINE f"{' ':>17} Example:\n"NEWLINE f"{' ':>20} --tx-attr=[{{\"usage\": <value>,\"data\":\"<remark>\"}}, ...]\n"NEWLINE f"{' ':>20} --tx-attr=[{{\"usage\": 0x90,\"data\":\"my brief description\"}}]\n", optional=True)NEWLINENEWLINE return CommandDesc('send', 'send a token from the wallet', [p1, p2, p3, p4, p5, p6])NEWLINENEWLINENEWLINEclass CommandTokenSendFrom(CommandBase):NEWLINE """NEWLINE This command is for old style NEP-5 tokens before the proposal got amended to remove this optional command.NEWLINE """NEWLINENEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) < 4:NEWLINE print("Please specify the required parameters")NEWLINE return FalseNEWLINENEWLINE arguments, priority_fee = PromptUtils.get_fee(arguments)NEWLINENEWLINE token_str = arguments[0]NEWLINE from_addr = arguments[1]NEWLINE to_addr = arguments[2]NEWLINENEWLINE try:NEWLINE amount = float(arguments[3])NEWLINE except ValueError:NEWLINE print(f"{arguments[3]} is not a valid amount")NEWLINE return FalseNEWLINENEWLINE p_fee = Fixed8.Zero()NEWLINE if priority_fee is not None:NEWLINE p_fee = priority_feeNEWLINE if p_fee is False:NEWLINE logger.debug("invalid fee")NEWLINE return FalseNEWLINENEWLINE try:NEWLINE token, tx, fee, results = test_token_send_from(wallet, token_str, from_addr, to_addr, amount)NEWLINE except ValueError as e:NEWLINE # invalid arguments or bad allowanceNEWLINE print(str(e))NEWLINE return FalseNEWLINE except Exception as e:NEWLINE # we act as the final capturing placeNEWLINE print("Something really unexpected happened")NEWLINE logger.error(traceback.format_exc())NEWLINE return 
FalseNEWLINENEWLINE if tx is not None and results is not None:NEWLINE vm_result = results[0].GetBigInteger()NEWLINE if vm_result == 1:NEWLINE print("\n-----------------------------------------------------------")NEWLINE print("Transfer of %s %s from %s to %s" % (NEWLINE string_from_amount(token, amount), token.symbol, from_addr, to_addr))NEWLINE print("Transfer fee: %s " % (fee.value / Fixed8.D))NEWLINE print("-------------------------------------------------------------\n")NEWLINE comb_fee = p_fee + feeNEWLINE if comb_fee != fee:NEWLINE print(f"Priority Fee ({p_fee.value / Fixed8.D}) + Transfer Fee ({fee.value / Fixed8.D}) = {comb_fee.value / Fixed8.D}\n")NEWLINE print("Enter your password to send to the network")NEWLINENEWLINE try:NEWLINE passwd = prompt("[Password]> ", is_password=True)NEWLINE except KeyboardInterrupt:NEWLINE print("Transaction cancelled")NEWLINE return FalseNEWLINE if not wallet.ValidatePassword(passwd):NEWLINE print("incorrect password")NEWLINE return FalseNEWLINENEWLINE return InvokeContract(wallet, tx, comb_fee)NEWLINENEWLINE print(f"Could not transfer tokens. Virtual machine returned: {vm_result}")NEWLINE return FalseNEWLINENEWLINE print(f"Could not transfer tokens. An unknown error occurred resulting in no Transaction object or VM output.")NEWLINE return FalseNEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('token', 'token symbol or script hash')NEWLINE p2 = ParameterDesc('from_addr', 'address to send token from')NEWLINE p3 = ParameterDesc('to_addr', 'address to send token to')NEWLINE p4 = ParameterDesc('amount', 'number of tokens to send')NEWLINE p5 = ParameterDesc('--fee', 'Attach GAS amount to give your transaction priority (> 0.001) e.g. 
--fee=0.01', optional=True)NEWLINENEWLINE return CommandDesc('sendfrom', 'send a token on behalf of another account (requires approval)', [p1, p2, p3, p4, p5])NEWLINENEWLINENEWLINEclass CommandTokenHistory(CommandBase):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) != 1:NEWLINE print("Please specify the required parameter")NEWLINE return FalseNEWLINENEWLINE try:NEWLINE token, events = token_history(wallet, arguments[0])NEWLINE except ValueError as e:NEWLINE print(str(e))NEWLINE return FalseNEWLINENEWLINE if events:NEWLINE addresses = wallet.AddressesNEWLINE print("-----------------------------------------------------------")NEWLINE print("Recent transaction history (last = more recent):")NEWLINE for event in events:NEWLINE if event.Type != 'transfer':NEWLINE continueNEWLINE if event.AddressFrom in addresses:NEWLINE print(f"[{event.AddressFrom}]: Sent {string_from_amount(token, event.Amount)}"NEWLINE f" {token.symbol} to {event.AddressTo}")NEWLINE if event.AddressTo in addresses:NEWLINE print(f"[{event.AddressTo}]: Received {string_from_amount(token, event.Amount)}"NEWLINE f" {token.symbol} from {event.AddressFrom}")NEWLINE print("-----------------------------------------------------------")NEWLINE else:NEWLINE print("History contains no transactions")NEWLINE return TrueNEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('symbol', 'token symbol or script hash')NEWLINE return CommandDesc('history', 'show transaction history', [p1])NEWLINENEWLINENEWLINEclass CommandTokenApprove(CommandBase):NEWLINENEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) < 4:NEWLINE print("Please specify the required parameters")NEWLINE return FalseNEWLINENEWLINE arguments, priority_fee = PromptUtils.get_fee(arguments)NEWLINENEWLINE token_str = 
arguments[0]NEWLINE from_addr = arguments[1]NEWLINE to_addr = arguments[2]NEWLINENEWLINE try:NEWLINE amount = float(arguments[3])NEWLINE except ValueError:NEWLINE print(f"{arguments[3]} is not a valid amount")NEWLINE return FalseNEWLINENEWLINE p_fee = Fixed8.Zero()NEWLINE if priority_fee is not None:NEWLINE p_fee = priority_feeNEWLINE if p_fee is False:NEWLINE logger.debug("invalid fee")NEWLINE return FalseNEWLINENEWLINE try:NEWLINE token = _validate_nep5_args(wallet, token_str, from_addr, to_addr, amount)NEWLINE except ValueError as e:NEWLINE print(str(e))NEWLINE return FalseNEWLINENEWLINE decimal_amount = amount_from_string(token, amount)NEWLINENEWLINE tx, fee, results = token.Approve(wallet, from_addr, to_addr, decimal_amount)NEWLINENEWLINE if tx is not None and results is not None:NEWLINE if results[0].GetBigInteger() == 1:NEWLINE print("\n-----------------------------------------------------------")NEWLINE print(f"Approve allowance of {amount} {token.symbol} from {from_addr} to {to_addr}")NEWLINE print(f"Invocation fee: {fee.value / Fixed8.D}")NEWLINE print("-------------------------------------------------------------\n")NEWLINE comb_fee = p_fee + feeNEWLINE if comb_fee != fee:NEWLINE print(f"Priority Fee ({p_fee.value / Fixed8.D}) + Invocation Fee ({fee.value / Fixed8.D}) = {comb_fee.value / Fixed8.D}\n")NEWLINE print("Enter your password to send to the network")NEWLINENEWLINE try:NEWLINE passwd = prompt("[Password]> ", is_password=True)NEWLINE except KeyboardInterrupt:NEWLINE print("Allowance approval cancelled")NEWLINE return FalseNEWLINE if not wallet.ValidatePassword(passwd):NEWLINE print("incorrect password")NEWLINE return FalseNEWLINENEWLINE return InvokeContract(wallet, tx, comb_fee)NEWLINENEWLINE print("Failed to approve tokens. 
Make sure you are entitled for approving.")NEWLINE return FalseNEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('symbol', 'token symbol or script hash')NEWLINE p2 = ParameterDesc('from_addr', 'address to send token from')NEWLINE p3 = ParameterDesc('to_addr', 'address to send token to')NEWLINE p4 = ParameterDesc('amount', 'number of tokens to send')NEWLINE p5 = ParameterDesc('--fee', 'Attach GAS amount to give your transaction priority (> 0.001) e.g. --fee=0.01', optional=True)NEWLINENEWLINE return CommandDesc('approve', 'approve an allowance', [p1, p2, p3, p4, p5])NEWLINENEWLINE def handle_help(self, arguments):NEWLINE super().handle_help(arguments)NEWLINE print(NEWLINE "\nThis is an optional NEP-5 command (now legacy).\nFor more information see https://github.com/neo-project/proposals/blob/c357f5965afc2155615b6b96c7d15da688f81982/nep-5.mediawiki#approve_optional")NEWLINENEWLINENEWLINEclass CommandTokenAllowance(CommandBase):NEWLINENEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) != 3:NEWLINE print("Please specify the required parameters")NEWLINE return FalseNEWLINENEWLINE token_str = arguments[0]NEWLINE from_addr = arguments[1]NEWLINE to_addr = arguments[2]NEWLINENEWLINE try:NEWLINE token = PromptUtils.get_token(wallet, token_str)NEWLINE except ValueError as e:NEWLINE print(str(e))NEWLINE return FalseNEWLINENEWLINE try:NEWLINE allowance = token_get_allowance(wallet, token_str, from_addr, to_addr)NEWLINE print(f"{token.symbol} allowance for {from_addr} from {to_addr} : {allowance} ")NEWLINE return TrueNEWLINE except ValueError as e:NEWLINE print(str(e))NEWLINE return FalseNEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('symbol', 'token symbol or script hash')NEWLINE p2 = ParameterDesc('from_addr', 'address to send token from')NEWLINE p3 = ParameterDesc('to_addr', 'address to send token to')NEWLINENEWLINE return 
CommandDesc('allowance', 'get the amount an account can transfer from another acount', [p1, p2, p3])NEWLINENEWLINENEWLINEclass CommandTokenMint(CommandBase):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) < 2:NEWLINE print("Please specify the required parameters")NEWLINE return FalseNEWLINENEWLINE if len(arguments) > 6:NEWLINE # the 3rd and 4th argument are for attaching neo/gas, 5th for attaching a fee, 6th for attaching attributesNEWLINE print("Too many parameters supplied. Please check your command")NEWLINE return FalseNEWLINENEWLINE arguments, priority_fee = PromptUtils.get_fee(arguments)NEWLINE arguments, invoke_attrs = PromptUtils.get_tx_attr_from_args(arguments)NEWLINENEWLINE token_str = arguments[0]NEWLINE try:NEWLINE token = PromptUtils.get_token(wallet, token_str)NEWLINE except ValueError as e:NEWLINE print(str(e))NEWLINE return FalseNEWLINENEWLINE to_addr = arguments[1]NEWLINE if not isValidPublicAddress(to_addr):NEWLINE print(f"{to_addr} is not a valid address")NEWLINE return FalseNEWLINENEWLINE remaining_args = arguments[2:]NEWLINE asset_attachments = []NEWLINE for optional in remaining_args:NEWLINE _, neo_to_attach, gas_to_attach = PromptUtils.get_asset_attachments([optional])NEWLINENEWLINE if "attach-neo" in optional:NEWLINE if not neo_to_attach:NEWLINE print(f"Could not parse value from --attach-neo. 
Value must be an integer")NEWLINE return FalseNEWLINE else:NEWLINE asset_attachments.append(optional)NEWLINENEWLINE if "attach-gas" in optional:NEWLINE if not gas_to_attach:NEWLINE print(f"Could not parse value from --attach-gas")NEWLINE return FalseNEWLINE else:NEWLINE asset_attachments.append(optional)NEWLINENEWLINE fee = Fixed8.Zero()NEWLINE if priority_fee is not None:NEWLINE fee = priority_feeNEWLINE if fee is False:NEWLINE logger.debug("invalid fee")NEWLINE return FalseNEWLINENEWLINE return token_mint(token, wallet, to_addr, asset_attachments=asset_attachments, fee=fee, invoke_attrs=invoke_attrs)NEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('symbol', 'token symbol or script hash')NEWLINE p2 = ParameterDesc('to_addr', 'address to mint tokens to')NEWLINE p3 = ParameterDesc('--attach-neo', 'amount of neo to attach to the transaction', optional=True)NEWLINE p4 = ParameterDesc('--attach-gas', 'amount of gas to attach to the transaction', optional=True)NEWLINE p5 = ParameterDesc('--fee', 'Attach GAS amount to give your transaction priority (> 0.001) e.g. 
--fee=0.01', optional=True)NEWLINE p6 = ParameterDesc('--tx-attr', f"a list of transaction attributes to attach to the transaction\n\n"NEWLINE f"{' ':>17} See: http://docs.neo.org/en-us/network/network-protocol.html section 4 for a description of possible attributes\n\n" # noqa: E128 ignore indentationNEWLINE f"{' ':>17} Example:\n"NEWLINE f"{' ':>20} --tx-attr=[{{\"usage\": <value>,\"data\":\"<remark>\"}}, ...]\n"NEWLINE f"{' ':>20} --tx-attr=[{{\"usage\": 0x90,\"data\":\"my brief description\"}}]\n", optional=True)NEWLINENEWLINE return CommandDesc('mint', 'mint tokens from a contract', [p1, p2, p3, p4, p5, p6])NEWLINENEWLINENEWLINEclass CommandTokenRegister(CommandBase):NEWLINE def __init__(self):NEWLINE super().__init__()NEWLINENEWLINE def execute(self, arguments):NEWLINE wallet = PromptData.WalletNEWLINENEWLINE if len(arguments) < 2:NEWLINE print("Please specify the required parameters")NEWLINE return FalseNEWLINENEWLINE arguments, priority_fee = PromptUtils.get_fee(arguments)NEWLINENEWLINE token_str = arguments[0]NEWLINE try:NEWLINE token = PromptUtils.get_token(wallet, token_str)NEWLINE except ValueError as e:NEWLINE print(str(e))NEWLINE return FalseNEWLINENEWLINE register_addr = arguments[1:]NEWLINE addr_list = []NEWLINE for addr in register_addr:NEWLINE if isValidPublicAddress(addr):NEWLINE addr_list.append(addr)NEWLINE else:NEWLINE print(f"{addr} is not a valid address")NEWLINE return FalseNEWLINENEWLINE p_fee = Fixed8.Zero()NEWLINE if priority_fee is not None:NEWLINE p_fee = priority_feeNEWLINE if p_fee is False:NEWLINE logger.debug("invalid fee")NEWLINE return FalseNEWLINENEWLINE tx, fee, results = token.CrowdsaleRegister(wallet, addr_list)NEWLINENEWLINE if tx is not None and results is not None:NEWLINE if len(results) > 0 and results[0].GetBigInteger() > 0:NEWLINE print("\n-----------------------------------------------------------")NEWLINE print("[%s] Will register addresses for crowdsale: %s " % (token.symbol, register_addr))NEWLINE print("Invocation 
Fee: %s " % (fee.value / Fixed8.D))NEWLINE print("-------------------------------------------------------------\n")NEWLINE comb_fee = p_fee + feeNEWLINE if comb_fee != fee:NEWLINE print(f"Priority Fee ({p_fee.value / Fixed8.D}) + Invocation Fee ({fee.value / Fixed8.D}) = {comb_fee.value / Fixed8.D}\n")NEWLINE print("Enter your password to send to the network")NEWLINENEWLINE try:NEWLINE passwd = prompt("[Password]> ", is_password=True)NEWLINE except KeyboardInterrupt:NEWLINE print("Registration cancelled")NEWLINE return FalseNEWLINE if not wallet.ValidatePassword(passwd):NEWLINE print("incorrect password")NEWLINE return FalseNEWLINENEWLINE return InvokeContract(wallet, tx, comb_fee)NEWLINENEWLINE print("Could not register address(es)")NEWLINE return FalseNEWLINENEWLINE def command_desc(self):NEWLINE p1 = ParameterDesc('symbol', 'token symbol or script hash')NEWLINE p2 = ParameterDesc('addresses', 'space separated list of NEO addresses')NEWLINE p3 = ParameterDesc('--fee', 'Attach GAS amount to give your transaction priority (> 0.001) e.g. 
--fee=0.01', optional=True)NEWLINE return CommandDesc('register', 'register for a crowd sale', [p1, p2, p3])NEWLINENEWLINENEWLINEdef _validate_nep5_args(wallet, token_str, from_addr, to_addr, amount):NEWLINE """NEWLINE A helper function to validate common arguments used in NEP-5 functionsNEWLINENEWLINE Args:NEWLINE wallet (Wallet): a UserWallet instanceNEWLINE token_str (str): symbol name or script_hashNEWLINE from_addr (str): a wallet addressNEWLINE to_addr (str): a wallet addressNEWLINE amount (float): the number of tokens to sendNEWLINENEWLINE Raises:NEWLINE ValueError: for invalid argumentsNEWLINENEWLINE Returns:NEWLINE token (NEP5Token): instanceNEWLINE """NEWLINE try:NEWLINE token = PromptUtils.get_token(wallet, token_str)NEWLINE except ValueError:NEWLINE raiseNEWLINENEWLINE if not isValidPublicAddress(from_addr):NEWLINE raise ValueError("send_from is not a valid address")NEWLINENEWLINE if not isValidPublicAddress(to_addr):NEWLINE raise ValueError("send_to is not a valid address")NEWLINENEWLINE try:NEWLINE # internally this function uses the `Decimal` class which will parse the float amount to its required format.NEWLINE # the name is a bit misleading /shrugNEWLINE amount = amount_from_string(token, amount)NEWLINE except Exception:NEWLINE raise ValueError(f"{amount} is not a valid amount")NEWLINENEWLINE return tokenNEWLINENEWLINENEWLINEdef token_send(wallet, token_str, from_addr, to_addr, amount, fee=Fixed8.Zero(), user_tx_attributes=None):NEWLINE """NEWLINE Send `amount` of tokens from `from_addr` to `to_addr`NEWLINENEWLINE Args:NEWLINE wallet (Wallet): a UserWallet instanceNEWLINE token_str (str): symbol name or script_hashNEWLINE from_addr (str): a wallet addressNEWLINE to_addr (str): a wallet addressNEWLINE amount (float): the number of tokens to sendNEWLINE fee (Fixed8): (optional) a fee to give the transaction priority (> 0.001) NEWLINE user_tx_attributes (list): a list of ``TransactionAttribute``s.NEWLINENEWLINE Raises:NEWLINE ValueError: for invalid 
argumentsNEWLINENEWLINE Returns:NEWLINE a Transaction object if successful, False otherwise.NEWLINE """NEWLINE if not user_tx_attributes:NEWLINE user_tx_attributes = []NEWLINENEWLINE try:NEWLINE token = _validate_nep5_args(wallet, token_str, from_addr, to_addr, amount)NEWLINE except ValueError:NEWLINE # just making it explicit for the readerNEWLINE raiseNEWLINENEWLINE for attr in user_tx_attributes:NEWLINE if not isinstance(attr, TransactionAttribute):NEWLINE raise ValueError(f"{attr} is not a valid transaction attribute")NEWLINENEWLINE decimal_amount = amount_from_string(token, amount)NEWLINENEWLINE return do_token_transfer(token, wallet, from_addr, to_addr, decimal_amount, fee=fee, tx_attributes=user_tx_attributes)NEWLINENEWLINENEWLINEdef test_token_send_from(wallet, token_str, from_addr, to_addr, amount):NEWLINE """NEWLINE Test sending funds from `addr_from` to `addr_to` without commiting to the network.NEWLINENEWLINE This does a local test to validate all supplied arguments and if the blockchain state allows for the transfer.NEWLINENEWLINE Args:NEWLINE wallet (Wallet): a UserWallet instanceNEWLINE token_str (str): symbol name or script_hashNEWLINE from_addr (str): a wallet addressNEWLINE to_addr (str): a wallet addressNEWLINE amount (float): the number of tokens to sendNEWLINENEWLINE Raises:NEWLINE ValueError: for invalid arguments or if allowance is insufficient.NEWLINENEWLINE Returns:NEWLINE tuple:NEWLINE token (NEP5Token): instanceNEWLINE InvocationTransaction: the transaction.NEWLINE int: the transaction fee.NEWLINE list: the neo VM evaluation stack results.NEWLINE """NEWLINE try:NEWLINE token = _validate_nep5_args(wallet, token_str, from_addr, to_addr, amount)NEWLINE allowance = token_get_allowance(wallet, token_str, from_addr, to_addr, verbose=False)NEWLINENEWLINE if allowance < amount:NEWLINE raise ValueError(f"Insufficient allowance: {allowance}")NEWLINE except ValueError:NEWLINE # bad args or allowanceNEWLINE raiseNEWLINENEWLINE tx, fees, results = 
token.TransferFrom(wallet, from_addr, to_addr, amount)NEWLINE return token, tx, fees, resultsNEWLINENEWLINENEWLINEdef token_get_allowance(wallet, token_str, from_addr, to_addr, verbose=False):NEWLINE """NEWLINE Query the smart contract for the amount from_addr is allowed to send to to_addrNEWLINENEWLINE Requires amount to be `approved`.NEWLINENEWLINE Args:NEWLINE wallet (Wallet): a UserWallet instanceNEWLINE token_str (str): symbol name or script_hashNEWLINE from_addr (str): a wallet addressNEWLINE to_addr (str): a wallet addressNEWLINE verbose (bool): flag indicating whether to print VM resultsNEWLINENEWLINE Raises:NEWLINE ValueError: for invalid arguments or if allowance could not be queriedNEWLINENEWLINE Returns:NEWLINE int: allowanceNEWLINE """NEWLINE try:NEWLINE token = _validate_nep5_args(wallet, token_str, from_addr, to_addr, amount=0)NEWLINE except ValueError:NEWLINE raiseNEWLINENEWLINE tx, fee, results = token.Allowance(wallet, from_addr, to_addr)NEWLINENEWLINE if tx is not None and results is not None:NEWLINE allowance = results[0].GetBigInteger()NEWLINE if verbose:NEWLINE print("%s allowance for %s from %s : %s " % (token.symbol, from_addr, to_addr, allowance))NEWLINENEWLINE return allowanceNEWLINE else:NEWLINE if verbose:NEWLINE print("Could not get allowance for token %s " % token.symbol)NEWLINE raise ValueError(f"Could not get allowance for token {token.symbol}")NEWLINENEWLINENEWLINEdef token_mint(token, wallet, to_addr, asset_attachments=[], fee=Fixed8.Zero(), invoke_attrs=None):NEWLINE if not invoke_attrs:NEWLINE invoke_attrs = []NEWLINENEWLINE p_fee = feeNEWLINENEWLINE tx, fee, results = token.Mint(wallet, to_addr, asset_attachments, invoke_attrs=invoke_attrs)NEWLINENEWLINE if tx is not None and results is not None:NEWLINE if len(results) > 0 and results[0] is not None:NEWLINE print("\n-----------------------------------------------------------")NEWLINE print(f"[{token.symbol}] Will mint tokens to address: {to_addr}")NEWLINE print(f"Invocation Fee: 
{fee.value / Fixed8.D}")NEWLINE print("-------------------------------------------------------------\n")NEWLINE comb_fee = p_fee + feeNEWLINE if comb_fee != fee:NEWLINE print(f"Priority Fee ({p_fee.value / Fixed8.D}) + Invocation Fee ({fee.value / Fixed8.D}) = {comb_fee.value / Fixed8.D}\n")NEWLINE print("Enter your password to send to the network")NEWLINENEWLINE try:NEWLINE passwd = prompt("[Password]> ", is_password=True)NEWLINE except KeyboardInterrupt:NEWLINE print("Token mint cancelled")NEWLINE return FalseNEWLINE if not wallet.ValidatePassword(passwd):NEWLINE print("incorrect password")NEWLINE return FalseNEWLINENEWLINE return InvokeWithTokenVerificationScript(wallet, tx, token, comb_fee, invoke_attrs=invoke_attrs)NEWLINENEWLINE print("Failed to mint tokens")NEWLINE return FalseNEWLINENEWLINENEWLINEdef do_token_transfer(token, wallet, from_address, to_address, amount, fee=Fixed8.Zero(), tx_attributes=None):NEWLINE if not tx_attributes:NEWLINE tx_attributes = []NEWLINENEWLINE p_fee = feeNEWLINENEWLINE # because we cannot differentiate between a normal and multisig from_addr, and because we want to makeNEWLINE # sending NEP5 tokens straight forward even when sending from multisig addresses, we include the script_hashNEWLINE # for verification by default to the transaction attributes. 
See PR/Issue: https://github.com/CityOfZion/neo-python/pull/491NEWLINE from_script_hash = binascii.unhexlify(bytes(wallet.ToScriptHash(from_address).ToString2(), 'utf-8'))NEWLINE tx_attributes.append(TransactionAttribute(usage=TransactionAttributeUsage.Script, data=from_script_hash))NEWLINENEWLINE tx, fee, results = token.Transfer(wallet, from_address, to_address, amount, tx_attributes=tx_attributes)NEWLINENEWLINE if tx is not None and results is not None and len(results) > 0:NEWLINENEWLINE if results[0].GetBigInteger() == 1:NEWLINE print("\n-----------------------------------------------------------")NEWLINE print("Will transfer %s %s from %s to %s" % (string_from_amount(token, amount), token.symbol, from_address, to_address))NEWLINE print("Transfer fee: %s " % (fee.value / Fixed8.D))NEWLINE print("-------------------------------------------------------------\n")NEWLINE comb_fee = p_fee + feeNEWLINE if comb_fee != fee:NEWLINE print(f"Priority Fee ({p_fee.value / Fixed8.D}) + Transfer Fee ({fee.value / Fixed8.D}) = {comb_fee.value / Fixed8.D}\n")NEWLINE print("Enter your password to send to the network")NEWLINENEWLINE try:NEWLINE passwd = prompt("[Password]> ", is_password=True)NEWLINE except KeyboardInterrupt:NEWLINE print("Transfer cancelled")NEWLINE return FalseNEWLINE if not wallet.ValidatePassword(passwd):NEWLINE print("incorrect password")NEWLINE return FalseNEWLINENEWLINE return InvokeContract(wallet, tx, comb_fee)NEWLINENEWLINE print("could not transfer tokens")NEWLINE return FalseNEWLINENEWLINENEWLINEdef token_history(wallet, token_str):NEWLINE notification_db = NotificationDB.instance()NEWLINENEWLINE try:NEWLINE token = PromptUtils.get_token(wallet, token_str)NEWLINE except ValueError:NEWLINE raiseNEWLINENEWLINE events = notification_db.get_by_contract(token.ScriptHash)NEWLINE return token, eventsNEWLINENEWLINENEWLINEdef amount_from_string(token, amount_str):NEWLINE precision_mult = pow(10, token.decimals)NEWLINE amount = Decimal(amount_str) * 
precision_multNEWLINENEWLINE return int(amount)NEWLINENEWLINENEWLINEdef string_from_amount(token, amount):NEWLINE precision_mult = pow(10, token.decimals)NEWLINE amount = Decimal(amount) / Decimal(precision_mult)NEWLINE formatter_str = '.%sf' % token.decimalsNEWLINE amount_str = format(amount, formatter_str)NEWLINENEWLINE return amount_strNEWLINE |
# Copyright 2019, Google LLC.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINEimport collectionsNEWLINENEWLINEfrom absl.testing import parameterizedNEWLINEimport tensorflow as tfNEWLINENEWLINEfrom tensorflow_federated.python.core.backends.native import execution_contextsNEWLINEfrom tensorflow_federated.python.simulation.baselines import client_specNEWLINEfrom tensorflow_federated.python.simulation.baselines.stackoverflow import word_prediction_preprocessingNEWLINENEWLINENEWLINETEST_DATA = collections.OrderedDict(NEWLINE creation_date=(['unused date']),NEWLINE score=([tf.constant(0, dtype=tf.int64)]),NEWLINE tags=(['unused test tag']),NEWLINE title=(['unused title']),NEWLINE tokens=(['one must imagine']),NEWLINE type=(['unused type']),NEWLINE)NEWLINENEWLINENEWLINEdef _compute_length_of_dataset(ds):NEWLINE return ds.reduce(0, lambda x, _: x + 1)NEWLINENEWLINENEWLINEclass SplitInputTest(tf.test.TestCase):NEWLINENEWLINE def test_split_input_returns_expected_result(self):NEWLINE tokens = tf.constant([[0, 1, 2, 3, 4]], dtype=tf.int64)NEWLINE expected_input = [[0, 1, 2, 3]]NEWLINE expected_target = [[1, 2, 3, 4]]NEWLINE split = word_prediction_preprocessing.split_input_target(tokens)NEWLINE self.assertAllEqual(self.evaluate(split[0]), expected_input)NEWLINE self.assertAllEqual(self.evaluate(split[1]), expected_target)NEWLINENEWLINENEWLINEclass ToIDsFnTest(tf.test.TestCase):NEWLINENEWLINE def 
test_ids_fn_truncates_on_input_longer_than_sequence_length(self):NEWLINE vocab = ['A', 'B', 'C']NEWLINE max_seq_len = 1NEWLINE bos = word_prediction_preprocessing.get_special_tokens(NEWLINE len(vocab)).beginning_of_sentenceNEWLINE to_ids_fn = word_prediction_preprocessing.build_to_ids_fn(NEWLINE vocab, max_seq_len)NEWLINE data = {'tokens': 'A B C'}NEWLINE processed = to_ids_fn(data)NEWLINE self.assertAllEqual(self.evaluate(processed), [bos, 1])NEWLINENEWLINE def test_build_to_ids_fn_embeds_all_vocab(self):NEWLINE vocab = ['A', 'B', 'C']NEWLINE max_seq_len = 5NEWLINE special_tokens = word_prediction_preprocessing.get_special_tokens(NEWLINE len(vocab))NEWLINE bos = special_tokens.beginning_of_sentenceNEWLINE eos = special_tokens.end_of_sentenceNEWLINE to_ids_fn = word_prediction_preprocessing.build_to_ids_fn(NEWLINE vocab, max_seq_len)NEWLINE data = {'tokens': 'A B C'}NEWLINE processed = to_ids_fn(data)NEWLINE self.assertAllEqual(self.evaluate(processed), [bos, 1, 2, 3, eos])NEWLINENEWLINE def test_pad_token_correct(self):NEWLINE vocab = ['A', 'B', 'C']NEWLINE max_seq_len = 5NEWLINE to_ids_fn = word_prediction_preprocessing.build_to_ids_fn(NEWLINE vocab, max_seq_len)NEWLINE special_tokens = word_prediction_preprocessing.get_special_tokens(NEWLINE len(vocab))NEWLINE pad = special_tokens.paddingNEWLINE bos = special_tokens.beginning_of_sentenceNEWLINE eos = special_tokens.end_of_sentenceNEWLINE data = {'tokens': 'A B C'}NEWLINE processed = to_ids_fn(data)NEWLINE batched_ds = tf.data.Dataset.from_tensor_slices([processed]).padded_batch(NEWLINE 1, padded_shapes=[6])NEWLINE sample_elem = next(iter(batched_ds))NEWLINE self.assertAllEqual(self.evaluate(sample_elem), [[bos, 1, 2, 3, eos, pad]])NEWLINENEWLINE def test_out_of_vocab_tokens_are_correct(self):NEWLINE vocab = ['A', 'B', 'C']NEWLINE max_seq_len = 5NEWLINE num_out_of_vocab_buckets = 2NEWLINE to_ids_fn = word_prediction_preprocessing.build_to_ids_fn(NEWLINE vocab, max_seq_len, 
num_out_of_vocab_buckets=num_out_of_vocab_buckets)NEWLINE out_of_vocab_tokens = word_prediction_preprocessing.get_special_tokens(NEWLINE len(vocab),NEWLINE num_out_of_vocab_buckets=num_out_of_vocab_buckets).out_of_vocabNEWLINE data = {'tokens': 'A B D'}NEWLINE processed = to_ids_fn(data)NEWLINE self.assertLen(out_of_vocab_tokens, num_out_of_vocab_buckets)NEWLINE self.assertIn(self.evaluate(processed)[3], out_of_vocab_tokens)NEWLINENEWLINENEWLINEclass BatchAndSplitTest(tf.test.TestCase):NEWLINENEWLINE def test_batch_and_split_fn_returns_dataset_with_correct_type_spec(self):NEWLINE token = tf.constant([[0, 1, 2, 3, 4]], dtype=tf.int64)NEWLINE ds = tf.data.Dataset.from_tensor_slices(token)NEWLINE padded_and_batched = word_prediction_preprocessing.batch_and_split(NEWLINE ds, sequence_length=6, batch_size=1)NEWLINE self.assertIsInstance(padded_and_batched, tf.data.Dataset)NEWLINE self.assertEqual(padded_and_batched.element_spec, (tf.TensorSpec(NEWLINE [None, 6], dtype=tf.int64), tf.TensorSpec([None, 6], dtype=tf.int64)))NEWLINENEWLINE def test_batch_and_split_fn_returns_dataset_yielding_expected_elements(self):NEWLINE token = tf.constant([[0, 1, 2, 3, 4]], dtype=tf.int64)NEWLINE ds = tf.data.Dataset.from_tensor_slices(token)NEWLINE padded_and_batched = word_prediction_preprocessing.batch_and_split(NEWLINE ds, sequence_length=6, batch_size=1)NEWLINE num_elems = 0NEWLINE for elem in padded_and_batched:NEWLINE self.assertAllEqual(NEWLINE self.evaluate(elem[0]),NEWLINE tf.constant([[0, 1, 2, 3, 4, 0]], dtype=tf.int64))NEWLINE self.assertAllEqual(NEWLINE self.evaluate(elem[1]),NEWLINE tf.constant([[1, 2, 3, 4, 0, 0]], dtype=tf.int64))NEWLINE num_elems += 1NEWLINE self.assertEqual(num_elems, 1)NEWLINENEWLINENEWLINEclass PreprocessFnTest(tf.test.TestCase, parameterized.TestCase):NEWLINENEWLINE def test_preprocess_fn_with_empty_vocab_raises(self):NEWLINE preprocess_spec = client_spec.ClientSpec(num_epochs=1, batch_size=1)NEWLINE with self.assertRaisesRegex(ValueError, 'vocab 
must be non-empty'):NEWLINE word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec, vocab=[], sequence_length=10)NEWLINENEWLINE @parameterized.named_parameters(('zero_value', 0), ('negative_value1', -1),NEWLINE ('negative_value2', -2))NEWLINE def test_nonpositive_sequence_length_raises(self, sequence_length):NEWLINE del sequence_length # Unused.NEWLINE preprocess_spec = client_spec.ClientSpec(num_epochs=1, batch_size=1)NEWLINE with self.assertRaisesRegex(ValueError,NEWLINE 'sequence_length must be a positive integer'):NEWLINE word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec, vocab=['A'], sequence_length=0)NEWLINENEWLINE @parameterized.named_parameters(('zero_value', 0), ('negative_value1', -1),NEWLINE ('negative_value2', -2))NEWLINE def test_nonpositive_num_out_of_vocab_buckets_length_raises(NEWLINE self, num_out_of_vocab_buckets):NEWLINE preprocess_spec = client_spec.ClientSpec(num_epochs=1, batch_size=1)NEWLINE with self.assertRaisesRegex(NEWLINE ValueError, 'num_out_of_vocab_buckets must be a positive integer'):NEWLINE word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec,NEWLINE vocab=['A'],NEWLINE sequence_length=10,NEWLINE num_out_of_vocab_buckets=num_out_of_vocab_buckets)NEWLINENEWLINE @parameterized.named_parameters(('param1', 1, 1), ('param2', 4, 2),NEWLINE ('param3', 100, 3))NEWLINE def test_preprocess_fn_returns_correct_dataset_element_spec(NEWLINE self, sequence_length, num_out_of_vocab_buckets):NEWLINE ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)NEWLINE preprocess_spec = client_spec.ClientSpec(NEWLINE num_epochs=1, batch_size=32, max_elements=100)NEWLINE preprocess_fn = word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec,NEWLINE sequence_length=sequence_length,NEWLINE vocab=['one', 'must'],NEWLINE num_out_of_vocab_buckets=num_out_of_vocab_buckets)NEWLINE preprocessed_ds = preprocess_fn(ds)NEWLINE self.assertEqual(NEWLINE preprocessed_ds.element_spec,NEWLINE 
(tf.TensorSpec(shape=[None, sequence_length], dtype=tf.int64),NEWLINE tf.TensorSpec(shape=[None, sequence_length], dtype=tf.int64)))NEWLINENEWLINE def test_preprocess_fn_returns_correct_sequence_with_1_out_of_vocab_bucket(NEWLINE self):NEWLINE ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)NEWLINE preprocess_spec = client_spec.ClientSpec(NEWLINE num_epochs=1, batch_size=32, max_elements=100)NEWLINE preprocess_fn = word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec,NEWLINE sequence_length=6,NEWLINE vocab=['one', 'must'],NEWLINE num_out_of_vocab_buckets=1)NEWLINENEWLINE preprocessed_ds = preprocess_fn(ds)NEWLINE element = next(iter(preprocessed_ds))NEWLINENEWLINE # BOS is len(vocab)+2, EOS is len(vocab)+3, pad is 0, OOV is len(vocab)+1NEWLINE self.assertAllEqual(NEWLINE self.evaluate(element[0]),NEWLINE tf.constant([[4, 1, 2, 3, 5, 0]], dtype=tf.int64))NEWLINENEWLINE def test_preprocess_fn_returns_correct_sequence_with_3_out_of_vocab_buckets(NEWLINE self):NEWLINE ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)NEWLINE preprocess_spec = client_spec.ClientSpec(NEWLINE num_epochs=1, batch_size=32, max_elements=100)NEWLINE preprocess_fn = word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec,NEWLINE sequence_length=6,NEWLINE vocab=['one', 'must'],NEWLINE num_out_of_vocab_buckets=3)NEWLINE preprocessed_ds = preprocess_fn(ds)NEWLINE element = next(iter(preprocessed_ds))NEWLINE # BOS is len(vocab)+3+1NEWLINE self.assertEqual(self.evaluate(element[0])[0][0], 6)NEWLINE self.assertEqual(self.evaluate(element[0])[0][1], 1)NEWLINE self.assertEqual(self.evaluate(element[0])[0][2], 2)NEWLINE # OOV is [len(vocab)+1, len(vocab)+2, len(vocab)+3]NEWLINE self.assertIn(self.evaluate(element[0])[0][3], [3, 4, 5])NEWLINE # EOS is len(vocab)+3+2NEWLINE self.assertEqual(self.evaluate(element[0])[0][4], 7)NEWLINE # pad is 0NEWLINE self.assertEqual(self.evaluate(element[0])[0][5], 0)NEWLINENEWLINE @parameterized.named_parameters(NEWLINE 
('num_epochs_1_batch_size_1', 1, 1),NEWLINE ('num_epochs_4_batch_size_2', 4, 2),NEWLINE ('num_epochs_9_batch_size_3', 9, 3),NEWLINE ('num_epochs_12_batch_size_1', 12, 1),NEWLINE ('num_epochs_3_batch_size_5', 3, 5),NEWLINE ('num_epochs_7_batch_size_2', 7, 2),NEWLINE )NEWLINE def test_ds_length_is_ceil_num_epochs_over_batch_size(self, num_epochs,NEWLINE batch_size):NEWLINE ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)NEWLINE preprocess_spec = client_spec.ClientSpec(NEWLINE num_epochs=num_epochs, batch_size=batch_size)NEWLINE preprocess_fn = word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec, vocab=['A'], sequence_length=10)NEWLINE preprocessed_ds = preprocess_fn(ds)NEWLINE self.assertEqual(NEWLINE _compute_length_of_dataset(preprocessed_ds),NEWLINE tf.cast(tf.math.ceil(num_epochs / batch_size), tf.int32))NEWLINENEWLINE @parameterized.named_parameters(NEWLINE ('max_elements1', 1),NEWLINE ('max_elements3', 3),NEWLINE ('max_elements7', 7),NEWLINE ('max_elements11', 11),NEWLINE ('max_elements18', 18),NEWLINE )NEWLINE def test_ds_length_with_max_elements(self, max_elements):NEWLINE repeat_size = 10NEWLINE ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)NEWLINE preprocess_spec = client_spec.ClientSpec(NEWLINE num_epochs=repeat_size, batch_size=1, max_elements=max_elements)NEWLINE preprocess_fn = word_prediction_preprocessing.create_preprocess_fn(NEWLINE preprocess_spec, vocab=['A'])NEWLINE preprocessed_ds = preprocess_fn(ds)NEWLINE self.assertEqual(NEWLINE _compute_length_of_dataset(preprocessed_ds),NEWLINE min(repeat_size, max_elements))NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE execution_contexts.set_local_python_execution_context()NEWLINE tf.test.main()NEWLINE |
# Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0

"""
This example demonstrates how to use the Causal Effect Variational Autoencoder
[1] implemented in pyro.contrib.cevae.CEVAE, documented at
http://docs.pyro.ai/en/latest/contrib.cevae.html

**References**

[1] C. Louizos, U. Shalit, J. Mooij, D. Sontag, R. Zemel, M. Welling (2017).
    Causal Effect Inference with Deep Latent-Variable Models.
    http://papers.nips.cc/paper/7223-causal-effect-inference-with-deep-latent-variable-models.pdf
    https://github.com/AMLab-Amsterdam/CEVAE
"""
import argparse
import logging

import torch

import pyro
import pyro.distributions as dist
from pyro.contrib.cevae import CEVAE

# Verbose pyro logging so training progress is visible in this demo.
logging.getLogger("pyro").setLevel(logging.DEBUG)
logging.getLogger("pyro").handlers[0].setLevel(logging.DEBUG)


def generate_data(args):
    """
    Draw a synthetic dataset following the generative process of [1], but with
    larger feature and latent spaces ([1] assumes ``feature_dim=1`` and
    ``latent_dim=5``).

    Returns a tuple ``(x, t, y, true_ite)`` of features, binary treatment,
    binary outcome, and the ground-truth individual treatment effect.
    """
    # Binary confounder z, observed only through noisy features x whose noise
    # scale depends on z.
    z = dist.Bernoulli(0.5).sample([args.num_data])
    x = dist.Normal(z, 5 * z + 3 * (1 - z)).sample([args.feature_dim]).t()
    # Treatment assignment and outcome both depend on the confounder.
    t = dist.Bernoulli(0.75 * z + 0.25 * (1 - z)).sample()
    # NOTE(review): (2 * t - 2) maps t in {0,1} to {-2,0} rather than the
    # symmetric {-1,+1} of (2 * t - 1) -- confirm this is intended.
    y = dist.Bernoulli(logits=3 * (z + 2 * (2 * t - 2))).sample()

    # Ground-truth ITE, obtained by evaluating the known outcome model's mean
    # under both treatment arms for every individual.
    t0_t1 = torch.tensor([[0.], [1.]])
    y_t0, y_t1 = dist.Bernoulli(logits=3 * (z + 2 * (2 * t0_t1 - 2))).mean
    return x, t, y, y_t1 - y_t0


def main(args):
    """Train a CEVAE on synthetic data and compare ATE estimates."""
    pyro.enable_validation(__debug__)
    if args.cuda:
        torch.set_default_tensor_type('torch.cuda.FloatTensor')

    # Generate synthetic training data.
    pyro.set_rng_seed(args.seed)
    x_train, t_train, y_train, _ = generate_data(args)

    # Train.
    pyro.set_rng_seed(args.seed)
    pyro.clear_param_store()
    model = CEVAE(feature_dim=args.feature_dim,
                  latent_dim=args.latent_dim,
                  hidden_dim=args.hidden_dim,
                  num_layers=args.num_layers,
                  num_samples=10)
    model.fit(x_train, t_train, y_train,
              num_epochs=args.num_epochs,
              batch_size=args.batch_size,
              learning_rate=args.learning_rate,
              learning_rate_decay=args.learning_rate_decay,
              weight_decay=args.weight_decay)

    # Evaluate on a fresh test set drawn from the same process.
    x_test, t_test, y_test, true_ite = generate_data(args)
    true_ate = true_ite.mean()
    print("true ATE = {:0.3g}".format(true_ate.item()))
    # Naive estimate ignores confounding: difference of group means.
    naive_ate = y_test[t_test == 1].mean() - y_test[t_test == 0].mean()
    print("naive ATE = {:0.3g}".format(naive_ate))
    if args.jit:
        model = model.to_script_module()
    est_ate = model.ite(x_test).mean()
    print("estimated ATE = {:0.3g}".format(est_ate.item()))


if __name__ == "__main__":
    assert pyro.__version__.startswith('1.5.0')
    parser = argparse.ArgumentParser(description="Causal Effect Variational Autoencoder")
    parser.add_argument("--num-data", default=1000, type=int)
    parser.add_argument("--feature-dim", default=5, type=int)
    parser.add_argument("--latent-dim", default=20, type=int)
    parser.add_argument("--hidden-dim", default=200, type=int)
    parser.add_argument("--num-layers", default=3, type=int)
    parser.add_argument("-n", "--num-epochs", default=50, type=int)
    parser.add_argument("-b", "--batch-size", default=100, type=int)
    parser.add_argument("-lr", "--learning-rate", default=1e-3, type=float)
    parser.add_argument("-lrd", "--learning-rate-decay", default=0.1, type=float)
    parser.add_argument("--weight-decay", default=1e-4, type=float)
    parser.add_argument("--seed", default=1234567890, type=int)
    parser.add_argument("--jit", action="store_true")
    parser.add_argument("--cuda", action="store_true")
    args = parser.parse_args()
    main(args)
"""Config Flow for PlayStation 4."""
from collections import OrderedDict
import logging

import voluptuous as vol

from homeassistant import config_entries
from homeassistant.components.ps4.const import (
    DEFAULT_NAME, DEFAULT_REGION, DOMAIN, REGIONS)
from homeassistant.const import (
    CONF_CODE, CONF_HOST, CONF_IP_ADDRESS, CONF_NAME, CONF_REGION, CONF_TOKEN)

_LOGGER = logging.getLogger(__name__)

# Ports the helper must be able to bind; a failure aborts the flow with the
# matching reason key from PORT_MSG.
UDP_PORT = 987
TCP_PORT = 997
PORT_MSG = {UDP_PORT: 'port_987_bind_error', TCP_PORT: 'port_997_bind_error'}


@config_entries.HANDLERS.register(DOMAIN)
class PlayStation4FlowHandler(config_entries.ConfigFlow):
    """Handle a PlayStation 4 config flow.

    Steps: ``user`` (port-bind check) -> ``creds`` (fetch credentials,
    skipped when an entry already exists) -> ``link`` (select device,
    enter PIN, create the config entry).
    """

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL

    def __init__(self):
        """Initialize the config flow."""
        # Imported lazily so the dependency is only loaded when a flow starts.
        from pyps4_homeassistant import Helper

        self.helper = Helper()
        self.creds = None   # credentials fetched in the creds step
        self.name = None    # friendly name from user input
        self.host = None    # IP address of the selected PS4
        self.region = None  # PSN region from user input
        self.pin = None     # on-screen pairing PIN from user input

    async def async_step_user(self, user_input=None):
        """Handle a user config flow."""
        # Check if able to bind to ports: UDP 987, TCP 997.
        ports = PORT_MSG.keys()
        failed = await self.hass.async_add_executor_job(
            self.helper.port_bind, ports)
        if failed in ports:
            reason = PORT_MSG[failed]
            return self.async_abort(reason=reason)
        # Skip Creds Step if a device is configured.
        if self.hass.config_entries.async_entries(DOMAIN):
            return await self.async_step_link()
        return await self.async_step_creds()

    async def async_step_creds(self, user_input=None):
        """Return PS4 credentials from 2nd Screen App."""
        if user_input is not None:
            # Blocking credential fetch runs in the executor pool.
            self.creds = await self.hass.async_add_executor_job(
                self.helper.get_creds)

            if self.creds is not None:
                return await self.async_step_link()
            return self.async_abort(reason='credential_error')

        # First call (no input yet): show the field-less confirmation form.
        return self.async_show_form(
            step_id='creds')

    async def async_step_link(self, user_input=None):
        """Prompt user input. Create or edit entry."""
        errors = {}

        # Search for device.
        devices = await self.hass.async_add_executor_job(
            self.helper.has_devices)

        # Abort if can't find device.
        if not devices:
            return self.async_abort(reason='no_devices_found')

        device_list = [
            device['host-ip'] for device in devices]

        # If entry exists check that devices found aren't configured.
        if self.hass.config_entries.async_entries(DOMAIN):
            for entry in self.hass.config_entries.async_entries(DOMAIN):
                conf_devices = entry.data['devices']
                for c_device in conf_devices:
                    if c_device['host'] in device_list:
                        # Remove configured device from search list.
                        device_list.remove(c_device['host'])
            # If list is empty then all devices are configured.
            if not device_list:
                return self.async_abort(reason='devices_configured')

        # Login to PS4 with user data.
        if user_input is not None:
            self.region = user_input[CONF_REGION]
            self.name = user_input[CONF_NAME]
            self.pin = user_input[CONF_CODE]
            self.host = user_input[CONF_IP_ADDRESS]

            # helper.link returns (is_ready, is_login); both must succeed.
            is_ready, is_login = await self.hass.async_add_executor_job(
                self.helper.link, self.host, self.creds, self.pin)

            if is_ready is False:
                errors['base'] = 'not_ready'
            elif is_login is False:
                errors['base'] = 'login_failed'
            else:
                device = {
                    CONF_HOST: self.host,
                    CONF_NAME: self.name,
                    CONF_REGION: self.region
                }

                # Create entry.
                return self.async_create_entry(
                    title='PlayStation 4',
                    data={
                        CONF_TOKEN: self.creds,
                        'devices': [device],
                    },
                )

        # Show User Input form.
        link_schema = OrderedDict()
        link_schema[vol.Required(CONF_IP_ADDRESS)] = vol.In(list(device_list))
        link_schema[vol.Required(
            CONF_REGION, default=DEFAULT_REGION)] = vol.In(list(REGIONS))
        link_schema[vol.Required(CONF_CODE)] = str
        link_schema[vol.Required(CONF_NAME, default=DEFAULT_NAME)] = str

        return self.async_show_form(
            step_id='link',
            data_schema=vol.Schema(link_schema),
            errors=errors,
        )
from .. import fst


def fstFromDict(initDict):
    """
    Build an ``fst`` instance from a plain dictionary description.

    A Mealy machine supplies ``'outputMealy'`` (one output per
    state/input pair), a Moore machine supplies ``'outputMoore'`` (one
    output per state), and a plain acceptor supplies ``'finalStates'``
    (transitions may map to a tuple of targets for non-determinism)::

        fstInitMealy = {
            'initState': 'S0',
            'inAlphabet': ( 0, 1, ),
            'transition': { 'S0': ( 'S1', 'S0', ),
                            'S1': ( 'S1', 'S0', ), },
            'outputMealy':{ 'S0': ( 0, 0, ),
                            'S1': ( 0, 1, ), },
        }
        fstInitMoore = {
            'initState': 'S0',
            'inAlphabet': ( 0, 1, ),
            'transition': { 'S0': ( 'S1', 'S0', ),
                            'S1': ( 'S1', 'S0', ), },
            'outputMoore':{ 'S0':2, 'S1':3, },
        }
        fsaInit = {
            'initState': 'S0',
            'finalStates': ('S1', 'S3'),
            'inAlphabet': ( 0, 1, None, ),
            'transition': { 'S0': ( None, None, ('S1', 'S3', ), ),
                            'S1': ( 'S2', 'S1', None, ),
                            'S2': ( 'S1', 'S2', None, ),
                            'S3': ( 'S3', 'S4', None, ),
                            'S4': ( 'S4', 'S3', None, ),
                          },
        }
    """
    start = initDict['initState']
    alphabet = initDict['inAlphabet']
    transitions = []

    has_mealy = 'outputMealy' in initDict
    has_moore = 'outputMoore' in initDict

    if has_mealy or has_moore:
        outputs = []
        for state, successors in initDict['transition'].items():
            if has_moore:
                # Moore machine: one output per state.
                outputs.append([state, initDict['outputMoore'][state]])
            else:
                # Mealy machine: one output per (state, input) pair.
                for symbol, out in zip(alphabet, initDict['outputMealy'][state]):
                    outputs.append([state, symbol, out])
            for symbol, nxt in zip(alphabet, successors):
                transitions.append([state, symbol, nxt])
        return fst(initState=start,
                   transitionFunction=transitions,
                   outputFunction=outputs)

    if 'finalStates' in initDict:
        finals = initDict['finalStates']
        for state, successors in initDict['transition'].items():
            for symbol, nxt in zip(alphabet, successors):
                # A tuple of targets encodes a non-deterministic fan-out.
                targets = nxt if isinstance(nxt, tuple) else (nxt,)
                for one in targets:
                    transitions.append([state, symbol, one])
        return fst(initState=start,
                   transitionFunction=transitions,
                   finalStates=finals)
import torch
import logging

import models.modules.UNet_arch as UNet_arch
logger = logging.getLogger('base')


####################
# define network
####################
#### Generator
def define_G(opt):
    """Instantiate the generator network described by ``opt['network_G']``.

    Raises NotImplementedError for any ``which_model_G`` other than
    ``'HDRUNet'``.
    """
    cfg = opt['network_G']
    model_name = cfg['which_model_G']

    if model_name != 'HDRUNet':
        raise NotImplementedError('Generator model [{:s}] not recognized'.format(model_name))
    return UNet_arch.HDRUNet(in_nc=cfg['in_nc'],
                             out_nc=cfg['out_nc'],
                             nf=cfg['nf'],
                             act_type=cfg['act_type'])
"""Auto-generated file, do not edit by hand. 81 metadata"""
from ..phonemetadata import NumberFormat

# Alternate formatting patterns for country calling code 81. All three
# formats apply to numbers whose leading digits match 120/570/990 and only
# differ in how the remaining digits are grouped with hyphens.
PHONE_ALT_FORMAT_81 = [NumberFormat(pattern='(\\d{3})(\\d{2})(\\d{4})', format=u'\\1-\\2-\\3', leading_digits_pattern=['(?:12|57|99)0']), NumberFormat(pattern='(\\d{3})(\\d{2})(\\d{2})(\\d{2})', format=u'\\1-\\2-\\3-\\4', leading_digits_pattern=['(?:12|57|99)0']), NumberFormat(pattern='(\\d{3})(\\d{4})(\\d{2})', format=u'\\1-\\2-\\3', leading_digits_pattern=['(?:12|57|99)0'])]
"""This file and its contents are licensed under the Apache License 2.0. Please see the included NOTICE for copyright information and LICENSE for a copy of the license.
"""
"""
Django Base settings for Label Studio.

For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
import re
import logging

# for printing messages before main logging config applied
if not logging.getLogger().hasHandlers():
    logging.basicConfig(level=logging.DEBUG, format='%(message)s')

from label_studio.core.utils.io import get_data_dir
from label_studio.core.utils.params import get_bool_env, get_env

logger = logging.getLogger(__name__)

# Hostname is used for proper path generation to the resources, pages, etc
HOSTNAME = get_env('HOST', '')
if HOSTNAME:
    if not HOSTNAME.startswith('http://') and not HOSTNAME.startswith('https://'):
        logger.info("! HOST variable found in environment, but it must start with http:// or https://, ignore it: %s", HOSTNAME)
        HOSTNAME = ''
    else:
        logger.info("=> Hostname correctly is set to: %s", HOSTNAME)
        # Strip a single trailing slash so path concatenation stays clean.
        if HOSTNAME.endswith('/'):
            HOSTNAME = HOSTNAME[0:-1]

        # for django url resolver
        if HOSTNAME:
            # http[s]://domain.com:8080/script_name => /script_name
            # NOTE(review): relies on the regex matching; a scheme-only value
            # like "http://" would make match None and raise -- confirm inputs.
            pattern = re.compile(r'^http[s]?:\/\/([^:\/\s]+(:\d*)?)(.*)?')
            match = pattern.match(HOSTNAME)
            FORCE_SCRIPT_NAME = match.group(3)
            if FORCE_SCRIPT_NAME:
                logger.info("=> Django URL prefix is set to: %s", FORCE_SCRIPT_NAME)

INTERNAL_PORT = '8080'

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): hard-coded with no environment override -- must be replaced
# for any production deployment.
SECRET_KEY = '$(fefwefwef13;LFK{P!)@#*!)kdsjfWF2l+i5e3t(8a1n'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = get_bool_env('DEBUG', True)
DEBUG_MODAL_EXCEPTIONS = get_bool_env('DEBUG_MODAL_EXCEPTIONS', True)


# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Base path for media root and other uploaded files
BASE_DATA_DIR = get_env('BASE_DATA_DIR', get_data_dir())
os.makedirs(BASE_DATA_DIR, exist_ok=True)
logger.info('=> Database and media directory: %s', BASE_DATA_DIR)

# Databases
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DJANGO_DB_MYSQL = 'mysql'
DJANGO_DB_SQLITE = 'sqlite'
DJANGO_DB = 'default'
DATABASE_NAME_DEFAULT = os.path.join(BASE_DATA_DIR, 'label_studio.sqlite3')
DATABASE_NAME = get_env('DATABASE_NAME', DATABASE_NAME_DEFAULT)
DATABASES_ALL = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'USER': get_env('POSTGRE_USER', 'postgres'),
        'PASSWORD': get_env('POSTGRE_PASSWORD', 'postgres'),
        'NAME': get_env('POSTGRE_NAME', 'postgres'),
        'HOST': get_env('POSTGRE_HOST', 'localhost'),
        'PORT': int(get_env('POSTGRE_PORT', '5432')),
    },
    DJANGO_DB_MYSQL: {
        'ENGINE': 'django.db.backends.mysql',
        'USER': get_env('MYSQL_USER', 'root'),
        'PASSWORD': get_env('MYSQL_PASSWORD', ''),
        'NAME': get_env('MYSQL_NAME', 'labelstudio'),
        'HOST': get_env('MYSQL_HOST', 'localhost'),
        'PORT': int(get_env('MYSQL_PORT', '3306')),
    },
    DJANGO_DB_SQLITE: {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': DATABASE_NAME,
        'OPTIONS': {
            # 'timeout': 20,
        }
    }
}
# NOTE(review): .get() yields DATABASES = {'default': None} when DJANGO_DB
# names an unknown key -- verify env values are restricted upstream.
DATABASES = {'default': DATABASES_ALL.get(get_env('DJANGO_DB', 'default'))}

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'standard': {
            'format': '[%(asctime)s] [%(name)s::%(funcName)s::%(lineno)d] [%(levelname)s] %(message)s',
        },
        'message_only': {
            'format': '%(message)s',
        },
        'rq_console': {
            'format': '%(asctime)s %(message)s',
            'datefmt': '%H:%M:%S',
        },
    },
    'handlers': {
        'console_raw': {
            'level': get_env('LOG_LEVEL', 'WARNING'),
            'class': 'logging.StreamHandler',
        },
        'console': {
            'level': get_env('LOG_LEVEL', 'WARNING'),
            'class': 'logging.StreamHandler',
            'formatter': 'standard'
        },
        'rq_console': {
            'level': 'WARNING',
            'class': 'rq.utils.ColorizingStreamHandler',
            'formatter': 'rq_console',
            'exclude': ['%(asctime)s'],
        }
    },
    'root': {
        'handlers': ['console'],
        'level': get_env('LOG_LEVEL', 'WARNING'),
    }
}

# Optionally mirror all root logging into Google Cloud Logging.
if get_bool_env('GOOGLE_LOGGING_ENABLED', False):
    logging.info('Google Cloud Logging handler is enabled.')
    try:
        import google.cloud.logging
        from google.auth.exceptions import GoogleAuthError

        client = google.cloud.logging.Client()
        client.setup_logging()

        LOGGING['handlers']['google_cloud_logging'] = {
            'level': get_env('LOG_LEVEL', 'WARNING'),
            'class': 'google.cloud.logging.handlers.CloudLoggingHandler',
            'client': client
        }
        LOGGING['root']['handlers'].append('google_cloud_logging')
    except GoogleAuthError as e:
        logger.exception('Google Cloud Logging handler could not be setup.')

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',

    'drf_yasg',
    'corsheaders',
    'django_extensions',
    'django_rq',
    'django_filters',
    'rules',
    'annoying',

    'rest_framework',
    'rest_framework_swagger',
    'rest_framework.authtoken',
    'drf_generators',

    'core',
    'users',
    'organizations',
    'data_import',
    'data_export',

    'projects',
    'tasks',
    'data_manager',
    'io_storages',
    'ml',
    'webhooks',
]

MIDDLEWARE = [
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'core.middleware.DisableCSRF',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'core.middleware.CommonMiddlewareAppendSlashWithoutRedirect',  # instead of 'CommonMiddleware'
    'core.middleware.CommonMiddleware',
    'django_user_agents.middleware.UserAgentMiddleware',
    'core.middleware.SetSessionUIDMiddleware',
    'core.middleware.ContextLogMiddleware',
    'core.middleware.DatabaseIsLockedRetryMiddleware',
]

REST_FRAMEWORK = {
    'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.TokenAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ),
    'DEFAULT_PERMISSION_CLASSES': [
        'core.api_permissions.HasObjectPermission',
        'rest_framework.permissions.IsAuthenticated',

    ],
    'EXCEPTION_HANDLER': 'core.utils.common.custom_exception_handler',
    'DEFAULT_RENDERER_CLASSES': (
        'rest_framework.renderers.JSONRenderer',
    ),
    'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.NamespaceVersioning'
}

# CORS & Host settings
INTERNAL_IPS = [  # django debug toolbar for django==2.2 requirement
    '127.0.0.1',
    'localhost',
]
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_METHODS = [
    'DELETE',
    'GET',
    'OPTIONS',
    'PATCH',
    'POST',
    'PUT',
]
ALLOWED_HOSTS = ['*']

# Auth modules
AUTH_USER_MODEL = 'users.User'
AUTHENTICATION_BACKENDS = [
    'rules.permissions.ObjectPermissionBackend',
    'django.contrib.auth.backends.ModelBackend'
]
USE_USERNAME_FOR_LOGIN = False

DISABLE_SIGNUP_WITHOUT_LINK = get_bool_env('DISABLE_SIGNUP_WITHOUT_LINK', False)

# Password validation:
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator'},
    {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
    {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
]

# Django templates
TEMPLATES_DIR = os.path.join(os.path.dirname(BASE_DIR), 'templates')  # ../../from_this = 'web' dir
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATES_DIR],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'core.context_processors.settings'
            ],
            'builtins': ['django.templatetags.i18n'],
        },
    }
]

# RQ
RQ_QUEUES = {
    'default': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 0,
        'DEFAULT_TIMEOUT': 180
    }
}

# Swagger: automatic API documentation
SWAGGER_SETTINGS = {
    'SECURITY_DEFINITIONS': {
        'token': {
            'type': 'token',
            'name': 'Token',
            'in': 'header',
            'url': '/user/account'
        }
    },
    'APIS_SORTER': 'alpha',
    'SUPPORTED_SUBMIT_METHODS': ['get', 'post', 'put', 'delete', 'patch'],
    # "DEFAULT_AUTO_SCHEMA_CLASS": "core.utils.CustomAutoSchema",
    'OPERATIONS_SORTER': 'alpha'
}

# Sentry error reporting (backend and frontend configured separately).
SENTRY_DSN = get_env('SENTRY_DSN', None)
SENTRY_RATE = float(get_env('SENTRY_RATE', 0.25))
SENTRY_ENVIRONMENT = get_env('SENTRY_ENVIRONMENT', 'stage.opensource')
SENTRY_REDIS_ENABLED = False
FRONTEND_SENTRY_DSN = get_env('FRONTEND_SENTRY_DSN', None)
FRONTEND_SENTRY_RATE = get_env('FRONTEND_SENTRY_RATE', 0.1)
FRONTEND_SENTRY_ENVIRONMENT = get_env('FRONTEND_SENTRY_ENVIRONMENT', 'stage.opensource')

ROOT_URLCONF = 'core.urls'
WSGI_APPLICATION = 'core.wsgi.application'
GRAPHIQL = True

# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = False
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# if FORCE_SCRIPT_NAME:
#     STATIC_URL = FORCE_SCRIPT_NAME + STATIC_URL
logger.info(f'=> Static URL is set to: {STATIC_URL}')

STATIC_ROOT = os.path.join(BASE_DIR, 'static_build')
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder'
)
STATICFILES_STORAGE = 'core.storage.SkipMissedManifestStaticFilesStorage'

# Sessions and CSRF
SESSION_COOKIE_SECURE = bool(int(get_env('SESSION_COOKIE_SECURE', True)))
CSRF_COOKIE_SECURE = bool(int(get_env('CSRF_COOKIE_SECURE', SESSION_COOKIE_SECURE)))
CSRF_COOKIE_HTTPONLY = bool(int(get_env('CSRF_COOKIE_HTTPONLY', SESSION_COOKIE_SECURE)))

# user media files
MEDIA_ROOT = os.path.join(BASE_DATA_DIR, 'media')
os.makedirs(MEDIA_ROOT, exist_ok=True)
MEDIA_URL = '/data/'
UPLOAD_DIR = 'upload'
AVATAR_PATH = 'avatars'

# project exports
EXPORT_DIR = os.path.join(BASE_DATA_DIR, 'export')
EXPORT_URL_ROOT = '/export/'
# old export dir
os.makedirs(EXPORT_DIR, exist_ok=True)
# dir for delayed export
DELAYED_EXPORT_DIR = 'export'
# NOTE(review): MEDIA_ROOT is already an absolute path, so os.path.join
# discards BASE_DATA_DIR here and this resolves to MEDIA_ROOT/export.
os.makedirs(os.path.join(BASE_DATA_DIR, MEDIA_ROOT, DELAYED_EXPORT_DIR), exist_ok=True)

# file / task size limits
DATA_UPLOAD_MAX_MEMORY_SIZE = int(get_env('DATA_UPLOAD_MAX_MEMORY_SIZE', 250 * 1024 * 1024))
TASKS_MAX_NUMBER = 1000000
TASKS_MAX_FILE_SIZE = DATA_UPLOAD_MAX_MEMORY_SIZE

TASK_LOCK_TTL = int(get_env('TASK_LOCK_TTL')) if get_env('TASK_LOCK_TTL') else None
TASK_LOCK_DEFAULT_TTL = int(get_env('TASK_LOCK_DEFAULT_TTL', 3600))
TASK_LOCK_MIN_TTL = int(get_env('TASK_LOCK_MIN_TTL', 120))

# Email backend
FROM_EMAIL = get_env('FROM_EMAIL', 'Label Studio <hello@labelstud.io>')
EMAIL_BACKEND = get_env('EMAIL_BACKEND', 'django.core.mail.backends.dummy.EmailBackend')

ENABLE_LOCAL_FILES_STORAGE = get_bool_env('ENABLE_LOCAL_FILES_STORAGE', default=True)
LOCAL_FILES_SERVING_ENABLED = get_bool_env('LOCAL_FILES_SERVING_ENABLED', default=False)

""" React Libraries: do not forget to change this dir in /etc/nginx/nginx.conf """
# EDITOR = label-studio-frontend repository
EDITOR_ROOT = os.path.join(BASE_DIR, '../frontend/dist/lsf')
# DM = data manager (included into FRONTEND due npm building, we need only version.json file from there)
DM_ROOT = os.path.join(BASE_DIR, '../frontend/dist/dm')
# FRONTEND = GUI for django backend
REACT_APP_ROOT = os.path.join(BASE_DIR, '../frontend/dist/react-app')

# per project settings
BATCH_SIZE = 1000
PROJECT_TITLE_MIN_LEN = 3
PROJECT_TITLE_MAX_LEN = 50
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/'
MIN_GROUND_TRUTH = 10
DATA_UNDEFINED_NAME = '$undefined$'
LICENSE = {}
VERSIONS = {}
VERSION_EDITION = 'Community Edition'
LATEST_VERSION_CHECK = True
VERSIONS_CHECK_TIME = 0
ALLOW_ORGANIZATION_WEBHOOKS = get_bool_env('ALLOW_ORGANIZATION_WEBHOOKS', False)
CONVERTER_DOWNLOAD_RESOURCES = get_bool_env('CONVERTER_DOWNLOAD_RESOURCES', True)
EXPERIMENTAL_FEATURES = get_bool_env('EXPERIMENTAL_FEATURES', False)

# Dotted-path hooks resolved elsewhere; overridable by enterprise editions.
CREATE_ORGANIZATION = 'organizations.functions.create_organization'
GET_OBJECT_WITH_CHECK_AND_LOG = 'core.utils.get_object.get_object_with_check_and_log'
SAVE_USER = 'users.functions.save_user'
USER_SERIALIZER = 'users.serializers.BaseUserSerializer'
DATA_MANAGER_GET_ALL_COLUMNS = 'data_manager.functions.get_all_columns'
DATA_MANAGER_ANNOTATIONS_MAP = {}
DATA_MANAGER_ACTIONS = {}
DATA_MANAGER_CUSTOM_FILTER_EXPRESSIONS = ''
USER_LOGIN_FORM = 'users.forms.LoginForm'
PROJECT_MIXIN = 'core.mixins.DummyModelMixin'
TASK_MIXIN = 'core.mixins.DummyModelMixin'
ANNOTATION_MIXIN = 'core.mixins.DummyModelMixin'
ORGANIZATION_MIXIN = 'core.mixins.DummyModelMixin'
USER_MIXIN = 'users.mixins.UserMixin'
GET_STORAGE_LIST = 'io_storages.functions.get_storage_list'

STORAGE_ANNOTATION_SERIALIZER = 'io_storages.serializers.StorageAnnotationSerializer'


def project_delete(project):
    """Default project deletion hook (exposed below as PROJECT_DELETE)."""
    project.delete()


def user_auth(user_model, email, password):
    """Default auth hook (exposed below as USER_AUTH); always returns None."""
    return None


def collect_versions_dummy(**kwargs):
    """Default version-collection hook; returns an empty mapping."""
    return {}


PROJECT_DELETE = project_delete
USER_AUTH = user_auth
COLLECT_VERSIONS = collect_versions_dummy

WEBHOOK_TIMEOUT = float(get_env('WEBHOOK_TIMEOUT', 1.0))

# fix a problem with Windows mimetypes for JS and PNG
import mimetypes
mimetypes.add_type("application/javascript", ".js", True)
mimetypes.add_type("image/png", ".png", True)
# Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0

"""
This example demonstrates how to use the Causal Effect Variational Autoencoder
[1] implemented in pyro.contrib.cevae.CEVAE, documented at
http://docs.pyro.ai/en/latest/contrib.cevae.html

**References**

[1] C. Louizos, U. Shalit, J. Mooij, D. Sontag, R. Zemel, M. Welling (2017).
    Causal Effect Inference with Deep Latent-Variable Models.
    http://papers.nips.cc/paper/7223-causal-effect-inference-with-deep-latent-variable-models.pdf
    https://github.com/AMLab-Amsterdam/CEVAE
"""
import argparse
import logging

import torch

import pyro
import pyro.distributions as dist
from pyro.contrib.cevae import CEVAE

# Verbose pyro logging so training progress is visible in this demo.
logging.getLogger("pyro").setLevel(logging.DEBUG)
logging.getLogger("pyro").handlers[0].setLevel(logging.DEBUG)


def generate_data(args):
    """
    This implements the generative process of [1], but using larger feature and
    latent spaces ([1] assumes ``feature_dim=1`` and ``latent_dim=5``).

    Returns ``(x, t, y, true_ite)``: features, binary treatment, binary
    outcome, and the ground-truth individual treatment effect.
    """
    # z: binary confounder; x: noisy features whose noise scale depends on z.
    z = dist.Bernoulli(0.5).sample([args.num_data])
    x = dist.Normal(z, 5 * z + 3 * (1 - z)).sample([args.feature_dim]).t()
    # Treatment assignment and outcome both depend on the confounder z.
    t = dist.Bernoulli(0.75 * z + 0.25 * (1 - z)).sample()
    # NOTE(review): (2 * t - 2) maps t in {0,1} to {-2,0} rather than the
    # symmetric {-1,+1} of (2 * t - 1) -- confirm this is intended.
    y = dist.Bernoulli(logits=3 * (z + 2 * (2 * t - 2))).sample()

    # Compute true ite for evaluation (via Monte Carlo approximation).
    t0_t1 = torch.tensor([[0.], [1.]])
    y_t0, y_t1 = dist.Bernoulli(logits=3 * (z + 2 * (2 * t0_t1 - 2))).mean
    true_ite = y_t1 - y_t0
    return x, t, y, true_ite


def main(args):
    """Train a CEVAE on synthetic data and compare ATE estimates."""
    pyro.enable_validation(__debug__)
    if args.cuda:
        torch.set_default_tensor_type('torch.cuda.FloatTensor')

    # Generate synthetic data.
    pyro.set_rng_seed(args.seed)
    x_train, t_train, y_train, _ = generate_data(args)

    # Train.
    pyro.set_rng_seed(args.seed)
    pyro.clear_param_store()
    cevae = CEVAE(feature_dim=args.feature_dim,
                  latent_dim=args.latent_dim,
                  hidden_dim=args.hidden_dim,
                  num_layers=args.num_layers,
                  num_samples=10)
    cevae.fit(x_train, t_train, y_train,
              num_epochs=args.num_epochs,
              batch_size=args.batch_size,
              learning_rate=args.learning_rate,
              learning_rate_decay=args.learning_rate_decay,
              weight_decay=args.weight_decay)

    # Evaluate.
    x_test, t_test, y_test, true_ite = generate_data(args)
    true_ate = true_ite.mean()
    print("true ATE = {:0.3g}".format(true_ate.item()))
    # Naive estimate ignores confounding: difference of group means.
    naive_ate = y_test[t_test == 1].mean() - y_test[t_test == 0].mean()
    print("naive ATE = {:0.3g}".format(naive_ate))
    if args.jit:
        cevae = cevae.to_script_module()
    est_ite = cevae.ite(x_test)
    est_ate = est_ite.mean()
    print("estimated ATE = {:0.3g}".format(est_ate.item()))


if __name__ == "__main__":
    assert pyro.__version__.startswith('1.5.0')
    parser = argparse.ArgumentParser(description="Causal Effect Variational Autoencoder")
    parser.add_argument("--num-data", default=1000, type=int)
    parser.add_argument("--feature-dim", default=5, type=int)
    parser.add_argument("--latent-dim", default=20, type=int)
    parser.add_argument("--hidden-dim", default=200, type=int)
    parser.add_argument("--num-layers", default=3, type=int)
    parser.add_argument("-n", "--num-epochs", default=50, type=int)
    parser.add_argument("-b", "--batch-size", default=100, type=int)
    parser.add_argument("-lr", "--learning-rate", default=1e-3, type=float)
    parser.add_argument("-lrd", "--learning-rate-decay", default=0.1, type=float)
    parser.add_argument("--weight-decay", default=1e-4, type=float)
    parser.add_argument("--seed", default=1234567890, type=int)
    parser.add_argument("--jit", action="store_true")
    parser.add_argument("--cuda", action="store_true")
    args = parser.parse_args()
    main(args)
# coding: utf-8

"""
    Kubernetes

    No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501

    The version of the OpenAPI document: v1.20.7
    Generated by: https://openapi-generator.tech
"""


import pprint
import re  # noqa: F401

import six

from kubernetes.client.configuration import Configuration


class V1EventSource(object):
    """Event source model from the Kubernetes OpenAPI spec.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    # Maps attribute name -> OpenAPI type.
    openapi_types = {
        'component': 'str',
        'host': 'str'
    }

    # Maps attribute name -> JSON key in the API definition.
    attribute_map = {
        'component': 'component',
        'host': 'host'
    }

    def __init__(self, component=None, host=None, local_vars_configuration=None):  # noqa: E501
        """V1EventSource - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._component = None
        self._host = None
        self.discriminator = None

        # Only route through the setters for values that were supplied.
        if component is not None:
            self.component = component
        if host is not None:
            self.host = host

    @property
    def component(self):
        """Gets the component of this V1EventSource.  # noqa: E501

        Component from which the event is generated.  # noqa: E501

        :return: The component of this V1EventSource.  # noqa: E501
        :rtype: str
        """
        return self._component

    @component.setter
    def component(self, component):
        """Sets the component of this V1EventSource.

        Component from which the event is generated.  # noqa: E501

        :param component: The component of this V1EventSource.  # noqa: E501
        :type: str
        """
        self._component = component

    @property
    def host(self):
        """Gets the host of this V1EventSource.  # noqa: E501

        Node name on which the event is generated.  # noqa: E501

        :return: The host of this V1EventSource.  # noqa: E501
        :rtype: str
        """
        return self._host

    @host.setter
    def host(self, host):
        """Sets the host of this V1EventSource.

        Node name on which the event is generated.  # noqa: E501

        :param host: The host of this V1EventSource.  # noqa: E501
        :type: str
        """
        self._host = host

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _convert(value):
            # One-level conversion: nested models expose to_dict themselves.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: (val.to_dict() if hasattr(val, "to_dict") else val)
                        for key, val in value.items()}
            return value

        return {attr: _convert(getattr(self, attr))
                for attr, _ in six.iteritems(self.openapi_types)}

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1EventSource):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1EventSource):
            return True

        return self.to_dict() != other.to_dict()
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-02-25 16:08
from __future__ import unicode_literals

import django.contrib.postgres.fields.hstore
from django.db import migrations, models


class Migration(migrations.Migration):
    """Register unmanaged models over pre-existing ``research_*`` tables.

    Every ``CreateModel`` below sets ``"managed": False``, so Django only
    records the models in migration state and emits no SQL to create or
    alter the underlying tables.  The ``name`` columns are PostgreSQL
    HStore fields (string key/value pairs).
    """

    dependencies = [("research", "0004_public_flag")]

    operations = [
        migrations.CreateModel(
            name="Classification",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", django.contrib.postgres.fields.hstore.HStoreField()),
            ],
            options={"db_table": "research_classification", "managed": False},
        ),
        migrations.CreateModel(
            name="Education",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", django.contrib.postgres.fields.hstore.HStoreField()),
            ],
            options={"db_table": "research_education", "managed": False},
        ),
        migrations.CreateModel(
            name="Expertise",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", django.contrib.postgres.fields.hstore.HStoreField()),
            ],
            options={"db_table": "research_expertise", "managed": False},
        ),
        migrations.CreateModel(
            name="Knowledge",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", django.contrib.postgres.fields.hstore.HStoreField()),
            ],
            options={"db_table": "research_knowledge", "managed": False},
        ),
        migrations.CreateModel(
            name="Program",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", models.CharField(blank=True, max_length=256, null=True)),
                ("active", models.BooleanField()),
            ],
            options={"db_table": "research_program", "managed": False},
        ),
        migrations.CreateModel(
            name="PublicationAuthorship",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("name", django.contrib.postgres.fields.hstore.HStoreField()),
            ],
            options={"db_table": "research_publicationauthorship", "managed": False},
        ),
        migrations.CreateModel(
            name="PublicationOrganization",
            fields=[
                # NOTE(review): unlike the other models, the primary key here
                # is a CharField, not an AutoField — confirm this matches the
                # existing table's schema.
                (
                    "id",
                    models.CharField(max_length=256, primary_key=True, serialize=False),
                ),
                ("assigned", models.DateTimeField(blank=True, null=True)),
            ],
            options={"db_table": "research_publicationorganization", "managed": False},
        ),
    ]
# Build a list whose elements are dictionaries (one record per pen).


def _make_pen(color):
    """Return a pen record; only the colour differs between the pens."""
    return {'color': color,
            'price': '2.5',
            'brand': 'faber castel'}


pen_1 = _make_pen('black')
pen_2 = _make_pen('blue')
pen_3 = _make_pen('red')

all_pens = [pen_1, pen_2, pen_3]

print(all_pens)
"""NEWLINEImplementation of custom solvers: advection equation with forward-time, backward-space; Burgers' equation withNEWLINEMacCormack scheme and Korteweg-de Vries equation with Zabusky and Kruska scheme.NEWLINE"""NEWLINEimport numpy as npNEWLINEimport matplotlib.pyplot as pltNEWLINEimport matplotlib as mplNEWLINEimport sympy as spNEWLINEimport warningsNEWLINENEWLINENEWLINE# enable pgf printing of solution plotNEWLINEmpl.use("pgf")NEWLINEpgf_with_custom_preamble = {NEWLINE "font.family": "serif", # use serif/main font for text elementsNEWLINE "pgf.rcfonts": False,NEWLINE "text.usetex": True, # use inline math for ticksNEWLINE}NEWLINEmpl.rcParams.update(pgf_with_custom_preamble)NEWLINENEWLINENEWLINEclass FDgrid:NEWLINE """NEWLINE Class for initialization of the calculation domain and data storage;NEWLINE handles an arbitrary number of ghost cells 'n_ghost'NEWLINE """NEWLINE def __init__(self, x_nodes, n_t, x_min, x_max, u_max_convection, cfl, n_ghost):NEWLINE """NEWLINE Initializes the calculation domainNEWLINENEWLINE :param x_nodes: Number of points in domain OmegaNEWLINE :param n_t: Total number of time steps including IC t = 0NEWLINE :param x_min: left bound of OmegaNEWLINE :param x_max: right bound of OmegaNEWLINE :param u_max_convection: convection speed to calculate dt from cflNEWLINE :param cfl: cfl number of ICNEWLINE :param n_ghost: number of ghost cells for periodic BC needed by scheme (1 for advection and Burgers; 2 for KdV)NEWLINE """NEWLINENEWLINE self.n_x = x_nodes + n_ghost * 2 # ghost nodes at both sidesNEWLINE self.x_nodes = x_nodesNEWLINE self.n_t = n_tNEWLINE self.x_min = x_minNEWLINE self.x_max = x_maxNEWLINE self.n_ghost = n_ghostNEWLINE self.i_ghost_r = x_nodes + n_ghost # index of leftmost ghost node at right boundaryNEWLINE self.i_ghost_l = n_ghost - 1 # index of rightmost ghost node at left boundaryNEWLINE self.dx = (x_max - x_min) / x_nodes # save spatial widthNEWLINE self.dt = (cfl*self.dx)/u_max_convection # set dt according to desired 
cfl numberNEWLINE self.t_max = self.dt * (n_t - 1) # t = 0 is initial conditionNEWLINE self.grid = np.zeros((self.n_x, n_t), dtype=np.float64) # initialize array to store simulation resultsNEWLINENEWLINE def fill_BC(self, i_time):NEWLINE """fills ghost cells with periodic boundary conditions"""NEWLINE vect_to_set = np.zeros(self.n_x)NEWLINE # copies the data within the domain to a vectorNEWLINE vect_to_set[self.i_ghost_l + 1: self.i_ghost_r] = self.grid[self.i_ghost_l + 1: self.i_ghost_r, i_time]NEWLINE vect_to_set = set_periodic_BC(self, vect_to_set) # sets periodic BCs for vectorNEWLINE self.grid[:, i_time] = vect_to_set # copies filled vector back onto the gridNEWLINENEWLINENEWLINEdef set_periodic_BC(domain, vect):NEWLINE """Helper function called from 'fill_BC' to set the periodic BCs for arbitrary number of ghost cells"""NEWLINE for i in range(domain.n_ghost): # set all values for ghost cells, starting from left for both sidesNEWLINE # value of left ghost cell is value of most right real cellNEWLINE # leftmost left node is to be n_ghost nodes left of the leftmost right ghost nodeNEWLINE vect[i] = vect[domain.i_ghost_r - domain.n_ghost + i] # set left boundary elementNEWLINE # leftmost right ghost node is first real left nodeNEWLINE vect[domain.i_ghost_r + i] = vect[i + domain.n_ghost] # right boundaryNEWLINE return vectNEWLINENEWLINENEWLINEdef solve(x_nodes, n_t, initial_cond, equation, x_min=0., x_max=1., cfl=0.1, a=1., manufactured=False, s=None):NEWLINE """NEWLINENEWLINE :param x_nodes: Number of points in domain OmegaNEWLINE :param n_t: Total number of time steps including IC t = 0NEWLINE :param initial_cond: Numpy array containing the values of the IC; Dimension is x_nodesNEWLINE :param equation: String of equation to be solvedNEWLINE :param x_min: left bound of Omega (default=0.0)NEWLINE :param x_max: right bound of Omega (default=1.0)NEWLINE :param cfl: desired cfl number of IC (default=0.1)NEWLINE :param a: Advection speed; Only used if 'equation' == 
'Advection' (default=1.0)NEWLINE :param manufactured: Whether the Method of Manufactured solution is to be calculated (forcing 's' will be applied)NEWLINE (default=False)NEWLINE :param s: Forcing Function of MMS (default=None)NEWLINE :return: FDgrid object containing the simulation results in FDgrid.grid and information about the discretizationNEWLINE """NEWLINENEWLINE # set up calculation domain:NEWLINE if equation == 'Advection':NEWLINE u_max_convection = aNEWLINE if a < 0.: warnings.warn('FTBS only implemented for a > 0: solver will not be stable')NEWLINE else: # for nonlinear equations: calculate maximum convection speed in cfl from initial conditionsNEWLINE u_max_convection = np.max(np.abs(initial_cond))NEWLINE n_ghost = 1 # for FTBS and MacCormackNEWLINE if equation == 'KdV':NEWLINE n_ghost = 2NEWLINE domain = FDgrid(x_nodes, n_t, x_min, x_max, u_max_convection, cfl, n_ghost) # initializes calculation domainNEWLINE domain.grid[domain.i_ghost_l + 1:domain.i_ghost_r, 0] = initial_cond # set ICNEWLINE domain.fill_BC(0) # sets ghost cells for ICNEWLINENEWLINE # initialize sympy variables to process forcing term used in MMSNEWLINE x_values = np.arange(x_min, x_max, domain.dx) # for evaluation of forcing functionNEWLINE x = sp.symbols('x')NEWLINE t = sp.symbols('t')NEWLINENEWLINE if equation == 'Advection': # solve advection u_t + a u_x = 0 using FTBSNEWLINE for i_t in range(1, n_t):NEWLINE for i_x in range(domain.i_ghost_l + 1, domain.i_ghost_r): # iterate domain without ghost cellsNEWLINE # FTBS for a > 0:NEWLINE # u_i^n+1 = u_i^n - cfl (u_i^n - u_i-1^n) ; cfl = a * t / dxNEWLINE domain.grid[i_x, i_t] = domain.grid[i_x, i_t - 1] - cfl * \NEWLINE (domain.grid[i_x, i_t - 1] - domain.grid[i_x - 1, i_t - 1])NEWLINE if manufactured: # add forcing from MMSNEWLINE time = (i_t - 1) * domain.dt # to evaluate source term for current time stepNEWLINE domain.grid[domain.i_ghost_l + 1:domain.i_ghost_r, i_t] += domain.dt * calculate_forcing_manufactured(NEWLINE x_values, x, t, 
s, time)NEWLINE domain.fill_BC(i_t)NEWLINENEWLINE elif equation == 'Burgers':NEWLINE # solve Burgers equation u_t + g_x = 0 ; g = u^2/2 using 2nd order scheme in time and space of Mac CormackNEWLINE u_predictor = np.zeros(domain.n_x) # initialize saving of predictor stepNEWLINE for i_t in range(1, n_t):NEWLINE time = (i_t - 1) * domain.dt # time for evaluation source termNEWLINENEWLINE # prediction step:NEWLINE for i_x in range(domain.i_ghost_l + 1, domain.i_ghost_r): # iterate domain without ghost cellsNEWLINE # u_i^n+1_pred = u_i^n - dt/dx(g_i+1^n - g_i^n)NEWLINE u_predictor[i_x] = domain.grid[i_x, i_t - 1] - (domain.dt / domain.dx) *\NEWLINE (0.5 * domain.grid[i_x + 1, i_t - 1] ** 2 - 0.5 * domain.grid[i_x, i_t - 1] ** 2)NEWLINE if manufactured: # add forcing from MMSNEWLINE u_predictor[domain.i_ghost_l + 1:domain.i_ghost_r] += domain.dt * calculate_forcing_manufactured(NEWLINE x_values, x, t, s, time)NEWLINE # set periodic BC for predictor; MacCormack only needs a single ghost cellNEWLINE u_predictor[domain.i_ghost_l] = u_predictor[domain.i_ghost_r - 1]NEWLINE u_predictor[domain.i_ghost_r] = u_predictor[domain.i_ghost_l + 1]NEWLINENEWLINE # correction step:NEWLINE for i_x in range(domain.i_ghost_l + 1, domain.i_ghost_r): # iterate domain without ghost cellsNEWLINE # u_i^n+1 = u_i^n - 0.5*(dt/dx) * ((g_i+1^n - g_i^n) + (g_i^n_pred - g_i-1^n_pred))NEWLINE domain.grid[i_x, i_t] = domain.grid[i_x, i_t - 1] - 0.5 * (domain.dt/domain.dx) * \NEWLINE ((0.5 * domain.grid[i_x + 1, i_t - 1] ** 2 - 0.5 * domain.grid[i_x, i_t - 1] ** 2) +NEWLINE (0.5 * u_predictor[i_x] ** 2 - 0.5 * u_predictor[i_x - 1] ** 2))NEWLINE if manufactured: # forcing needs to be evaluated at intermediate stepNEWLINE domain.grid[domain.i_ghost_l + 1:domain.i_ghost_r, i_t] += domain.dt * calculate_forcing_manufactured(NEWLINE x_values, x, t, s, time + 0.5*domain.dt)NEWLINENEWLINE domain.fill_BC(i_t)NEWLINENEWLINE elif equation == 'KdV':NEWLINE # solve KdV u_x + 6*uu_x + u_xxx = 0 using the explicit 
2nd order scheme in space and time of Zabusky and KruskaNEWLINENEWLINE # use forward time scheme in first time step to generate data to use for central time steppingNEWLINE for i_x in range(domain.i_ghost_l + 1, domain.i_ghost_r):NEWLINE # u_j^k+1 = u_j^k - (dt/dx)*(u_j+1^k + u_j^k + u_j-1^k) * (u_j+1^k - u_j-1^k) -NEWLINE # 0.5 * dt/dx**3 * (u_j+2^k - 2 * u_j+1^k + 2 * u_j-1^k - u_j-2^k)NEWLINE domain.grid[i_x, 1] = domain.grid[i_x, 0] - (domain.dt/domain.dx) * (domain.grid[i_x + 1, 0] +NEWLINE domain.grid[i_x, 0] + domain.grid[i_x - 1, 0]) * 0.5 * (domain.grid[i_x + 1, 0]NEWLINE - domain.grid[i_x - 1, 0]) - 0.5 * (domain.dt / domain.dx ** 3) * \NEWLINE (domain.grid[i_x + 2, 0] - 2. * domain.grid[i_x + 1, 0] + 2. * domain.grid[i_x - 1, 0]NEWLINE - domain.grid[i_x - 2, 0])NEWLINE if manufactured: # add forcing for MMSNEWLINE domain.grid[domain.i_ghost_l + 1:domain.i_ghost_r, 1] += domain.dt * calculate_forcing_manufactured(NEWLINE x_values, x, t, s, 0.)NEWLINE domain.fill_BC(1)NEWLINENEWLINE # central time stepping from now onNEWLINE for i_t in range(2, n_t):NEWLINENEWLINE for i_x in range(domain.i_ghost_l + 1, domain.i_ghost_r):NEWLINE # u_j^k+1 = u_j^k-1 - 2 * (dt/dx) * (u_j+1^k + u_j^k + u_j-1^k) * (u_j+1^k - u_j-1^k) - dt / dx**3 *NEWLINE # (u_j+2^k - 2 * u_j+1^k + 2 * u_j-1^k - u_j-2^k)NEWLINE domain.grid[i_x, i_t] = domain.grid[i_x, i_t - 2] - 2. * (domain.dt / domain.dx) * \NEWLINE (domain.grid[i_x + 1, i_t - 1] + domain.grid[i_x, i_t - 1] +NEWLINE domain.grid[i_x - 1, i_t - 1]) * (domain.grid[i_x + 1, i_t - 1] -NEWLINE domain.grid[i_x - 1, i_t - 1]) - (domain.dt / (domain.dx ** 3)) * \NEWLINE (domain.grid[i_x + 2, i_t - 1] - 2. * domain.grid[i_x + 1, i_t - 1] +NEWLINE 2. * domain.grid[i_x - 1, i_t - 1] - domain.grid[i_x - 2, i_t - 1])NEWLINE if manufactured: # add forcing for MMSNEWLINE time = (i_t - 1) * domain.dtNEWLINE domain.grid[domain.i_ghost_l + 1:domain.i_ghost_r, i_t] += 2. 
* domain.dt * \NEWLINE calculate_forcing_manufactured(x_values, x, t, s, time)NEWLINE domain.fill_BC(i_t)NEWLINENEWLINE else: raise Exception('Equation not implemented! (or typo)')NEWLINENEWLINE return domainNEWLINENEWLINENEWLINEdef calculate_forcing_manufactured(x_values, x, t, s, time):NEWLINE """Calculates the forcing term for MMS from the source term; directly depends on time"""NEWLINE lam_s = sp.lambdify(x, s.subs({t: time}), modules=['numpy'])NEWLINE return lam_s(x_values)NEWLINENEWLINENEWLINEdef visualize(domain):NEWLINE """Function to plot the first and last time step of a simulation; to check if everything worked as expected"""NEWLINE tn = np.arange(0., domain.n_t * domain.dt, domain.dt) # array with all timestampsNEWLINE xn = np.arange(domain.x_min, domain.x_max, domain.dx) # array with all x_valuesNEWLINENEWLINE fig = plt.figure(figsize=(5., 3.3))NEWLINE colorlist = [(0., 101 / 256., 189 / 256., 1.), (227/256., 114/256., 34/256., 1.)]NEWLINENEWLINE for index, i in enumerate([0, domain.n_t-1]): # plot IC and last time stepNEWLINE subfig = fig.add_subplot(1, 1, 1)NEWLINE label = 't = ' + str(round(tn[i], 2))NEWLINE subfig.plot(xn, domain.grid[domain.i_ghost_l + 1:domain.i_ghost_r, i], label=label, color=colorlist[index])NEWLINE subfig.legend()NEWLINENEWLINE plt.xlabel('$x$')NEWLINE plt.ylabel('$u(x, t)$')NEWLINE plt.title('Time evolution of solution')NEWLINENEWLINE plt.savefig('transport-equation.png')NEWLINE plt.savefig('transport-equation.pgf')NEWLINE |
from __future__ import unicode_literals

from .theplatform import ThePlatformFeedIE
from ..utils import (
    ExtractorError,
    int_or_none,
    find_xpath_attr,
    xpath_element,
    xpath_text,
    update_url_query,
)


class CBSBaseIE(ThePlatformFeedIE):
    """Shared helpers for CBS-family extractors built on ThePlatform feeds."""

    def _parse_smil_subtitles(self, smil, namespace=None, subtitles_lang='en'):
        # Collect caption tracks advertised in the SMIL document: look for the
        # three known caption <param> names and map each to its file extension.
        subtitles = {}
        for k, ext in [('sMPTE-TTCCURL', 'tt'), ('ClosedCaptionURL', 'ttml'), ('webVTTCaptionURL', 'vtt')]:
            cc_e = find_xpath_attr(smil, self._xpath_ns('.//param', namespace), 'name', k)
            if cc_e is not None:
                cc_url = cc_e.get('value')
                if cc_url:
                    subtitles.setdefault(subtitles_lang, []).append({
                        'ext': ext,
                        'url': cc_url,
                    })
        return subtitles


class CBSIE(CBSBaseIE):
    """Extractor for cbs.com show videos and colbertlateshow.com video/podcast pages."""

    _VALID_URL = r'(?:cbs:|https?://(?:www\.)?(?:cbs\.com/shows/[^/]+/video|colbertlateshow\.com/(?:video|podcasts))/)(?P<id>[\w-]+)'

    _TESTS = [{
        'url': 'http://www.cbs.com/shows/garth-brooks/video/_u7W953k6la293J7EPTd9oHkSPs6Xn6_/connect-chat-feat-garth-brooks/',
        'info_dict': {
            'id': '_u7W953k6la293J7EPTd9oHkSPs6Xn6_',
            'ext': 'mp4',
            'title': 'Connect Chat feat. Garth Brooks',
            'description': 'Connect with country music singer Garth Brooks, as he chats with fans on Wednesday November 27, 2013. Be sure to tune in to Garth Brooks: Live from Las Vegas, Friday November 29, at 9/8c on CBS!',
            'duration': 1495,
            'timestamp': 1385585425,
            'upload_date': '20131127',
            'uploader': 'CBSI-NEW',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
        '_skip': 'Blocked outside the US',
    }, {
        'url': 'http://colbertlateshow.com/video/8GmB0oY0McANFvp2aEffk9jZZZ2YyXxy/the-colbeard/',
        'only_matching': True,
    }, {
        'url': 'http://www.colbertlateshow.com/podcasts/dYSwjqPs_X1tvbV_P2FcPWRa_qT6akTC/in-the-bad-room-with-stephen/',
        'only_matching': True,
    }]

    def _extract_video_info(self, content_id, site='cbs', mpx_acc=2198311517):
        # Query CBS's "thunder" player service; it returns one <item> per
        # available asset type for this content id.
        items_data = self._download_xml(
            'http://can.cbs.com/thunder/player/videoPlayerService.php',
            content_id, query={'partner': site, 'contentId': content_id})
        # First <item> carries the shared metadata (title, series, ...).
        video_data = xpath_element(items_data, './/item')
        title = xpath_text(video_data, 'videoTitle', 'title') or xpath_text(video_data, 'videotitle', 'title')
        tp_path = 'dJ5BDC/media/guid/%d/%s' % (mpx_acc, content_id)
        tp_release_url = 'http://link.theplatform.com/s/' + tp_path

        asset_types = []
        subtitles = {}
        formats = []
        last_e = None
        for item in items_data.findall('.//item'):
            asset_type = xpath_text(item, 'assetType')
            # Skip duplicates and DRM-protected asset types (HLS_FPS, DASH_CENC).
            if not asset_type or asset_type in asset_types or 'HLS_FPS' in asset_type or 'DASH_CENC' in asset_type:
                continue
            asset_types.append(asset_type)
            query = {
                'mbr': 'true',
                'assetTypes': asset_type,
            }
            # Request the container formats appropriate for each asset type.
            if asset_type.startswith('HLS') or asset_type in ('OnceURL', 'StreamPack'):
                query['formats'] = 'MPEG4,M3U'
            elif asset_type in ('RTMP', 'WIFI', '3G'):
                query['formats'] = 'MPEG4,FLV'
            try:
                tp_formats, tp_subtitles = self._extract_theplatform_smil(
                    update_url_query(tp_release_url, query), content_id,
                    'Downloading %s SMIL data' % asset_type)
            except ExtractorError as e:
                # Remember the failure but keep trying the remaining asset types.
                last_e = e
                continue
            formats.extend(tp_formats)
            subtitles = self._merge_subtitles(subtitles, tp_subtitles)
        # Only surface the last SMIL error when nothing at all was extracted.
        if last_e and not formats:
            raise last_e
        self._sort_formats(formats)

        info = self._extract_theplatform_metadata(tp_path, content_id)
        info.update({
            'id': content_id,
            'title': title,
            'series': xpath_text(video_data, 'seriesTitle'),
            'season_number': int_or_none(xpath_text(video_data, 'seasonNumber')),
            'episode_number': int_or_none(xpath_text(video_data, 'episodeNumber')),
            'duration': int_or_none(xpath_text(video_data, 'videoLength'), 1000),
            'thumbnail': xpath_text(video_data, 'previewImageURL'),
            'formats': formats,
            'subtitles': subtitles,
        })
        return info

    def _real_extract(self, url):
        content_id = self._match_id(url)
        return self._extract_video_info(content_id)
# -*- coding:utf-8 -*-
# @Time: 2021/1/18 8:49
# @Author: Zhanyi Hou
# @Email: 1295752786@qq.com
# @File: syntaxana.py
# -*- coding: utf-8 -*-
'''
powered by NovalIDE
Lexical-analysis module taken from NovalIDE.
Author: Zhanyi Hou
Core building blocks of the lexical analyser;
features are extracted with a collection of regular expressions.
'''
from typing import List, Tuple, Dict
import re


def getReplacingDic() -> Dict[str, str]:
    """Map full-width (CJK) punctuation characters to their ASCII equivalents."""
    dic = {}
    dic[','] = ','
    dic['。'] = '.'
    dic[';'] = ';'
    dic[':'] = ':'
    dic['‘'] = '\''
    dic['’'] = '\''
    dic['“'] = '\"'
    dic['”'] = '\"'
    dic['【'] = '['
    dic['】'] = ']'
    dic['('] = '('
    dic[')'] = ')'
    return dic


def getIndent(s: str) -> Tuple[str, int]:
    """Return (text without leading spaces, indent width) for a source line."""
    # NOTE(review): the original comment said "replace tab with four spaces",
    # but the code substitutes a single space — confirm which is intended.
    s = s.replace('\t', ' ')
    s = s.rstrip()
    if (len(s) > 0):
        for i, ch in enumerate(s):
            if (ch != ' '):
                return s[i:], i
        # Line consists only of spaces; `i` is the last loop index.
        return "", i + 1
    else:
        return "", 0


def removeComment(s: str) -> str:
    """Strip everything from the first '#' onward ('#' inside strings is not special-cased)."""
    pos = s.find('#')
    if (pos != -1):
        return s[:pos]
    else:
        return s


def getStringContent(row):
    # Placeholder: not implemented.
    pass


def removeStringContent(row: str) -> str:
    """Replace every quoted string literal in *row* with an empty literal ''."""
    row = row.replace('\"', '\'')  # normalise double quotes to single quotes first
    if (row.count('\'') >= 2):
        s = getAllFromRegex(regex=r'[\'](.*?)[\']', st=row)
        for item in s:
            row = row.replace('\'%s\'' % item, '\'\'')  # replace the content together with its quotes
        return row
    else:
        return row


def parseVarType(row: str):
    # NOTE(review): the regex result is discarded and the function returns
    # None — confirm whether this is unfinished code.
    getInfoFromRegex(r'[\'](.*?)[\']', row)


def getAllFromRegex(regex: str, st: str) -> List[str]:
    """Return every match of *regex* in *st* (DOTALL mode)."""
    foundList = re.findall(re.compile(regex, re.S), st)

    return foundList


def getInfoFromRegex(regex: str, st: str) -> str:  # return the first regex match, or '' when nothing matches
    foundList = re.findall(re.compile(regex, re.S), st)
    item = ''
    if (foundList != []):
        item = foundList[0]
    return item


def getWordsFromString(s: str) -> list:
    """Split *s* on commas and strip whitespace from every token."""
    if (s != ''):
        syms = s.split(',')  # split on commas
        for i in range(len(syms)):
            syms[i] = syms[i].strip()
        return syms
    else:
        return []


def countPar(row: str) -> Tuple[int, int, int]:  # count the three kinds of brackets
    lparNum = row.count('(')
    rparNum = row.count(')')
    lbraceNum = row.count('{')
    rbraceNum = row.count('}')
    lbracketNum = row.count('[')
    rbracketNum = row.count(']')

    return lparNum - rparNum, lbraceNum - rbraceNum, lbracketNum - rbracketNum  # opening minus closing, per kind


def checkPar(row: str) -> int:
    """Return 1 if all bracket kinds balance, -1 if any kind over-closes, 0 otherwise."""
    a, b, c = countPar(row)
    if (a == 0) & (b == 0) & (c == 0):
        return 1
    else:
        if (a < 0) | (b < 0) | (c < 0):
            return -1
        else:
            return 0


def getBracketedContent(row: str) -> Tuple[str, str, str]:  # content inside the outermost bracket of any kind (not just parentheses!)
    # Returns: the bracket type, the string inside that bracket, and the text before the bracket.
    lst = [-1, -1, -1]
    symList = ['(', '[', '{']
    symListCouple = [')', ']', '}']
    length = len(row)
    for i in range(len(lst)):
        lst[i] = row.find(symList[i])
        if (lst[i] == -1):
            lst[i] = length  # "not found" sorts last
    minVal = min(lst)
    if (minVal == length):  # no bracket at all
        return '', '', row[:minVal]  # no bracket type, no content — just the whole row text
    else:
        pos = lst.index(minVal)  # which bracket kind occurs first
        regex = r'[%s](.*)[%s]' % (symList[pos], symListCouple[pos])
        return symList[pos], getInfoFromRegex(regex=regex, st=row), row[:minVal]


def getFuncArgs(row: str) -> List[str]:  # extract the parameter names of a function definition

    s = getInfoFromRegex(regex=r'[(](.*)[)]', st=row)

    li = getWordsFromString(s)

    if (len(li) > 0):
        if (li[0] == 'self'):  # a leading 'self' parameter is dropped
            li.pop(0)
        for i in range(len(li)):
            eqSymPos = li[i].find('=')

            if (eqSymPos != -1):  # the parameter has a default value
                li[i] = li[i][:eqSymPos]  # strip '=' and everything after it
            colonSymPos = li[i].find(':')
            if colonSymPos != -1:
                li[i] = li[i][:colonSymPos]  # strip the type annotation

    return li


def getFuncName(row: str) -> str:  # extract the function name
    return getInfoFromRegex(regex=r'def\s(.*?)[(]', st=row)  # note: the pattern requires the space after 'def'


def getLocalVarNames(row: str) -> List[str]:  # names of local variables assigned on this row
    li = getInfoFromRegex(regex=r'(.*?)[=]', st=row)  # everything before the first '='

    words = getWordsFromString(li)
    result = []
    for w in words:  # attribute targets (obj.attr) are not local variables
        if (w.find('.') == -1):
            result.append(w)
    return result


def is_number(str_number: str) -> bool:
    # Heuristic check for integer / decimal / negative-number literals.
    if (str_number.split(".")[0]).isdigit() or str_number.isdigit() or (str_number.split('-')[-1]).split(".")[
        -1].isdigit():
        return True
    else:
        return False


def getForVariables(row: str) -> List[str]:
    '''
    Get the loop variables declared in a ``for ... in`` statement.
    '''
    s = getInfoFromRegex(r'for(.*?)in', row)
    s = s.strip()
    return getWordsFromString(s)


def getVarType(row: str) -> str:
    '''
    Guess the type of the assigned value, e.g. tuple, list or number.
    '''
    bracket, content, outer = getBracketedContent(row)
    li = outer.split('=')
    # NOTE(review): `len(li) >= 1` is always true and `li[1]` raises
    # IndexError when the row contains no '=' — confirm callers only pass
    # assignment lines (or guard with len(li) >= 2).
    if (len(li) >= 1):

        if (li[1].strip() == ''):  # the value starts directly with the bracket
            if (bracket == '('):
                return ':tuple'
            elif (bracket == '['):
                return ':list'
        else:
            st = li[1].split(',')[0]
            if (is_number(st)):
                return ':number'

    return ''


class Row():
    """One logical source row: original line number, text, and indent width."""

    def __init__(self, pos: int, text: str, indent: int) -> None:
        self.pos = pos
        self.text = text
        self.indent = indent

    def __repr__(self) -> str:
        return 'row:' + repr(self.pos) + "\t indent:" + repr(self.indent) + "\t text:" + self.text + '\n'


def regularize(rawText: List[str]) -> List[Row]:
    """Normalise raw source lines into Row objects, merging bracket continuations."""
    # NOTE(review): these globals are never defined in this module — confirm.
    global kwdTuple, indexList, charStr

    f = rawText  # the source as a list of lines
    # NOTE(review): several of the locals below are never used.
    regularifiedText = ''
    rowList = []
    currentRow = Row(0, '', 0)  # dummy object so the name is always bound (simplifies type checking)
    inStaticFunction = False
    inFunctionDefinition = False
    skipLine = False
    currentFuncIndent = 0
    currentIndent = 0
    funcIndent = 0

    for i, l in enumerate(f):
        l = removeStringContent(l)
        l = removeComment(l)

        if (skipLine == False):
            row, currentIndent = getIndent(l)  # get text and indent; trims leading spaces
            currentRow = Row(i, row, currentIndent)
            rowList.append(currentRow)

        else:
            currentRow.text += l.strip()  # the previous line has not ended: append text, keep its indent
            rowList.append(Row(i, '', 0))  # this physical line gets an empty placeholder row

        cp = checkPar(currentRow.text)

        if (cp == 0):  # brackets unbalanced: keep merging following lines until they close
            skipLine = True
            if (len(currentRow.text) >= 200):  # merged line too long — give up merging
                skipLine = False
            continue
        elif (cp == -1):  # more closing than opening brackets: abandon this continuation
            skipLine = False
            continue
        else:
            skipLine = False
    return rowList


if __name__ == '__main__':
    regularize(['', '', ''])
import numpy as npNEWLINEimport copyNEWLINENEWLINEfrom . import ekf_utilsNEWLINENEWLINEgtrack_MIN_DISPERSION_ALPHA = 0.1NEWLINEgtrack_EST_POINTS = 10NEWLINEgtrack_MIN_POINTS_TO_UPDATE_DISPERSION = 3NEWLINEgtrack_KNOWN_TARGET_POINTS_THRESHOLD = 50NEWLINENEWLINENEWLINE# GTRACK Module calls this function to instantiate GTRACK Unit with desired configuration parameters. NEWLINE# Function returns a handle, which is used my module to call units' methodsNEWLINENEWLINEdef unit_create(params):NEWLINE inst = ekf_utils.GtrackUnitInstance()NEWLINENEWLINE inst.gatingParams = params.gatingParamsNEWLINE inst.stateParams = params.stateParamsNEWLINE inst.allocationParams = params.allocationParamsNEWLINE inst.unrollingParams = params.unrollingParamsNEWLINE inst.variationParams = params.variationParamsNEWLINE inst.sceneryParams = params.sceneryParamsNEWLINENEWLINE inst.uid = params.uidNEWLINE inst.maxAcceleration = params.maxAccelerationNEWLINE inst.maxRadialVelocity = params.maxRadialVelocityNEWLINE inst.radialVelocityResolution = params.radialVelocityResolutionNEWLINE inst.verbose = params.verboseNEWLINE inst.initialRadialVelocity = params.initialRadialVelocityNEWLINENEWLINE inst.F4 = params.F4NEWLINE inst.Q4 = params.Q4NEWLINE inst.F6 = params.F6NEWLINE inst.Q6 = params.Q6NEWLINENEWLINE if params.stateVectorType == ekf_utils.gtrack_STATE_VECTOR_TYPE().gtrack_STATE_VECTORS_2DA:NEWLINE inst.stateVectorType = ekf_utils.gtrack_STATE_VECTOR_TYPE().gtrack_STATE_VECTORS_2DANEWLINE inst.stateVectorLength = 6NEWLINE inst.measurementVectorLength = 3NEWLINE else:NEWLINE raise ValueError('not supported, unit_create')NEWLINENEWLINE inst.dt = params.deltaTNEWLINE inst.state = ekf_utils.TrackState().TRACK_STATE_FREENEWLINENEWLINE return instNEWLINENEWLINENEWLINE# GTRACK Module calls this function to run GTRACK unit prediction step NEWLINEdef unit_predict(handle):NEWLINE inst = handleNEWLINE inst.heartBeatCount += 1NEWLINE temp1 = np.zeros(shape=(36,), dtype=np.float32)NEWLINE temp2 = 
np.zeros(shape=(36,), dtype=np.float32)NEWLINE temp3 = np.zeros(shape=(36,), dtype=np.float32)NEWLINENEWLINE # Current state vector lengthNEWLINE sLen = inst.stateVectorLengthNEWLINENEWLINE if inst.processVariance != 0:NEWLINE inst.S_apriori_hat = ekf_utils.gtrack_matrixMultiply(sLen, sLen, 1, inst.F, inst.S_hat)NEWLINE temp1 = ekf_utils.gtrack_matrixMultiply(6, 6, 6, inst.F, inst.P_hat)NEWLINE temp2 = ekf_utils.gtrack_matrixTransposeMultiply(6, 6, 6, temp1, inst.F)NEWLINE temp1 = ekf_utils.gtrack_matrixScalerMultiply(sLen, sLen, inst.Q, inst.processVariance)NEWLINE temp3 = ekf_utils.gtrack_matrixAdd(sLen, sLen, temp1, temp2)NEWLINENEWLINE inst.P_apriori_hat = ekf_utils.gtrack_matrixMakeSymmetrical(sLen, temp3)NEWLINE else:NEWLINE inst.S_apriori_hat = copy.deepcopy(inst.S_hat)NEWLINE inst.P_apriori_hat = copy.deepcopy(inst.P_hat)NEWLINENEWLINE ekf_utils.gtrack_cartesian2spherical(inst.stateVectorType, inst.S_apriori_hat, inst.H_s)NEWLINENEWLINENEWLINE# GTRACK Module calls this function to obtain the measurement vector scoring from the GTRACK unit perspectiveNEWLINEdef unit_score(handle, point, best_score, best_ind, num):NEWLINE limits = np.zeros(shape=(3,), dtype=np.float32)NEWLINE u_tilda = np.zeros(shape=(3,), dtype=np.float32)NEWLINENEWLINE inst = handleNEWLINENEWLINE limits[0] = inst.gatingParams.limits[0].lengthNEWLINE limits[1] = inst.gatingParams.limits[0].widthNEWLINE limits[2] = inst.gatingParams.limits[0].velNEWLINENEWLINE if inst.processVariance == 0:NEWLINE inst.G = 1NEWLINE else:NEWLINE inst.G = ekf_utils.gtrack_gateCreateLim(inst.gatingParams.volume, inst.gC_inv, inst.H_s[0], limits)NEWLINENEWLINE det = ekf_utils.gtrack_matrixDet3(inst.gC)NEWLINENEWLINE log_det = np.float32(np.log(det))NEWLINENEWLINE for n in range(num):NEWLINE if best_ind[n] == ekf_utils.gtrack_ID_POINT_BEHIND_THE_WALL:NEWLINE continueNEWLINENEWLINE u_tilda[0] = np.float32(point[n].range - inst.H_s[0])NEWLINE u_tilda[1] = np.float32(point[n].angle - inst.H_s[1])NEWLINENEWLINE if 
inst.velocityHandling < ekf_utils.VelocityHandlingState().VELOCITY_LOCKED:NEWLINE # Radial velocity estimation is not yet known, unroll based on velocity measured at allocation timeNEWLINE rv_out = ekf_utils.gtrack_unrollRadialVelocity(inst.maxRadialVelocity, inst.allocationVelocity,NEWLINE point[n].doppler)NEWLINE u_tilda[2] = np.float32(rv_out - inst.allocationVelocity)NEWLINE else:NEWLINE # Radial velocity estimation is known NEWLINE rv_out = ekf_utils.gtrack_unrollRadialVelocity(inst.maxRadialVelocity, inst.H_s[2], point[n].doppler)NEWLINE u_tilda[2] = np.float32(rv_out - inst.H_s[2])NEWLINENEWLINE chi2 = ekf_utils.gtrack_computeMahalanobis3(u_tilda, inst.gC_inv)NEWLINE # print(inst.gC_inv)NEWLINENEWLINE if chi2 < inst.G:NEWLINE score = np.float32(log_det + chi2)NEWLINE if score < best_score[n]:NEWLINE best_score[n] = scoreNEWLINE best_ind[n] = np.uint8(inst.uid)NEWLINE point[n].doppler = rv_outNEWLINENEWLINENEWLINE# GTRACK Module calls this function to start target tracking. This function is called during modules' allocation step,NEWLINE# once new set of points passes allocation thresholds NEWLINEdef unit_start(handle, time_stamp, tid, um):NEWLINE inst = handleNEWLINENEWLINE m = np.zeros(shape=(3,), dtype=np.float32)NEWLINENEWLINE inst.tid = tidNEWLINE inst.heartBeatCount = time_stampNEWLINE inst.allocationTime = time_stampNEWLINE inst.allocationRange = um[0]NEWLINE inst.allocationVelocity = um[2]NEWLINE inst.associatedPoints = 0NEWLINENEWLINE inst.state = ekf_utils.TrackState().TRACK_STATE_DETECTIONNEWLINE inst.currentStateVectorType = ekf_utils.gtrack_STATE_VECTOR_TYPE().gtrack_STATE_VECTORS_2DANEWLINE inst.stateVectorLength = 6NEWLINENEWLINE inst.processVariance = (0.5 * inst.maxAcceleration) * (0.5 * inst.maxAcceleration)NEWLINENEWLINE inst.F = inst.F6NEWLINE inst.Q = inst.Q6NEWLINENEWLINE inst.velocityHandling = ekf_utils.VelocityHandlingState().VELOCITY_INITNEWLINENEWLINE m[2] = ekf_utils.gtrack_unrollRadialVelocity(inst.maxRadialVelocity, 
inst.initialRadialVelocity, um[2])NEWLINENEWLINE inst.rangeRate = m[2]NEWLINENEWLINE m[0] = um[0]NEWLINE m[1] = um[1]NEWLINENEWLINE ekf_utils.gtrack_spherical2cartesian(inst.currentStateVectorType, m, inst.S_apriori_hat)NEWLINE inst.H_s = copy.deepcopy(m)NEWLINENEWLINE inst.P_apriori_hat = copy.deepcopy(ekf_utils.pinit6x6)NEWLINE inst.gD = copy.deepcopy(ekf_utils.zero3x3)NEWLINE inst.G = 1.NEWLINENEWLINENEWLINE# GTRACK Module calls this function to perform an update step for the tracking unit. NEWLINEdef unit_update(handle, point, var, pInd, num):NEWLINE J = np.zeros(shape=(18,), dtype=np.float32) # 3x6NEWLINE PJ = np.zeros(shape=(18,), dtype=np.float32) # 6x3NEWLINE JPJ = np.zeros(shape=(9,), dtype=np.float32) # 3x3NEWLINE U = np.zeros(shape=(3,), dtype=np.float32)NEWLINE u_tilda = np.zeros(shape=(3,), dtype=np.float32)NEWLINE cC = np.zeros(shape=(9,), dtype=np.float32)NEWLINE cC_inv = np.zeros(shape=(9,), dtype=np.float32)NEWLINE K = np.zeros(shape=(18,), dtype=np.float32) # 6x3NEWLINENEWLINE u_mean = ekf_utils.gtrack_measurementPoint()NEWLINENEWLINE D = np.zeros(shape=(9,), dtype=np.float32)NEWLINE Rm = np.zeros(shape=(9,), dtype=np.float32)NEWLINE Rc = np.zeros(shape=(9,), dtype=np.float32)NEWLINENEWLINE temp1 = np.zeros(shape=(36,), dtype=np.float32)NEWLINENEWLINE inst = handleNEWLINE mlen = inst.measurementVectorLengthNEWLINE slen = inst.stateVectorLengthNEWLINENEWLINE myPointNum = 0NEWLINENEWLINE for n in range(num):NEWLINE if pInd[n] == inst.uid:NEWLINE myPointNum += 1NEWLINE u_mean.range += point[n].rangeNEWLINE u_mean.angle += point[n].angleNEWLINENEWLINE if var != None:NEWLINE Rm[0] += var[n].rangeVarNEWLINE Rm[4] += var[n].angleVarNEWLINE Rm[8] += var[n].dopplerVarNEWLINENEWLINE if myPointNum == 1:NEWLINE rvPilot = point[n].dopplerNEWLINE u_mean.doppler = rvPilotNEWLINE else:NEWLINE rvCurrent = ekf_utils.gtrack_unrollRadialVelocity(inst.maxRadialVelocity, rvPilot, point[n].doppler)NEWLINE point[n].doppler = rvCurrentNEWLINE u_mean.doppler += 
rvCurrentNEWLINENEWLINE if myPointNum == 0:NEWLINE # INACTIVENEWLINE if (np.abs(inst.S_hat[2]) < inst.radialVelocityResolution) and \NEWLINE (np.abs(inst.S_hat[3]) < inst.radialVelocityResolution):NEWLINE inst.S_hat = np.zeros(shape=(inst.S_hat.shape), dtype=np.float32)NEWLINENEWLINE inst.S_hat[0] = inst.S_apriori_hat[0]NEWLINE inst.S_hat[1] = inst.S_apriori_hat[1]NEWLINENEWLINE inst.P_hat = copy.deepcopy(inst.P_apriori_hat)NEWLINENEWLINE inst.processVariance = 0NEWLINE else:NEWLINE inst.S_hat = copy.deepcopy(inst.S_apriori_hat)NEWLINE inst.P_hat = copy.deepcopy(inst.P_apriori_hat)NEWLINENEWLINE unit_event(inst, myPointNum)NEWLINE return inst.stateNEWLINENEWLINE inst.associatedPoints += myPointNumNEWLINENEWLINE if inst.processVariance == 0:NEWLINE inst.processVariance = np.float32((0.5 * (inst.maxAcceleration)) * (0.5 * (inst.maxAcceleration)))NEWLINENEWLINE u_mean.range = np.float32(u_mean.range / myPointNum)NEWLINE u_mean.angle = np.float32(u_mean.angle / myPointNum)NEWLINE u_mean.doppler = np.float32(u_mean.doppler / myPointNum)NEWLINENEWLINE if var != None:NEWLINE Rm[0] = np.float32(Rm[0] / myPointNum)NEWLINE Rm[4] = np.float32(Rm[4] / myPointNum)NEWLINE Rm[8] = np.float32(Rm[8] / myPointNum)NEWLINE else:NEWLINE dRangeVar = np.float32((inst.variationParams.lengthStd) * (inst.variationParams.lengthStd))NEWLINE dDopplerVar = np.float32((inst.variationParams.dopplerStd) * (inst.variationParams.dopplerStd))NEWLINENEWLINE Rm[0] = dRangeVarNEWLINE angleStd = np.float32(2 * np.float32(np.arctan(0.5 * (inst.variationParams.widthStd) / inst.H_s[0])))NEWLINE Rm[4] = angleStd * angleStdNEWLINE Rm[8] = dDopplerVarNEWLINENEWLINE U[0] = u_mean.rangeNEWLINE U[1] = u_mean.angleNEWLINE U[2] = u_mean.dopplerNEWLINENEWLINE velocity_state_handling(inst, U)NEWLINENEWLINE if myPointNum > gtrack_MIN_POINTS_TO_UPDATE_DISPERSION:NEWLINE for n in range(num):NEWLINE if pInd[n] == inst.uid:NEWLINE D[0] += np.float32((point[n].range - u_mean.range) * (point[n].range - u_mean.range))NEWLINE 
D[4] += np.float32((point[n].angle - u_mean.angle) * (point[n].angle - u_mean.angle))NEWLINE D[8] += np.float32((point[n].doppler - u_mean.doppler) * (point[n].doppler - u_mean.doppler))NEWLINE D[1] += np.float32((point[n].range - u_mean.range) * (point[n].angle - u_mean.angle))NEWLINE D[2] += np.float32((point[n].range - u_mean.range) * (point[n].doppler - u_mean.doppler))NEWLINE D[5] += np.float32((point[n].angle - u_mean.angle) * (point[n].doppler - u_mean.doppler))NEWLINENEWLINE D[0] = np.float32(D[0] / myPointNum)NEWLINE D[4] = np.float32(D[4] / myPointNum)NEWLINE D[8] = np.float32(D[8] / myPointNum)NEWLINE D[1] = np.float32(D[1] / myPointNum)NEWLINE D[2] = np.float32(D[2] / myPointNum)NEWLINE D[5] = np.float32(D[5] / myPointNum)NEWLINENEWLINE alpha = np.float32(myPointNum / (inst.associatedPoints))NEWLINE # print(alpha)NEWLINE if alpha < gtrack_MIN_DISPERSION_ALPHA:NEWLINE alpha = gtrack_MIN_DISPERSION_ALPHANEWLINENEWLINE inst.gD[0] = np.float32((1. - alpha) * inst.gD[0] + alpha * D[0])NEWLINE inst.gD[1] = np.float32((1. - alpha) * inst.gD[1] + alpha * D[1])NEWLINE inst.gD[2] = np.float32((1. - alpha) * inst.gD[2] + alpha * D[2])NEWLINE inst.gD[3] = np.float32(inst.gD[1])NEWLINE inst.gD[4] = np.float32((1. - alpha) * inst.gD[4] + alpha * D[4])NEWLINE inst.gD[5] = np.float32((1. - alpha) * inst.gD[5] + alpha * D[5])NEWLINE inst.gD[6] = np.float32(inst.gD[2])NEWLINE inst.gD[7] = np.float32(inst.gD[5])NEWLINE inst.gD[8] = np.float32((1. 
- alpha) * inst.gD[8] + alpha * D[8])NEWLINENEWLINE if myPointNum > gtrack_EST_POINTS:NEWLINE alpha = 0NEWLINE else:NEWLINE alpha = np.float32((gtrack_EST_POINTS - myPointNum) / ((gtrack_EST_POINTS - 1) * myPointNum))NEWLINENEWLINE Rc[0] = np.float32((Rm[0] / myPointNum) + alpha * (inst.gD[0]))NEWLINE Rc[4] = np.float32((Rm[4] / myPointNum) + alpha * (inst.gD[4]))NEWLINE Rc[8] = np.float32((Rm[8] / myPointNum) + alpha * (inst.gD[8]))NEWLINENEWLINE ekf_utils.gtrack_computeJacobian(inst.currentStateVectorType, inst.S_apriori_hat, J)NEWLINENEWLINE u_tilda = ekf_utils.gtrack_matrixSub(mlen, 1, U, inst.H_s)NEWLINE PJ = ekf_utils.gtrack_matrixComputePJT(inst.P_apriori_hat, J)NEWLINE JPJ = ekf_utils.gtrack_matrixMultiply(mlen, slen, mlen, J, PJ)NEWLINE cC = ekf_utils.gtrack_matrixAdd(mlen, mlen, JPJ, Rc)NEWLINENEWLINE cC_inv = ekf_utils.gtrack_matrixInv3(cC)NEWLINENEWLINE K = ekf_utils.gtrack_matrixMultiply(slen, mlen, mlen, PJ, cC_inv)NEWLINENEWLINE temp1 = ekf_utils.gtrack_matrixMultiply(slen, mlen, 1, K, u_tilda)NEWLINE inst.S_hat = ekf_utils.gtrack_matrixAdd(slen, 1, inst.S_apriori_hat, temp1)NEWLINE # print(temp1)NEWLINENEWLINE temp1 = ekf_utils.gtrack_matrixTransposeMultiply(slen, mlen, slen, K, PJ)NEWLINE inst.P_hat = ekf_utils.gtrack_matrixSub(slen, slen, inst.P_apriori_hat, temp1)NEWLINENEWLINE temp1 = ekf_utils.gtrack_matrixAdd(mlen, mlen, JPJ, Rm)NEWLINE inst.gC = ekf_utils.gtrack_matrixAdd(mlen, mlen, temp1, inst.gD)NEWLINENEWLINE inst.gC_inv = ekf_utils.gtrack_matrixInv3(inst.gC)NEWLINENEWLINE unit_event(inst, myPointNum)NEWLINE return inst.stateNEWLINENEWLINENEWLINE# this is the helper function for GTRACK unit updateNEWLINEdef velocity_state_handling(handle, um):NEWLINE inst = handleNEWLINE rvIn = um[2]NEWLINE # print(inst.velocityHandling)NEWLINENEWLINE if inst.velocityHandling == ekf_utils.VelocityHandlingState().VELOCITY_INIT:NEWLINE um[2] = inst.rangeRateNEWLINE inst.velocityHandling = ekf_utils.VelocityHandlingState().VELOCITY_RATE_FILTERNEWLINE elif 
inst.velocityHandling == ekf_utils.VelocityHandlingState().VELOCITY_RATE_FILTER:NEWLINE instanteneousRangeRate = np.float32(NEWLINE (um[0] - inst.allocationRange) / ((inst.heartBeatCount - inst.allocationTime) * (inst.dt)))NEWLINE inst.rangeRate = np.float32((inst.unrollingParams.alpha) * (inst.rangeRate) + (NEWLINE 1 - (inst.unrollingParams.alpha)) * instanteneousRangeRate)NEWLINE um[2] = ekf_utils.gtrack_unrollRadialVelocity(inst.maxRadialVelocity, inst.rangeRate, rvIn)NEWLINENEWLINE rrError = np.float32((instanteneousRangeRate - inst.rangeRate) / inst.rangeRate)NEWLINENEWLINE if np.abs(rrError) < inst.unrollingParams.confidence:NEWLINE inst.velocityHandling = ekf_utils.VelocityHandlingState().VELOCITY_TRACKINGNEWLINE elif inst.velocityHandling == ekf_utils.VelocityHandlingState().VELOCITY_TRACKING:NEWLINE instanteneousRangeRate = np.float32(NEWLINE (um[0] - inst.allocationRange) / ((inst.heartBeatCount - inst.allocationTime) * inst.dt))NEWLINENEWLINE inst.rangeRate = np.float32(NEWLINE (inst.unrollingParams.alpha) * inst.rangeRate + (1 - inst.unrollingParams.alpha) * instanteneousRangeRate)NEWLINE um[2] = ekf_utils.gtrack_unrollRadialVelocity(inst.maxRadialVelocity, inst.rangeRate, rvIn)NEWLINE rvError = np.float32((inst.H_s[2] - um[2]) / um[2])NEWLINE if np.abs(rvError) < 0.1:NEWLINE inst.velocityHandling = ekf_utils.VelocityHandlingState().VELOCITY_LOCKEDNEWLINE elif inst.velocityHandling == ekf_utils.VelocityHandlingState().VELOCITY_LOCKED:NEWLINE um[2] = ekf_utils.gtrack_unrollRadialVelocity(inst.maxRadialVelocity, inst.H_s[2], um[2])NEWLINENEWLINENEWLINE# GTRACK Module calls this function to run GTRACK unit level state machineNEWLINEdef unit_event(handle, num):NEWLINE inst = handleNEWLINENEWLINE if inst.state == ekf_utils.TrackState().TRACK_STATE_DETECTION:NEWLINE if num > inst.allocationParams.pointsThre:NEWLINE inst.detect2freeCount = 0NEWLINE inst.detect2activeCount += 1NEWLINE if inst.detect2activeCount > inst.stateParams.det2actThre:NEWLINE inst.state 
= ekf_utils.TrackState().TRACK_STATE_ACTIVENEWLINE else:NEWLINE if num == 0:NEWLINE inst.detect2freeCount += 1NEWLINE if inst.detect2activeCount > 0:NEWLINE inst.detect2activeCount -= 1NEWLINE if inst.detect2freeCount > inst.stateParams.det2freeThre:NEWLINE inst.state = ekf_utils.TrackState().TRACK_STATE_FREENEWLINE elif inst.state == ekf_utils.TrackState().TRACK_STATE_ACTIVE:NEWLINE if num != 0:NEWLINE inst.active2freeCount = 0NEWLINE else:NEWLINE inst.active2freeCount += 1NEWLINENEWLINE if inst.sceneryParams.numStaticBoxes != 0:NEWLINE thre = inst.stateParams.exit2freeThreNEWLINE for numBoxes in range(inst.sceneryParams.numStaticBoxes):NEWLINE if ekf_utils.isPointInsideBox(inst.S_hat[0], inst.S_hat[1],NEWLINE inst.sceneryParams.boundaryBox[numBoxes]) == 1:NEWLINE if inst.processVariance == 0:NEWLINE thre = inst.stateParams.static2freeThreNEWLINE else:NEWLINE thre = inst.stateParams.active2freeThreNEWLINE breakNEWLINE else:NEWLINE thre = inst.stateParams.active2freeThreNEWLINENEWLINE if thre > inst.heartBeatCount:NEWLINE thre = np.uint16(inst.heartBeatCount)NEWLINENEWLINE if inst.active2freeCount > thre:NEWLINE inst.state = ekf_utils.TrackState().TRACK_STATE_FREENEWLINENEWLINENEWLINE# GTRACK Module calls this function to report GTRACK unit results to the target descriptorNEWLINEdef unit_report(handle, target):NEWLINE inst = handleNEWLINENEWLINE target.uid = inst.uidNEWLINE target.tid = inst.tidNEWLINENEWLINE target.S = copy.deepcopy(inst.S_hat)NEWLINE target.EC = copy.deepcopy(inst.gC_inv)NEWLINE target.G = inst.GNEWLINE |
import numpy as npNEWLINEimport sysNEWLINEimport osNEWLINEimport timeNEWLINEimport copyNEWLINEimport datetimeNEWLINEimport pickleNEWLINEimport torch.nn as nnNEWLINEfrom torch.utils.data import DataLoaderNEWLINEimport torch.optim as optimNEWLINEimport torch.optim.lr_scheduler as lr_schedulerNEWLINEimport argparseNEWLINEimport platformNEWLINEimport subprocessNEWLINEfrom sklearn.metrics import roc_auc_score, average_precision_scoreNEWLINEfrom social_data_loader import SocialEvolutionDatasetNEWLINEfrom github_data_loader import GithubDatasetNEWLINEfrom example_data_loader import ExampleDatasetNEWLINEfrom utils import *NEWLINEfrom dyrep import DyRepNEWLINEfrom freq import FreqBaselineNEWLINENEWLINEdef load_checkpoint(file):NEWLINE # TODO: Loading the checkpoint stopped working, need to fix.NEWLINE print('loading the model')NEWLINE state = torch.load(file)NEWLINE pos1 = file.find('checkpoint_dygraphs')NEWLINE experiment_ID = str.join('_', file[pos1:].split('_')[2:-2])NEWLINE model.load_state_dict(state['state_dict'])NEWLINE optimizer.load_state_dict(state['optimizer'])NEWLINE scheduler.load_state_dict(state['scheduler'])NEWLINE model.Lambda_dict = state['Lambda_dict']NEWLINE model.time_keys = state['time_keys']NEWLINE print('loading from epoch %d, batch %d done' % (state['epoch'], state['batch_idx']))NEWLINE return state['epoch'], state['batch_idx'], state['time_bar'], state['node_degree_global'], experiment_IDNEWLINENEWLINENEWLINEdef save_checkpoint(batch_idx, epoch):NEWLINE try:NEWLINE fname = '%s/checkpoints/checkpoint_dygraphs_%s_epoch%d_batch%d.pth.tar' % (args.results, experiment_ID, epoch, batch_idx)NEWLINE state = {NEWLINE 'epoch': epoch,NEWLINE 'batch_idx': batch_idx,NEWLINE 'args': args,NEWLINE 'time_bar': time_bar,NEWLINE 'node_degree_global': node_degree_global,NEWLINE 'Lambda_dict': model.Lambda_dict,NEWLINE 'time_keys': model.time_keys,NEWLINE 'state_dict': model.state_dict(),NEWLINE 'scheduler': scheduler.state_dict(),NEWLINE 'optimizer': 
optimizer.state_dict(),NEWLINE }NEWLINE if os.path.isfile(fname):NEWLINE print('WARNING: file %s exists and will be overwritten' % fname)NEWLINE torch.save(state, fname)NEWLINE print('the model is saved to %s' % fname)NEWLINE except Exception as e:NEWLINE print('error saving the model', e)NEWLINENEWLINENEWLINEdef test(model, n_test_batches=10, epoch=0):NEWLINE model.eval()NEWLINE loss = 0NEWLINE losses =[ [np.Inf, 0], [np.Inf, 0] ]NEWLINE n_samples = 0NEWLINE # Time slots with 10 days intervals as in the DyRep paperNEWLINE timeslots = [t.toordinal() for t in test_loader.dataset.TEST_TIMESLOTS]NEWLINE event_types = list(test_loader.dataset.event_types_num.keys()) #['comm', 'assoc']NEWLINE # sort it by kNEWLINE for event_t in test_loader.dataset.event_types_num:NEWLINE event_types[test_loader.dataset.event_types_num[event_t]] = event_tNEWLINENEWLINE event_types += ['Com']NEWLINENEWLINE mar, hits_10 = {}, {}NEWLINE for event_t in event_types:NEWLINE mar[event_t] = []NEWLINE hits_10[event_t] = []NEWLINE for c, slot in enumerate(timeslots):NEWLINE mar[event_t].append([])NEWLINE hits_10[event_t].append([])NEWLINENEWLINENEWLINE start = time.time()NEWLINE with torch.no_grad():NEWLINE for batch_idx, data in enumerate(test_loader):NEWLINE data[2] = data[2].float().to(args.device)NEWLINE data[4] = data[4].double().to(args.device)NEWLINE data[5] = data[5].double()NEWLINE output = model(data)NEWLINE loss += (-torch.sum(torch.log(output[0]) + 1e-10) + torch.sum(output[1])).item()NEWLINE for i in range(len(losses)):NEWLINE m1 = output[i].min()NEWLINE m2 = output[i].max()NEWLINE if m1 < losses[i][0]:NEWLINE losses[i][0] = m1NEWLINE if m2 > losses[i][1]:NEWLINE losses[i][1] = m2NEWLINE n_samples += 1NEWLINE A_pred, Survival_term = output[2]NEWLINE u, v, k = data[0], data[1], data[3]NEWLINENEWLINE time_cur = data[5]NEWLINE m, h = MAR(A_pred, u, v, k, Survival_term=Survival_term, freq_prior=freq.H_train_norm if args.freq else None)NEWLINE assert len(time_cur) == len(m) == len(h) == 
len(k)NEWLINE for t, m, h, k_ in zip(time_cur, m, h, k):NEWLINE d = datetime.datetime.fromtimestamp(t.item()).toordinal()NEWLINE event_t = event_types[k_.item()]NEWLINE for c, slot in enumerate(timeslots):NEWLINE if d <= slot:NEWLINE mar[event_t][c].append(m)NEWLINE hits_10[event_t][c].append(h)NEWLINE if k_ > 0:NEWLINE mar['Com'][c].append(m)NEWLINE hits_10['Com'][c].append(h)NEWLINE if c > 0:NEWLINE assert slot > timeslots[c-1] and d > timeslots[c-1], (d, slot, timeslots[c-1])NEWLINE breakNEWLINENEWLINE if batch_idx % 10 == 0 and args.verbose:NEWLINE print('test', batch_idx)NEWLINENEWLINE if n_test_batches is not None and batch_idx >= n_test_batches - 1:NEWLINE breakNEWLINENEWLINE time_iter = time.time() - startNEWLINENEWLINE print('\nTEST batch={}/{}, loss={:.3f}, psi={}, loss1 min/max={:.4f}/{:.4f}, 'NEWLINE 'loss2 min/max={:.4f}/{:.4f}, integral time stamps={}, sec/iter={:.4f}'.NEWLINE format(batch_idx + 1, len(test_loader), (loss / n_samples),NEWLINE [model.psi[c].item() for c in range(len(model.psi))],NEWLINE losses[0][0], losses[0][1], losses[1][0], losses[1][1],NEWLINE len(model.Lambda_dict), time_iter / (batch_idx + 1)))NEWLINENEWLINE # Report results for different time slots in the test setNEWLINE if args.verbose:NEWLINE for c, slot in enumerate(timeslots):NEWLINE s = 'Slot {}: '.format(c)NEWLINE for event_t in event_types:NEWLINE sfx = '' if event_t == event_types[-1] else ', 'NEWLINE if len(mar[event_t][c]) > 0:NEWLINE s += '{} ({} events): MAR={:.2f}+-{:.2f}, HITS_10={:.3f}+-{:.3f}'.\NEWLINE format(event_t, len(mar[event_t][c]), np.mean(mar[event_t][c]), np.std(mar[event_t][c]),NEWLINE np.mean(hits_10[event_t][c]), np.std(hits_10[event_t][c]))NEWLINE else:NEWLINE s += '{} (no events)'.format(event_t)NEWLINE s += sfxNEWLINE print(s)NEWLINENEWLINE mar_all, hits_10_all = {}, {}NEWLINE for event_t in event_types:NEWLINE mar_all[event_t] = []NEWLINE hits_10_all[event_t] = []NEWLINE for c, slot in enumerate(timeslots):NEWLINE 
mar_all[event_t].extend(mar[event_t][c])NEWLINE hits_10_all[event_t].extend(hits_10[event_t][c])NEWLINENEWLINE s = 'Epoch {}: results per event type for all test time slots: \n'.format(epoch)NEWLINE print(''.join(['-']*100))NEWLINE for event_t in event_types:NEWLINE if len(mar_all[event_t]) > 0:NEWLINE s += '====== {:10s}\t ({:7s} events): \tMAR={:.2f}+-{:.2f}\t HITS_10={:.3f}+-{:.3f}'.\NEWLINE format(event_t, str(len(mar_all[event_t])), np.mean(mar_all[event_t]), np.std(mar_all[event_t]),NEWLINE np.mean(hits_10_all[event_t]), np.std(hits_10_all[event_t]))NEWLINE else:NEWLINE s += '====== {:10s}\t (no events)'.format(event_t)NEWLINE if event_t != event_types[-1]:NEWLINE s += '\n'NEWLINE print(s)NEWLINE print(''.join(['-'] * 100))NEWLINENEWLINE return mar_all, hits_10_all, loss / n_samplesNEWLINENEWLINENEWLINEdef get_temporal_variables():NEWLINE variables = {}NEWLINE variables['time_bar'] = copy.deepcopy(time_bar)NEWLINE variables['node_degree_global'] = copy.deepcopy(node_degree_global)NEWLINE variables['time_keys'] = copy.deepcopy(model.time_keys)NEWLINE variables['z'] = model.z.clone()NEWLINE variables['S'] = model.S.clone()NEWLINE variables['A'] = model.A.clone()NEWLINE variables['Lambda_dict'] = model.Lambda_dict.clone()NEWLINE return variablesNEWLINENEWLINENEWLINEdef set_temporal_variables(variables, model, train_loader, test_loader):NEWLINE time_bar = copy.deepcopy(variables['time_bar'])NEWLINE train_loader.dataset.time_bar = time_barNEWLINE test_loader.dataset.time_bar = time_barNEWLINE model.node_degree_global = copy.deepcopy(variables['node_degree_global'])NEWLINE model.time_keys = copy.deepcopy(variables['time_keys'])NEWLINE model.z = variables['z'].clone()NEWLINE model.S = variables['S'].clone()NEWLINE model.A = variables['A'].clone()NEWLINE model.Lambda_dict = variables['Lambda_dict'].clone()NEWLINE return time_barNEWLINENEWLINEif __name__ == '__main__':NEWLINENEWLINE parser = argparse.ArgumentParser(description='DyGraphs Training Parameters')NEWLINE 
parser.add_argument('--data_dir', type=str, default='./')NEWLINE parser.add_argument('--dataset', type=str, default='social', choices=['social', 'github', 'example'])NEWLINE parser.add_argument('--prob', default=0.8, help='filter events by this probability value in the Social Evolution data')NEWLINE parser.add_argument('--batch_size', type=int, default=200, help='batch size (sequence length)')NEWLINE parser.add_argument('--n_hid', type=int, default=32, help='hidden layer size')NEWLINE parser.add_argument('--epochs', type=int, default=5, help='number of epochs')NEWLINE parser.add_argument('--seed', type=int, default=1111, help='random seed')NEWLINE parser.add_argument('--lr', type=float, default=0.0002, help='Learning Rate')NEWLINE parser.add_argument('--lr_decay_step', type=str, default='10',NEWLINE help='number of epochs after which to reduce learning rate')NEWLINE parser.add_argument('--weight', type=float, default=1, help='weight for the second term in the loss')NEWLINE parser.add_argument('--wdecay', type=float, default=0, help='weight decay')NEWLINE parser.add_argument('--model', type=str, default='dyrep', help='trained model', choices=['dyrep', 'gcn', 'gat'])NEWLINE parser.add_argument('--bilinear', action='store_true', default=False, help='use bilinear intensity (omega) model')NEWLINE parser.add_argument('--bilinear_enc', action='store_true', default=False, help='use bilinear NRI')NEWLINE parser.add_argument('--encoder', type=str, default=None, choices=['linear', 'mlp', 'mlp1', 'rand'])NEWLINE parser.add_argument('--sparse', action='store_true', default=False,NEWLINE help='sparsity prior as in some tasks in Kipf et al., ICML 2018')NEWLINE parser.add_argument('--n_rel', type=int, default=2, help='number of edges for learned graphs')NEWLINE parser.add_argument('--device', type=str, default='cuda')NEWLINE parser.add_argument('--association', type=str, default='CloseFriend', help='The long term graph of the Social Evolution data used as long term edges')NEWLINE 
parser.add_argument('--resume', type=str, default='')NEWLINE parser.add_argument('--log_interval', type=int, default=20, help='print interval')NEWLINE parser.add_argument('--results', type=str, default='results', help='results file path')NEWLINE parser.add_argument('--soft_attn', action='store_true', default=False)NEWLINE parser.add_argument('--freq', action='store_true', default=False, help='use the Frequency bias')NEWLINE parser.add_argument('--verbose', action='store_true', default=False, help='print a lot of debugging stuff and results details')NEWLINENEWLINE args = parser.parse_args()NEWLINENEWLINE args.lr_decay_step = list(map(int, args.lr_decay_step.split(',')))NEWLINE args.torch = torch.__version__NEWLINE print('\n~~~~~ Script arguments ~~~~~')NEWLINE for arg in vars(args):NEWLINE print(arg, getattr(args, arg))NEWLINENEWLINE dt = datetime.datetime.now()NEWLINE print('start time:', dt)NEWLINE experiment_ID = '%s_%06d' % (platform.node(), dt.microsecond)NEWLINE print('experiment_ID: ', experiment_ID)NEWLINENEWLINE try:NEWLINE gitcommit = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode('ascii').strip()NEWLINE print('gitcommit', gitcommit, '\n')NEWLINE except Exception as e:NEWLINE print('gitcommit is not available', e)NEWLINENEWLINE # Set seedNEWLINE np.random.seed(args.seed)NEWLINE rnd = np.random.RandomState(args.seed)NEWLINE torch.backends.cudnn.deterministic = TrueNEWLINE torch.backends.cudnn.benchmark = TrueNEWLINE torch.manual_seed(args.seed)NEWLINE torch.cuda.manual_seed(args.seed)NEWLINE torch.cuda.manual_seed_all(args.seed)NEWLINENEWLINE if args.dataset == 'social':NEWLINE try:NEWLINE data = SocialEvolutionDataset.load_data(args.data_dir, args.prob)NEWLINE except FileNotFoundError as e:NEWLINE raise ValueError('Original nor preprocessed data not found. Please consult README.md to prepare data before running the code. 
Error:', e)NEWLINENEWLINE train_set = SocialEvolutionDataset(data['initial_embeddings'], data['train'], args.association, verbose=args.verbose)NEWLINE test_set = SocialEvolutionDataset(data['initial_embeddings'], data['test'], args.association,NEWLINE data_train=data['train'], verbose=args.verbose)NEWLINE initial_embeddings = data['initial_embeddings'].copy()NEWLINE A_initial = train_set.get_Adjacency()[0]NEWLINE elif args.dataset == 'github':NEWLINE train_set = GithubDataset('train', data_dir=args.data_dir)NEWLINE test_set = GithubDataset('test', data_dir=args.data_dir)NEWLINE initial_embeddings = np.random.randn(train_set.N_nodes, args.n_hid)NEWLINE A_initial = train_set.get_Adjacency()[0]NEWLINE elif args.dataset == 'example':NEWLINE train_set = ExampleDataset('train')NEWLINE test_set = ExampleDataset('test')NEWLINE initial_embeddings = np.random.randn(train_set.N_nodes, args.n_hid)NEWLINE A_initial = train_set.get_Adjacency()[0]NEWLINE else:NEWLINE raise NotImplementedError(args.dataset)NEWLINENEWLINE def initalize_state(dataset, keepS=False):NEWLINE '''Initializes node embeddings and the graph to the original state after every epoch'''NEWLINENEWLINE Adj_all = dataset.get_Adjacency()[0]NEWLINENEWLINE if not isinstance(Adj_all, list):NEWLINE Adj_all = [Adj_all]NEWLINENEWLINE node_degree_global = []NEWLINE for rel, A in enumerate(Adj_all):NEWLINE node_degree_global.append(np.zeros(A.shape[0]))NEWLINE for u in range(A.shape[0]):NEWLINE node_degree_global[rel][u] = np.sum(A[u])NEWLINENEWLINE Adj_all = Adj_all[0]NEWLINE if args.verbose:NEWLINE print('Adj_all', Adj_all.shape, len(node_degree_global), node_degree_global[0].min(), node_degree_global[0].max())NEWLINE time_bar = np.zeros((dataset.N_nodes, 1)) + dataset.FIRST_DATE.timestamp()NEWLINENEWLINE model.initialize(node_embeddings=initial_embeddings,NEWLINE A_initial=Adj_all, keepS=keepS) # train_loader.dataset.H_trainNEWLINENEWLINENEWLINE model.to(args.device)NEWLINE return time_bar, 
node_degree_globalNEWLINENEWLINE train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=False)NEWLINE test_loader = DataLoader(test_set, batch_size=args.batch_size, shuffle=False)NEWLINENEWLINE freq = FreqBaseline(train_set, test_set, verbose=args.verbose)NEWLINENEWLINE model = DyRep(node_embeddings=initial_embeddings,NEWLINE N_nodes=train_set.N_nodes,NEWLINE A_initial=A_initial,NEWLINE n_hidden=args.n_hid,NEWLINE bilinear=args.bilinear,NEWLINE bilinear_enc=args.bilinear_enc,NEWLINE sparse=args.sparse,NEWLINE encoder=args.encoder,NEWLINE n_rel=args.n_rel,NEWLINE rnd=rnd,NEWLINE device=args.device,NEWLINE model=args.model,NEWLINE soft_attn=args.soft_attn,NEWLINE freq=freq.H_train_norm if args.freq else None,NEWLINE verbose=args.verbose,NEWLINE node_degree_global=None).to(args.device)NEWLINENEWLINE print('') # new stringNEWLINE if args.verbose:NEWLINE print('model', model)NEWLINE print('number of training parameters: %d' %NEWLINE np.sum([np.prod(p.size()) if p.requires_grad else 0 for p in model.parameters()]))NEWLINENEWLINE params_main, params_enc = [], []NEWLINE for name, param in model.named_parameters():NEWLINE if name.find('encoder') >= 0 and param.requires_grad:NEWLINE params_enc.append(param)NEWLINE elif param.requires_grad:NEWLINE params_main.append(param)NEWLINENEWLINENEWLINE optimizer = optim.Adam([{"params": params_main, "weight_decay": args.wdecay},NEWLINE {"params": params_enc, "weight_decay": 1e-4}], lr=args.lr, betas=(0.5, 0.999))NEWLINE scheduler = lr_scheduler.MultiStepLR(optimizer, args.lr_decay_step, gamma=0.5)NEWLINENEWLINE if args.resume != '':NEWLINE epoch_start, batch_start, time_bar, node_degree_global, experiment_ID = load_checkpoint(args.resume)NEWLINE resume = TrueNEWLINE model.node_degree_global = node_degree_globalNEWLINE else:NEWLINE epoch_start = 1NEWLINE batch_start = 0NEWLINE resume = FalseNEWLINENEWLINENEWLINE losses_events, losses_nonevents, losses_KL, losses_sum = [], [], [], []NEWLINE test_MAR, test_HITS10, 
test_loss = [], [], []NEWLINE print('\nStarting training...')NEWLINE for epoch in range(epoch_start, args.epochs + 1):NEWLINENEWLINE if not (resume and epoch == epoch_start):NEWLINE # Reinitialize node embeddings and adjacency matrices, but keep the model parameters intactNEWLINE time_bar, node_degree_global = initalize_state(train_loader.dataset, keepS=epoch > 1)NEWLINE model.node_degree_global = node_degree_globalNEWLINENEWLINE train_loader.dataset.time_bar = time_barNEWLINE test_loader.dataset.time_bar = time_barNEWLINENEWLINE start = time.time()NEWLINENEWLINE for batch_idx, data_batch in enumerate(train_loader):NEWLINENEWLINE if resume and batch_idx <= batch_start:NEWLINE continueNEWLINE model.train()NEWLINENEWLINE optimizer.zero_grad()NEWLINE data_batch[2] = data_batch[2].float().to(args.device)NEWLINE data_batch[4] = data_batch[4].double().to(args.device)NEWLINE data_batch[5] = data_batch[5].double() # no need of GPUNEWLINE output = model(data_batch)NEWLINE losses = [-torch.sum(torch.log(output[0]) + 1e-10), args.weight * torch.sum(output[1])] #NEWLINENEWLINE # KL losses (one item per event)NEWLINE if len(output[-1]) > 0:NEWLINE losses.extend(output[-1])NEWLINE losses_KL.append(torch.stack(losses[2:]).sum().item())NEWLINENEWLINE loss = torch.sum(torch.stack(losses)) / args.batch_sizeNEWLINE loss.backward()NEWLINE nn.utils.clip_grad_value_(model.parameters(), 100)NEWLINENEWLINE optimizer.step()NEWLINENEWLINE losses_events.append(losses[0].item())NEWLINE losses_nonevents.append(losses[1].item())NEWLINE losses_sum.append(loss.item())NEWLINENEWLINE assert np.allclose(train_loader.dataset.time_bar, time_bar)NEWLINE assert np.allclose(test_loader.dataset.time_bar, time_bar)NEWLINENEWLINE model.psi.data = torch.clamp(model.psi.data, 1e-1, 1e+3) # to prevent overflow in computing LambdaNEWLINENEWLINE time_iter = time.time() - startNEWLINENEWLINE model.z = model.z.detach() # to reset the computational graph and avoid backpropagating second timeNEWLINE model.S = 
model.S.detach()NEWLINENEWLINE if (batch_idx + 1) % args.log_interval == 0 or batch_idx == len(train_loader) - 1:NEWLINE # Report (intermediate) resultsNEWLINENEWLINE print('\nTRAIN epoch={}/{}, batch={}/{}, sec/iter: {:.4f}, loss={:.3f}, loss components: {}'.format(epoch,NEWLINE args.epochs,NEWLINE batch_idx + 1,NEWLINE len(train_loader),NEWLINE time_iter / (batch_idx + 1),NEWLINE loss.item(), [l.item() for l in losses]))NEWLINENEWLINE if args.encoder is not None:NEWLINE S = model.S.data.cpu().numpy()NEWLINE S_batch = output[3].sum(axis=0)NEWLINE A_all_first, keys, A_all_last = train_loader.dataset.get_Adjacency(multirelations=True)NEWLINENEWLINE for survey, A_all in zip(['first', 'last'], [A_all_first, A_all_last]):NEWLINE for rel, key in enumerate(keys):NEWLINE if len(A_all.shape) == 2:NEWLINE A_all = A_all[:, :, None]NEWLINENEWLINE A = A_all[:, :, rel].flatten()NEWLINE for edge_type in range(S.shape[2]):NEWLINE prec = average_precision_score(y_true=A, y_score=S[:, :, edge_type].flatten())NEWLINE acc = np.mean(np.equal(A, (S[:, :, edge_type].flatten() > 0).astype(np.float)))NEWLINE auc = roc_auc_score(y_true=A, y_score=S[:, :, edge_type].flatten())NEWLINE c = np.corrcoef(A.flatten(), S[:, :, edge_type].flatten())[0, 1]NEWLINENEWLINE prec_batch = average_precision_score(y_true=A, y_score=S_batch[:, :, edge_type].flatten())NEWLINE acc_batch = np.mean(np.equal(A, (S_batch[:, :, edge_type].flatten() > 0).astype(np.float)))NEWLINE auc_batch = roc_auc_score(y_true=A, y_score=S_batch[:, :, edge_type].flatten())NEWLINE c_batch = np.corrcoef(A.flatten(), S_batch[:, :, edge_type].flatten())[0, 1]NEWLINENEWLINE print('{}: Edge {} with {}: acc={:.4f}, auc={:.4f}, prec={:.4f}, corr={:.4f}, 'NEWLINE 'acc_batch={:.4f}, auc_batch={:.4f}, prec_batch={:.4f}, corr_batch={:.4f}'.NEWLINE format(survey, edge_type, key, acc, auc, prec, c,NEWLINE acc_batch, auc_batch, prec_batch, c_batch))NEWLINENEWLINE for edge_type in range(S.shape[2]):NEWLINE c = np.corrcoef(freq.H_train.flatten(), 
S[:, :, edge_type].flatten())[0, 1]NEWLINE c_batch = np.corrcoef(freq.H_train.flatten(), S_batch[:, :, edge_type].flatten())[0, 1]NEWLINE print('Edge {} with H_train: corr={:.4f}, corr_batch={:.4f}'.format(edge_type, c, c_batch))NEWLINENEWLINENEWLINE # save node embeddings and other data before testing since these variables will be updated during testingNEWLINE variables = get_temporal_variables()NEWLINE if args.verbose:NEWLINE print('time', datetime.datetime.fromtimestamp(np.max(time_bar)))NEWLINE save_checkpoint(batch_idx + 1, epoch)NEWLINENEWLINE result = test(model, n_test_batches=None if batch_idx == len(train_loader) - 1 else 10, epoch=epoch)NEWLINE test_MAR.append(np.mean(result[0]['Com']))NEWLINE test_HITS10.append(np.mean(result[1]['Com']))NEWLINE test_loss.append(result[2])NEWLINENEWLINENEWLINE # restore node embeddings and other dataNEWLINE time_bar = set_temporal_variables(variables, model, train_loader, test_loader)NEWLINENEWLINE scheduler.step()NEWLINENEWLINENEWLINE print('end time:', datetime.datetime.now())NEWLINENEWLINE |
class StatusAuditoria:NEWLINE CONCLUIDO = "OK"NEWLINE NAO_CONCLUIDO = "NOK"NEWLINE |
import gymNEWLINEimport randomNEWLINEimport numpy as npNEWLINEimport argparseNEWLINEfrom arguments import get_argsNEWLINEfrom actorcritic import Actor, second, act, actorNEWLINEimport torchNEWLINEfrom torch.autograd import VariableNEWLINEimport torch.nn.functional as FNEWLINEimport torch.optim as optimNEWLINEimport torch.cudaNEWLINEimport matplotlib.pyplot as pltNEWLINEfrom torch.distributions import NormalNEWLINEimport osNEWLINEimport randomNEWLINEimport torch.nn as nnNEWLINEfrom itertools import countNEWLINEimport timeNEWLINEimport csvNEWLINENEWLINEdef ensure_shared_grads(model, shared_model):NEWLINE for param, shared_param in zip(model.parameters(),shared_model.parameters()):NEWLINE if shared_param.grad is not None:NEWLINE returnNEWLINE shared_param._grad = param.gradNEWLINENEWLINE# process the inputsNEWLINEdef process_inputs(o, g, o_mean, o_std, g_mean, g_std, args):NEWLINE o_clip = np.clip(o, -args.clip_obs, args.clip_obs)NEWLINE g_clip = np.clip(g, -args.clip_obs, args.clip_obs)NEWLINE o_norm = np.clip((o_clip - o_mean) / (o_std), -args.clip_range, args.clip_range)NEWLINE g_norm = np.clip((g_clip - g_mean) / (g_std), -args.clip_range, args.clip_range)NEWLINE inputs = np.concatenate([o_norm, g_norm])NEWLINE inputs = torch.tensor(inputs, dtype=torch.float32)NEWLINE return inputsNEWLINENEWLINENEWLINENEWLINEdef train(rank, args, shared_model, counter, lock, optimizer=None):NEWLINE NEWLINE args2 = get_args()NEWLINE # load the model paramNEWLINE model_path_approach = args2.save_dir + args2.env_name + '/approach.pt'NEWLINE o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, model_approach = torch.load(model_path_approach, map_location=lambda storage, loc: storage)NEWLINE model_path_manipulate = args2.save_dir + args2.env_name + '/manipulate.pt'NEWLINE o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, model_manipulate = torch.load(model_path_manipulate, map_location=lambda storage, loc: storage)NEWLINE model_path_retract = 
args2.save_dir + args2.env_name + '/retract.pt'NEWLINE o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, model_retract = torch.load(model_path_retract, map_location=lambda storage, loc: storage)NEWLINENEWLINE FloatTensor = torch.cuda.FloatTensor if args.use_cuda else torch.FloatTensorNEWLINE NEWLINE env = gym.make("FetchPickAndPlace-v1")NEWLINE env2 = gym.wrappers.FlattenDictWrapper(env, dict_keys=['observation', 'desired_goal'])NEWLINE observation = env.reset()NEWLINENEWLINE env_params = {'obs': observation['observation'].shape[0], NEWLINE 'goal': observation['desired_goal'].shape[0], NEWLINE 'action': env.action_space.shape[0], NEWLINE 'action_max': env.action_space.high[0],NEWLINE }NEWLINE hlc = Actor()NEWLINE # create the actor networkNEWLINE actor_network_approach = actor(env_params)NEWLINE actor_network_approach.load_state_dict(model_approach)NEWLINE actor_network_approach.eval()NEWLINE actor_network_manipulate = actor(env_params)NEWLINE actor_network_manipulate.load_state_dict(model_manipulate)NEWLINE actor_network_manipulate.eval()NEWLINE actor_network_retract = actor(env_params)NEWLINE actor_network_retract.load_state_dict(model_retract)NEWLINE actor_network_retract.eval()NEWLINENEWLINE if args.use_cuda:NEWLINE hlc.cuda()NEWLINENEWLINENEWLINE NEWLINE for p in hlc.fc1.parameters():NEWLINE p.requires_grad = FalseNEWLINE for p in hlc.fc2.parameters():NEWLINE p.requires_grad = FalseNEWLINE NEWLINE if optimizer is None:NEWLINE optimizer = optim.Adam(shared_model.parameters(), lr=args.lr)NEWLINE NEWLINE hlc.train()NEWLINE NEWLINE done = True NEWLINE for num_iter in count():NEWLINE with lock:NEWLINE counter.value += 1NEWLINE #print(num_iter, counter.value)NEWLINE observation = env.reset()NEWLINE NEWLINE goal = observation['desired_goal']NEWLINE objectPos = observation['observation'][3:6]NEWLINE object_rel_pos = observation['observation'][6:9]NEWLINE object_oriented_goal = object_rel_pos.copy()NEWLINE object_oriented_goal[2] += 0.03 # first make the 
gripper go slightly above the object NEWLINE timeStep = 0 #count the total number of timestepsNEWLINE grip_pos = -object_rel_pos + objectPosNEWLINE NEWLINE object_pos_goal = objectPos.copy()NEWLINE if grip_pos[0] > objectPos[0]:NEWLINE object_pos_goal[0] += 0.003NEWLINE else:NEWLINE object_pos_goal[0] -= 0.003NEWLINENEWLINE if grip_pos[1] > objectPos[1]:NEWLINE object_pos_goal[1] += 0.002NEWLINE else:NEWLINE object_pos_goal[1] -= 0.002NEWLINENEWLINE object_pos_goal[2] -= -0.031NEWLINENEWLINE if rank == 0:NEWLINENEWLINE if num_iter % args.save_interval == 0 and num_iter > 0:NEWLINE #print ("Saving model at :" + args.save_path) NEWLINE torch.save(shared_model.state_dict(), args.save_path1)NEWLINENEWLINE if num_iter % (args.save_interval * 2.5) == 0 and num_iter > 0 and rank == 1: # Second saver in-case first processes crashes NEWLINE #print ("Saving model for process 1 at :" + args.save_path) NEWLINE torch.save(shared_model.state_dict(), args.save_path1)NEWLINE NEWLINE hlc.load_state_dict(shared_model.state_dict())NEWLINE values, log_probs, rewards, entropies = [], [], [], []NEWLINE if done:NEWLINE cx = Variable(torch.zeros(1, 32)).type(FloatTensor)NEWLINE hx = Variable(torch.zeros(1, 32)).type(FloatTensor)NEWLINE else:NEWLINE cx = Variable(cx.data).type(FloatTensor)NEWLINE hx = Variable(hx.data).type(FloatTensor)NEWLINENEWLINE state_inp = torch.from_numpy(env2.observation(observation)).type(FloatTensor)NEWLINE #criterion = nn.MSELoss()NEWLINE value, y, (hx, cx) = hlc(state_inp, hx, cx)NEWLINE prob = F.softmax(y)NEWLINE log_prob = F.log_softmax(y, dim=-1)NEWLINE act_model = prob.max(-1, keepdim=True)[1].dataNEWLINE entropy = -(log_prob * prob).sum(-1, keepdim=True)NEWLINE log_prob = log_prob.gather(-1, Variable(act_model))NEWLINE action_out = act_model.to(torch.device("cpu"))NEWLINE #action_out = torch.tensor([[0]])NEWLINE entropies.append(entropy), log_probs.append(log_prob), values.append(value)NEWLINE #print(action_out)NEWLINE obs = 
observation["observation"]NEWLINE observation_new = observationNEWLINENEWLINE while np.linalg.norm(grip_pos - object_pos_goal) >= 0.031 and timeStep <= 20:NEWLINE actions = [0, 0, 0, 0]NEWLINE if action_out == 0:NEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, object_pos_goal, o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, args2)NEWLINE pi = actor_network_approach(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINENEWLINE elif action_out == 1:NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, objectPos, o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, args2)NEWLINE pi = actor_network_manipulate(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE else: NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, goal, o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, args2)NEWLINE pi = actor_network_retract(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINENEWLINE actions[3] = 0.05NEWLINE observation_new, _, _, info = env.step(actions)NEWLINE obs = observation_new["observation"]NEWLINE g = observation_new["desired_goal"]NEWLINENEWLINE objectPos_new = observation_new["observation"][3:6]NEWLINE object_rel_pos_new = observation_new["observation"][6:9]NEWLINE objectPos = objectPos_newNEWLINE grip_pos_new = -object_rel_pos_new.copy() + objectPos_new.copy()NEWLINENEWLINE grip_pos = grip_pos_newNEWLINE object_oriented_goal = object_rel_pos_newNEWLINE NEWLINE timeStep += 1NEWLINE state_inp = torch.from_numpy(env2.observation(observation_new)).type(FloatTensor)NEWLINE if timeStep >= 21: NEWLINE breakNEWLINE NEWLINE NEWLINE reward = 
torch.Tensor([-1.0]).type(FloatTensor)NEWLINE rewards.append(reward)NEWLINE NEWLINE value, y, (hx, cx) = hlc(state_inp, hx, cx)NEWLINE prob = F.softmax(y)NEWLINE log_prob = F.log_softmax(y, dim=-1)NEWLINE act_model = prob.max(-1, keepdim=True)[1].dataNEWLINE entropy = -(log_prob * prob).sum(-1, keepdim=True)NEWLINE log_prob = log_prob.gather(-1, Variable(act_model))NEWLINE action_out = act_model.to(torch.device("cpu"))NEWLINE entropies.append(entropy), log_probs.append(log_prob), values.append(value)NEWLINE #action_out = torch.tensor([[1]])NEWLINENEWLINE NEWLINE while np.linalg.norm(grip_pos - objectPos) >= 0.015 and timeStep < env._max_episode_steps:NEWLINE actions = [0, 0, 0, 0]NEWLINE if action_out == 0:NEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, object_pos_goal, o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, args2)NEWLINE pi = actor_network_approach(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINENEWLINE elif action_out == 1:NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, objectPos, o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, args2)NEWLINE pi = actor_network_manipulate(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE else: NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, goal, o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, args2)NEWLINE pi = actor_network_retract(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE actions[3] = -0.01NEWLINE NEWLINE observation_new, _, _, info = env.step(actions)NEWLINE obs = observation_new["observation"]NEWLINE objectPos = 
observation_new["observation"][3:6]NEWLINE object_rel_pos = observation_new["observation"][6:9]NEWLINE NEWLINE grip_pos_new = -object_rel_pos + objectPosNEWLINE grip_pos = grip_pos_newNEWLINENEWLINE timeStep += 1NEWLINE state_inp = torch.from_numpy(env2.observation(observation_new)).type(FloatTensor)NEWLINE if timeStep >= env._max_episode_steps: NEWLINE breakNEWLINE NEWLINE NEWLINE reward = torch.Tensor([-1.0]).type(FloatTensor)NEWLINE rewards.append(reward)NEWLINENEWLINE value, y, (hx, cx) = hlc(state_inp, hx, cx)NEWLINE prob = F.softmax(y)NEWLINE log_prob = F.log_softmax(y, dim=-1)NEWLINE act_model = prob.max(-1, keepdim=True)[1].dataNEWLINE entropy = -(log_prob * prob).sum(-1, keepdim=True)NEWLINE log_prob = log_prob.gather(-1, Variable(act_model))NEWLINE action_out = act_model.to(torch.device("cpu"))NEWLINE entropies.append(entropy), log_probs.append(log_prob), values.append(value)NEWLINE #action_out = torch.tensor([[2]])NEWLINENEWLINE while np.linalg.norm(goal - objectPos) >= 0.01 and timeStep < env._max_episode_steps:NEWLINE actions = [0, 0, 0, 0]NEWLINE if action_out == 0:NEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, object_pos_goal, o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, args2)NEWLINE pi = actor_network_approach(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINENEWLINE elif action_out == 1:NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, objectPos, o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, args2)NEWLINE pi = actor_network_manipulate(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE else: NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, goal, 
o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, args2)NEWLINE pi = actor_network_retract(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE actions[3] = -0.01NEWLINENEWLINE # put actions into the environmentNEWLINE observation_new, _, _, info = env.step(actions)NEWLINE obs = observation_new['observation']NEWLINE #inputs = process_inputs(obs, g, o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, args)NEWLINE timeStep += 1NEWLINE state_inp = torch.from_numpy(env2.observation(observation_new)).type(FloatTensor)NEWLINE objectPos = observation_new['observation'][3:6]NEWLINE object_rel_pos = observation_new['observation'][6:9]NEWLINE if timeStep >= env._max_episode_steps: NEWLINE breakNEWLINENEWLINE while True: #limit the number of timesteps in the episode to a fixed durationNEWLINE #env.render()NEWLINE action = [0, 0, 0, 0]NEWLINE action[3] = -0.01 # keep the gripper closedNEWLINENEWLINE obsDataNew, reward, done, info = env.step(action)NEWLINE timeStep += 1NEWLINENEWLINE objectPos = obsDataNew['observation'][3:6]NEWLINE object_rel_pos = obsDataNew['observation'][6:9]NEWLINENEWLINE if timeStep >= env._max_episode_steps: breakNEWLINE NEWLINE if info['is_success'] == 1.0:NEWLINE reward = torch.Tensor([10.0]).type(FloatTensor)NEWLINE else:NEWLINE reward = torch.Tensor([-1.0]).type(FloatTensor)NEWLINE rewards.append(reward)NEWLINE NEWLINE R = torch.zeros(1, 1)NEWLINE values.append(Variable(R).type(FloatTensor))NEWLINE policy_loss = 0NEWLINE value_loss = 0NEWLINE R = Variable(R).type(FloatTensor)NEWLINE gae = torch.zeros(1, 1).type(FloatTensor)NEWLINENEWLINE for i in reversed(range(len(rewards))):NEWLINE R = args.gamma * R + rewards[i]NEWLINE advantage = R - values[i]NEWLINE value_loss = value_loss + 0.5 * advantage.pow(2)NEWLINENEWLINE delta_t = rewards[i] + args.gamma * values[i + 1].data - values[i].dataNEWLINE gae = gae * args.gamma * args.tau + delta_tNEWLINENEWLINE policy_loss = 
policy_loss - log_probs[i] * Variable(gae).type(FloatTensor)NEWLINENEWLINE total_loss = policy_loss + args.value_loss_coef * value_lossNEWLINE optimizer.zero_grad()NEWLINENEWLINE (total_loss).backward(retain_graph=True)NEWLINE torch.nn.utils.clip_grad_norm_(hlc.parameters(), args.max_grad_norm)NEWLINENEWLINE ensure_shared_grads(hlc, shared_model)NEWLINE optimizer.step()NEWLINENEWLINEdef test(rank, args, shared_model, counter):NEWLINE NEWLINE args2 = get_args()NEWLINE # load the model paramNEWLINE model_path_approach = args2.save_dir + args2.env_name + '/approach.pt'NEWLINE o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, model_approach = torch.load(model_path_approach, map_location=lambda storage, loc: storage)NEWLINE model_path_manipulate = args2.save_dir + args2.env_name + '/manipulate.pt'NEWLINE o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, model_manipulate = torch.load(model_path_manipulate, map_location=lambda storage, loc: storage)NEWLINE model_path_retract = args2.save_dir + args2.env_name + '/retract.pt'NEWLINE o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, model_retract = torch.load(model_path_retract, map_location=lambda storage, loc: storage)NEWLINENEWLINE FloatTensor = torch.cuda.FloatTensor if args.use_cuda else torch.FloatTensorNEWLINE NEWLINE env = gym.make("FetchPickAndPlace-v1")NEWLINE env2 = gym.wrappers.FlattenDictWrapper(env, dict_keys=['observation', 'desired_goal'])NEWLINE observation = env.reset()NEWLINENEWLINE env_params = {'obs': observation['observation'].shape[0], NEWLINE 'goal': observation['desired_goal'].shape[0], NEWLINE 'action': env.action_space.shape[0], NEWLINE 'action_max': env.action_space.high[0],NEWLINE }NEWLINE hlc = Actor()NEWLINE # create the actor networkNEWLINE actor_network_approach = actor(env_params)NEWLINE actor_network_approach.load_state_dict(model_approach)NEWLINE actor_network_approach.eval()NEWLINE actor_network_manipulate = actor(env_params)NEWLINE 
actor_network_manipulate.load_state_dict(model_manipulate)NEWLINE actor_network_manipulate.eval()NEWLINE actor_network_retract = actor(env_params)NEWLINE actor_network_retract.load_state_dict(model_retract)NEWLINE actor_network_retract.eval()NEWLINE if args.use_cuda:NEWLINE hlc.cuda()NEWLINE NEWLINE done = True NEWLINENEWLINE savefile = os.getcwd() + '/train/mario_curves.csv'NEWLINE title = ['No. episodes', 'No. of success']NEWLINE with open(savefile, 'a', newline='') as sfile:NEWLINE writer = csv.writer(sfile)NEWLINE writer.writerow(title) NEWLINENEWLINE hlc.eval()NEWLINE while True:NEWLINE hlc.load_state_dict(shared_model.state_dict())NEWLINE hlc.eval()NEWLINE ep_num = 0NEWLINE success = 0NEWLINE num_ep = counter.valueNEWLINE while ep_num < 50:NEWLINE ep_num +=1NEWLINE observation = env.reset() NEWLINE #lastObs = observationNEWLINE goal = observation['desired_goal']NEWLINE objectPos = observation['observation'][3:6]NEWLINE object_rel_pos = observation['observation'][6:9]NEWLINE object_oriented_goal = object_rel_pos.copy()NEWLINE object_oriented_goal[2] += 0.03 # first make the gripper go slightly above the object NEWLINE timeStep = 0NEWLINE grip_pos = -object_rel_pos + objectPosNEWLINE NEWLINE object_pos_goal = objectPos.copy()NEWLINE if grip_pos[0] > objectPos[0]:NEWLINE object_pos_goal[0] += 0.003NEWLINE else:NEWLINE object_pos_goal[0] -= 0.003NEWLINENEWLINE if grip_pos[1] > objectPos[1]:NEWLINE object_pos_goal[1] += 0.002NEWLINE else:NEWLINE object_pos_goal[1] -= 0.002NEWLINENEWLINE object_pos_goal[2] -= -0.031NEWLINENEWLINE if done:NEWLINE cx = Variable(torch.zeros(1, 32)).type(FloatTensor)NEWLINE hx = Variable(torch.zeros(1, 32)).type(FloatTensor)NEWLINE else:NEWLINE cx = Variable(cx.data).type(FloatTensor)NEWLINE hx = Variable(hx.data).type(FloatTensor)NEWLINENEWLINE state_inp = torch.from_numpy(env2.observation(observation)).type(FloatTensor)NEWLINE value, y, (hx, cx) = hlc(state_inp, hx, cx)NEWLINE prob = F.softmax(y)NEWLINE act_model = prob.max(-1, 
keepdim=True)[1].dataNEWLINE action_out = act_model.to(torch.device("cpu"))NEWLINENEWLINENEWLINE #print('action_out before approach:', action_out)NEWLINE obs = observation["observation"]NEWLINE while np.linalg.norm(grip_pos - object_pos_goal) >= 0.031 and timeStep <= 20:NEWLINE #env.render()NEWLINE actions = [0, 0, 0, 0]NEWLINE if action_out == 0:NEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, object_pos_goal, o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, args2)NEWLINE pi = actor_network_approach(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINENEWLINE elif action_out == 1:NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, objectPos, o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, args2)NEWLINE pi = actor_network_manipulate(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE else: NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, goal, o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, args2)NEWLINE pi = actor_network_retract(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINENEWLINE actions[3] = 0.05NEWLINENEWLINE observation_new, _, _, info = env.step(actions)NEWLINE obs = observation_new["observation"]NEWLINE g = observation_new["desired_goal"]NEWLINENEWLINE objectPos_new = observation_new["observation"][3:6]NEWLINE object_rel_pos_new = observation_new["observation"][6:9]NEWLINE objectPos = objectPos_newNEWLINE grip_pos_new = -object_rel_pos_new + objectPos_newNEWLINENEWLINE grip_pos = grip_pos_newNEWLINE object_oriented_goal = object_rel_pos_newNEWLINE NEWLINE #print('timestep: {},reward eval: 
{}'.format(timeStep, reward))NEWLINE timeStep += 1NEWLINE state_inp = torch.from_numpy(env2.observation(observation_new)).type(FloatTensor)NEWLINE NEWLINE NEWLINENEWLINE value, y, (hx, cx) = hlc(state_inp, hx, cx)NEWLINE prob = F.softmax(y)NEWLINE act_model = prob.max(-1, keepdim=True)[1].dataNEWLINE action_out = act_model.to(torch.device("cpu"))NEWLINE NEWLINE while np.linalg.norm(grip_pos - objectPos) >= 0.015 and timeStep < env._max_episode_steps:NEWLINE #env.render()NEWLINE actions = [0, 0, 0, 0]NEWLINE if action_out == 0:NEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, object_pos_goal, o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, args2)NEWLINE pi = actor_network_approach(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINENEWLINE elif action_out == 1:NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, objectPos, o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, args2)NEWLINE pi = actor_network_manipulate(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE else: NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, goal, o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, args2)NEWLINE pi = actor_network_retract(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE NEWLINE actions[3] = -0.01NEWLINE NEWLINE observation_new, _, _, info = env.step(actions)NEWLINE obs = observation_new["observation"]NEWLINE objectPos = observation_new["observation"][3:6]NEWLINE object_rel_pos = observation_new["observation"][6:9]NEWLINE NEWLINE grip_pos_new = -object_rel_pos + objectPosNEWLINE grip_pos = 
grip_pos_newNEWLINENEWLINE timeStep += 1NEWLINE state_inp = torch.from_numpy(env2.observation(observation_new)).type(FloatTensor)NEWLINE if timeStep >= env._max_episode_steps: NEWLINE breakNEWLINE NEWLINE value, y, (hx, cx) = hlc(state_inp, hx, cx)NEWLINE prob = F.softmax(y) NEWLINE act_model = prob.max(-1, keepdim=True)[1].data NEWLINE action_out = act_model.to(torch.device("cpu"))NEWLINE NEWLINE NEWLINE while np.linalg.norm(goal - objectPos) >= 0.01 and timeStep < env._max_episode_steps:NEWLINE #env.render()NEWLINE actions = [0, 0, 0, 0]NEWLINE if action_out == 0:NEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, object_pos_goal, o_mean_approach, o_std_approach, g_mean_approach, g_std_approach, args2)NEWLINE pi = actor_network_approach(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINENEWLINE elif action_out == 1:NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, objectPos, o_mean_manipulate, o_std_manipulate, g_mean_manipulate, g_std_manipulate, args2)NEWLINE pi = actor_network_manipulate(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE else: NEWLINENEWLINE with torch.no_grad():NEWLINE #input_tensor = _preproc_inputs(obs, objectPos)NEWLINE input_tensor = process_inputs(obs, goal, o_mean_retract, o_std_retract, g_mean_retract, g_std_retract, args2)NEWLINE pi = actor_network_retract(input_tensor)NEWLINE # convert the actionsNEWLINE actions = pi.detach().cpu().numpy().squeeze()NEWLINE NEWLINE actions[3] = -0.01NEWLINENEWLINE # put actions into the environmentNEWLINE observation_new, _, _, info = env.step(actions)NEWLINE obs = observation_new['observation']NEWLINE NEWLINE timeStep += 1NEWLINE state_inp = torch.from_numpy(env2.observation(observation_new)).type(FloatTensor)NEWLINE objectPos = 
observation_new['observation'][3:6]NEWLINE object_rel_pos = observation_new['observation'][6:9]NEWLINE if timeStep >= env._max_episode_steps: NEWLINE breakNEWLINE NEWLINE while True: #limit the number of timesteps in the episode to a fixed durationNEWLINE #env.render()NEWLINE action = [0, 0, 0, 0]NEWLINE action[3] = -0.01 # keep the gripper closedNEWLINENEWLINE obsDataNew, reward, done, info = env.step(action)NEWLINE timeStep += 1NEWLINENEWLINE objectPos = obsDataNew['observation'][3:6]NEWLINE object_rel_pos = obsDataNew['observation'][6:9]NEWLINE if timeStep >= env._max_episode_steps: breakNEWLINE NEWLINE if info['is_success'] == 1.0:NEWLINE success +=1NEWLINE NEWLINE if ep_num % 49==0: NEWLINE print("num episodes {}, success {}".format(num_ep, success*2))NEWLINE data = [counter.value, success*2]NEWLINE with open(savefile, 'a', newline='') as sfile:NEWLINE writer = csv.writer(sfile)NEWLINE writer.writerows([data])NEWLINE #time.sleep(15)NEWLINE |
# -*- coding: utf-8 -*-NEWLINE# Copyright 2020 Google Inc.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE# Make sure that your AWS credentials are configured correclty, seeNEWLINE# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html #pylint: disable=line-too-longNEWLINE"""Demo CLI tool for AWS."""NEWLINENEWLINEfrom datetime import datetimeNEWLINEfrom typing import TYPE_CHECKINGNEWLINENEWLINEfrom libcloudforensics.providers.aws.internal import accountNEWLINEfrom libcloudforensics.providers.aws.internal import log as aws_logNEWLINEfrom libcloudforensics.providers.aws import forensicsNEWLINENEWLINEif TYPE_CHECKING:NEWLINE import argparseNEWLINENEWLINENEWLINEdef ListInstances(args: 'argparse.Namespace') -> None:NEWLINE """List EC2 instances in AWS account.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from ArgumentParser.NEWLINE """NEWLINENEWLINE aws_account = account.AWSAccount(args.zone)NEWLINE instances = aws_account.ListInstances()NEWLINENEWLINE print('Instances found:')NEWLINE for instance in instances:NEWLINE boot_volume = instances[instance].GetBootVolume().volume_idNEWLINE print('Name: {0:s}, Boot volume: {1:s}'.format(instance, boot_volume))NEWLINENEWLINENEWLINEdef ListVolumes(args: 'argparse.Namespace') -> None:NEWLINE """List EBS volumes in AWS account.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from 
ArgumentParser.NEWLINE """NEWLINENEWLINE aws_account = account.AWSAccount(args.zone)NEWLINE volumes = aws_account.ListVolumes()NEWLINENEWLINE print('Volumes found:')NEWLINE for volume in volumes:NEWLINE print('Name: {0:s}, Zone: {1:s}'.format(NEWLINE volume, volumes[volume].availability_zone))NEWLINENEWLINENEWLINEdef CreateVolumeCopy(args: 'argparse.Namespace') -> None:NEWLINE """Create a AWS Volume copy.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from ArgumentParser.NEWLINE """NEWLINE print('Starting volume copy...')NEWLINE volume_copy = forensics.CreateVolumeCopy(args.zone,NEWLINE dst_zone=args.dst_zone,NEWLINE instance_id=args.instance_id,NEWLINE volume_id=args.volume_id,NEWLINE src_profile=args.src_profile,NEWLINE dst_profile=args.dst_profile)NEWLINE print(NEWLINE 'Done! Volume {0:s} successfully created. You will find it in 'NEWLINE 'your AWS account under the name {1:s}.'.format(NEWLINE volume_copy.volume_id, volume_copy.name))NEWLINENEWLINENEWLINEdef QueryLogs(args: 'argparse.Namespace') -> None:NEWLINE """Query AWS CloudTrail log events.NEWLINENEWLINE Args:NEWLINE args (argparse.Namespace): Arguments from ArgumentParser.NEWLINE """NEWLINE ct = aws_log.AWSCloudTrail(account.AWSAccount(args.zone))NEWLINENEWLINE params = {}NEWLINE if args.filter:NEWLINE params['qfilter'] = args.filterNEWLINE if args.start:NEWLINE params['starttime'] = datetime.strptime(args.start, '%Y-%m-%d %H:%M:%S')NEWLINE if args.end:NEWLINE params['endtime'] = datetime.strptime(args.end, '%Y-%m-%d %H:%M:%S')NEWLINENEWLINE result = ct.LookupEvents(**params)NEWLINENEWLINE if result:NEWLINE print('Log events found: {0:d}'.format(len(result)))NEWLINE for event in result:NEWLINE print(event)NEWLINE |
#!/usr/bin/env pythonNEWLINE__author__ = 'mike knowles'NEWLINENEWLINEif __name__ == '__main__':NEWLINE pass |
# -*- coding: utf-8 -*-

import io
import re

import demjson
import requests
import pandas as pd

from zvt.api.common import china_stock_code_to_id
from zvt.api.technical import init_securities, df_to_db
from zvt.domain import Provider, StockIndex, StockCategory
from zvt.recorders.consts import DEFAULT_SH_ETF_LIST_HEADER
from zvt.recorders.recorder import Recorder


class ChinaETFListSpider(Recorder):
    """Recorder that fetches the ETF lists of the Shanghai (SSE) and
    Shenzhen (SZSE) exchanges, persists them as index securities, and then
    downloads each ETF's constituent stocks into ``StockIndex`` records.
    """

    # Schema used when persisting ETF-constituent rows via df_to_db.
    data_schema = StockIndex

    def __init__(self, batch_size=10, force_update=False, sleeping_time=10.0, provider=Provider.EXCHANGE) -> None:
        # Data provider tag stored on every persisted record.
        self.provider = provider
        super().__init__(batch_size, force_update, sleeping_time)

    def run(self):
        """Fetch ETF lists and constituents for both exchanges, in order."""
        # Fetch the Shanghai (SSE) ETF list (JSON-ish payload, hence demjson).
        url = 'http://query.sse.com.cn/commonQuery.do?sqlId=COMMON_SSE_ZQPZ_ETFLB_L_NEW'
        response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
        response_dict = demjson.decode(response.text)

        df = pd.DataFrame(response_dict.get('result', []))
        self.persist_etf_list(df, exchange='sh')
        self.logger.info('沪市 ETF 列表抓取完成...')

        # Fetch constituent stocks for the Shanghai ETFs.
        self.download_sh_etf_component(df)
        self.logger.info('沪市 ETF 成分股抓取完成...')

        # Fetch the Shenzhen (SZSE) ETF list, published as an xlsx report.
        url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1945'
        response = requests.get(url)

        df = pd.read_excel(io.BytesIO(response.content), dtype=str)
        self.persist_etf_list(df, exchange='sz')
        self.logger.info('深市 ETF 列表抓取完成...')

        # Fetch constituent stocks for the Shenzhen ETFs.
        self.download_sz_etf_component(df)
        self.logger.info('深市 ETF 成分股抓取完成...')

    def persist_etf_list(self, df: pd.DataFrame, exchange: str):
        """Normalize an exchange's raw ETF list and persist it as securities.

        :param df: raw ETF list as returned by the exchange; column names
            differ between SSE ('FUND_ID'/'FUND_NAME') and SZSE (Chinese
            headers).
        :param exchange: 'sh' or 'sz'.
        """
        if df is None:
            return

        df = df.copy()
        if exchange == 'sh':
            df = df[['FUND_ID', 'FUND_NAME']]
        elif exchange == 'sz':
            df = df[['证券代码', '证券简称']]

        # Unify both exchanges to a common (code, name) layout.
        df.columns = ['code', 'name']
        df['id'] = df['code'].apply(lambda code: f'index_{exchange}_{code}')
        df['exchange'] = exchange
        df['type'] = 'index'
        df['category'] = StockCategory.etf.value

        df = df.dropna(axis=0, how='any')
        df = df.drop_duplicates(subset='id', keep='last')

        init_securities(df, security_type='index', provider=self.provider)

    def download_sh_etf_component(self, df: pd.DataFrame):
        """Fetch constituent stocks for Shanghai ETFs.

        ETF_CLASS => 1. single-market ETF  2. cross-market ETF
        3. cross-border ETF  5. bond ETF  6. gold ETF

        :param df: ETF list data
        :return: None
        """
        query_url = 'http://query.sse.com.cn/infodisplay/queryConstituentStockInfo.do?' \
                    'isPagination=false&type={}&etfClass={}'

        # Only single-market (1) and cross-market (2) ETFs hold A-share stocks.
        etf_df = df[(df['ETF_CLASS'] == '1') | (df['ETF_CLASS'] == '2')]
        etf_df = self.populate_sh_etf_type(etf_df)

        for _, etf in etf_df.iterrows():
            url = query_url.format(etf['ETF_TYPE'], etf['ETF_CLASS'])
            response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
            response_dict = demjson.decode(response.text)
            response_df = pd.DataFrame(response_dict.get('result', []))

            etf_code = etf['FUND_ID']
            index_id = f'index_sh_{etf_code}'
            # Build one (etf, stock) row per constituent instrument.
            response_df = response_df[['instrumentId']]
            response_df['id'] = response_df['instrumentId'].apply(lambda code: f'{index_id}_{china_stock_code_to_id(code)}')
            response_df['stock_id'] = response_df['instrumentId'].apply(lambda code: china_stock_code_to_id(code))
            response_df['index_id'] = index_id
            response_df.drop('instrumentId', axis=1, inplace=True)

            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{etf["FUND_NAME"]} - {etf_code} 成分股抓取完成...')

            # Throttle between requests (Recorder.sleep).
            self.sleep()

    def download_sz_etf_component(self, df: pd.DataFrame):
        """Fetch constituent stocks for Shenzhen ETFs via Sina, keyed by the
        underlying index each ETF tracks.

        :param df: ETF list data; its '拟合指数' column is parsed in place by
            parse_sz_etf_underlying_index before use.
        """
        query_url = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vII_NewestComponent/indexid/{}.phtml'

        self.parse_sz_etf_underlying_index(df)
        for _, etf in df.iterrows():
            underlying_index = etf['拟合指数']
            etf_code = etf['证券代码']

            if len(underlying_index) == 0:
                # Empty code means the ETF does not track an A-share index.
                self.logger.info(f'{etf["证券简称"]} - {etf_code} 非 A 股市场指数,跳过...')
                continue

            url = query_url.format(underlying_index)
            response = requests.get(url)
            # Sina serves this page GBK-encoded.
            response.encoding = 'gbk'

            try:
                dfs = pd.read_html(response.text, header=1)
            except ValueError as error:
                self.logger.error(f'HTML parse error: {error}, response: {response.text}')
                continue

            if len(dfs) < 4:
                continue

            # The fourth table on the page holds the constituent list
            # (presumably stable page layout — verify if Sina changes it).
            response_df = dfs[3].copy()
            response_df = response_df.dropna(axis=1, how='any')
            # Zero-pad stock codes to the standard 6 digits.
            response_df['品种代码'] = response_df['品种代码'].apply(lambda x: f'{x:06d}')

            index_id = f'index_sz_{etf_code}'
            response_df = response_df[['品种代码']]

            response_df['id'] = response_df['品种代码'].apply(lambda code: f'{index_id}_{china_stock_code_to_id(code)}')
            response_df['stock_id'] = response_df['品种代码'].apply(lambda code: china_stock_code_to_id(code))
            response_df['index_id'] = index_id
            response_df.drop('品种代码', axis=1, inplace=True)

            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{etf["证券简称"]} - {etf_code} 成分股抓取完成...')

            self.sleep()

    @staticmethod
    def populate_sh_etf_type(df: pd.DataFrame):
        """Attach the TYPE matching each Shanghai ETF code to the list data.

        :param df: ETF list data
        :return: copy of the list data with an ETF_TYPE column added
        """
        query_url = 'http://query.sse.com.cn/infodisplay/queryETFNewAllInfo.do?' \
                    'isPagination=false&type={}&pageHelp.pageSize=25'

        type_df = pd.DataFrame()
        for etf_class in [1, 2]:
            url = query_url.format(etf_class)
            response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
            response_dict = demjson.decode(response.text)
            response_df = pd.DataFrame(response_dict.get('result', []))
            response_df = response_df[['fundid1', 'etftype']]

            type_df = pd.concat([type_df, response_df])

        result_df = df.copy()
        # NOTE(review): types are matched positionally after sorting both
        # frames by fund code — this assumes both endpoints return exactly
        # the same set of codes; confirm against the SSE responses.
        result_df = result_df.sort_values(by='FUND_ID').reset_index(drop=True)
        type_df = type_df.sort_values(by='fundid1').reset_index(drop=True)

        result_df['ETF_TYPE'] = type_df['etftype']

        return result_df

    @staticmethod
    def parse_sz_etf_underlying_index(df: pd.DataFrame):
        """Parse the numeric code of the index each Shenzhen ETF tracks.

        :param df: ETF list data; the '拟合指数' column is rewritten in place
            to the extracted digits (or '' when no index code is present).
        """
        def parse_index(text):
            # Empty cell: no underlying index.
            if len(text) == 0:
                return ''

            # Keep only the leading digit run, e.g. '399330.SZ...' -> '399330'.
            result = re.search(r"(\d+).*", text)
            if result is None:
                return ''
            else:
                return result.group(1)

        df['拟合指数'] = df['拟合指数'].apply(parse_index)


if __name__ == '__main__':
    spider = ChinaETFListSpider(provider=Provider.EXCHANGE)
    spider.run()
from __future__ import divisionNEWLINENEWLINEimport numpy as npNEWLINEfrom skimage.util.dtype import dtype_rangeNEWLINEfrom skimage import drawNEWLINEfrom skimage import measureNEWLINENEWLINEfrom .plotplugin import PlotPluginNEWLINEfrom ..canvastools import ThickLineToolNEWLINENEWLINENEWLINE__all__ = ['LineProfile']NEWLINENEWLINENEWLINEclass LineProfile(PlotPlugin):NEWLINE """Plugin to compute interpolated intensity under a scan line on an image.NEWLINENEWLINE See PlotPlugin and Plugin classes for additional details.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE maxdist : floatNEWLINE Maximum pixel distance allowed when selecting end point of scan line.NEWLINE limits : tuple or {None, 'image', 'dtype'}NEWLINE (minimum, maximum) intensity limits for plotted profile. The followingNEWLINE special values are defined:NEWLINENEWLINE None : rescale based on min/max intensity along selected scan line.NEWLINE 'image' : fixed scale based on min/max intensity in image.NEWLINE 'dtype' : fixed scale based on min/max intensity of image dtype.NEWLINE """NEWLINE name = 'Line Profile'NEWLINENEWLINE def __init__(self, maxdist=10, epsilon='deprecated',NEWLINE limits='image', **kwargs):NEWLINE super(LineProfile, self).__init__(**kwargs)NEWLINE self.maxdist = maxdistNEWLINE self._limit_type = limitsNEWLINE print(self.help())NEWLINENEWLINE def attach(self, image_viewer):NEWLINE super(LineProfile, self).attach(image_viewer)NEWLINENEWLINE image = image_viewer.original_imageNEWLINENEWLINE if self._limit_type == 'image':NEWLINE self.limits = (np.min(image), np.max(image))NEWLINE elif self._limit_type == 'dtype':NEWLINE self._limit_type = dtype_range[image.dtype.type]NEWLINE elif self._limit_type is None or len(self._limit_type) == 2:NEWLINE self.limits = self._limit_typeNEWLINE else:NEWLINE raise ValueError("Unrecognized `limits`: %s" % self._limit_type)NEWLINENEWLINE if not self._limit_type is None:NEWLINE self.ax.set_ylim(self.limits)NEWLINENEWLINE h, w = image.shape[0:2]NEWLINE x = 
[w / 3, 2 * w / 3]NEWLINE y = [h / 2] * 2NEWLINENEWLINE self.line_tool = ThickLineTool(self.image_viewer.ax,NEWLINE maxdist=self.maxdist,NEWLINE on_move=self.line_changed,NEWLINE on_change=self.line_changed)NEWLINE self.line_tool.end_points = np.transpose([x, y])NEWLINENEWLINE scan_data = measure.profile_line(image, NEWLINE *self.line_tool.end_points[:, ::-1])NEWLINE self.scan_data = scan_dataNEWLINE if scan_data.ndim == 1:NEWLINE scan_data = scan_data[:, np.newaxis]NEWLINENEWLINE self.reset_axes(scan_data)NEWLINENEWLINE self._autoscale_view()NEWLINENEWLINE def help(self):NEWLINE helpstr = ("Line profile tool",NEWLINE "+ and - keys or mouse scroll changes width of scan line.",NEWLINE "Select and drag ends of the scan line to adjust it.")NEWLINE return '\n'.join(helpstr)NEWLINENEWLINE def get_profiles(self):NEWLINE """Return intensity profile of the selected line.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE end_points: (2, 2) arrayNEWLINE The positions ((x1, y1), (x2, y2)) of the line ends.NEWLINE profile: list of 1d arraysNEWLINE Profile of intensity values. 
Length 1 (grayscale) or 3 (rgb).NEWLINE """NEWLINE profiles = [data.get_ydata() for data in self.profile]NEWLINE return self.line_tool.end_points, profilesNEWLINENEWLINE def _autoscale_view(self):NEWLINE if self.limits is None:NEWLINE self.ax.autoscale_view(tight=True)NEWLINE else:NEWLINE self.ax.autoscale_view(scaley=False, tight=True)NEWLINENEWLINE def line_changed(self, end_points):NEWLINE x, y = np.transpose(end_points)NEWLINE self.line_tool.end_points = end_pointsNEWLINE scan = measure.profile_line(self.image_viewer.original_image,NEWLINE *end_points[:, ::-1],NEWLINE linewidth=self.line_tool.linewidth)NEWLINE self.scan_data = scanNEWLINE if scan.ndim == 1:NEWLINE scan = scan[:, np.newaxis]NEWLINENEWLINE if scan.shape[1] != len(self.profile):NEWLINE self.reset_axes(scan)NEWLINENEWLINE for i in range(len(scan[0])):NEWLINE self.profile[i].set_xdata(np.arange(scan.shape[0]))NEWLINE self.profile[i].set_ydata(scan[:, i])NEWLINENEWLINE self.ax.relim()NEWLINENEWLINE self._autoscale_view()NEWLINE self.redraw()NEWLINENEWLINE def reset_axes(self, scan_data):NEWLINE # Clear lines outNEWLINE for line in self.ax.lines:NEWLINE self.ax.lines = []NEWLINENEWLINE if scan_data.shape[1] == 1:NEWLINE self.profile = self.ax.plot(scan_data, 'k-')NEWLINE else:NEWLINE self.profile = self.ax.plot(scan_data[:, 0], 'r-',NEWLINE scan_data[:, 1], 'g-',NEWLINE scan_data[:, 2], 'b-')NEWLINENEWLINE def output(self):NEWLINE """Return the drawn line and the resulting scan.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE line_image : (M, N) uint8 array, same shape as imageNEWLINE An array of 0s with the scanned line set to 255.NEWLINE If the linewidth of the line tool is greater than 1,NEWLINE sets the values within the profiled polygon to 128.NEWLINE scan : (P,) or (P, 3) array of int or floatNEWLINE The line scan values across the image.NEWLINE """NEWLINE end_points = self.line_tool.end_pointsNEWLINE line_image = np.zeros(self.image_viewer.original_image.shape[:2],NEWLINE np.uint8)NEWLINE width = 
self.line_tool.linewidthNEWLINE if width > 1:NEWLINE rp, cp = measure.profile._line_profile_coordinates(NEWLINE *end_points[:, ::-1], linewidth=width)NEWLINE # the points are aliased, so create a polygon using the cornersNEWLINE yp = np.rint(rp[[0, 0, -1, -1],[0, -1, -1, 0]]).astype(int)NEWLINE xp = np.rint(cp[[0, 0, -1, -1],[0, -1, -1, 0]]).astype(int)NEWLINE rp, cp = draw.polygon(yp, xp, line_image.shape)NEWLINE line_image[rp, cp] = 128NEWLINE (x1, y1), (x2, y2) = end_points.astype(int)NEWLINE rr, cc = draw.line(y1, x1, y2, x2)NEWLINE line_image[rr, cc] = 255NEWLINE return line_image, self.scan_dataNEWLINENEWLINE |
# -*- coding: utf-8 -*-NEWLINE#NEWLINE# Copyright 2020 Google LLCNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# https://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE"""Accesses the google.cloud.irm.v1alpha2 IncidentService API."""NEWLINENEWLINEimport functoolsNEWLINEimport pkg_resourcesNEWLINEimport warningsNEWLINENEWLINEfrom google.oauth2 import service_accountNEWLINEimport google.api_core.client_optionsNEWLINEimport google.api_core.gapic_v1.client_infoNEWLINEimport google.api_core.gapic_v1.configNEWLINEimport google.api_core.gapic_v1.methodNEWLINEimport google.api_core.gapic_v1.routing_headerNEWLINEimport google.api_core.grpc_helpersNEWLINEimport google.api_core.page_iteratorNEWLINEimport google.api_core.path_templateNEWLINEimport google.api_core.protobuf_helpersNEWLINEimport grpcNEWLINENEWLINEfrom google.cloud.irm_v1alpha2.gapic import enumsNEWLINEfrom google.cloud.irm_v1alpha2.gapic import incident_service_client_configNEWLINEfrom google.cloud.irm_v1alpha2.gapic.transports import incident_service_grpc_transportNEWLINEfrom google.cloud.irm_v1alpha2.proto import incidents_pb2NEWLINEfrom google.cloud.irm_v1alpha2.proto import incidents_service_pb2NEWLINEfrom google.cloud.irm_v1alpha2.proto import incidents_service_pb2_grpcNEWLINEfrom google.protobuf import empty_pb2NEWLINEfrom google.protobuf import field_mask_pb2NEWLINENEWLINENEWLINE_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-irm",).versionNEWLINENEWLINENEWLINEclass 
IncidentServiceClient(object):NEWLINE """The Incident API for Incident Response & Management."""NEWLINENEWLINE SERVICE_ADDRESS = "irm.googleapis.com:443"NEWLINE """The default address of the service."""NEWLINENEWLINE # The name of the interface for this client. This is the key used toNEWLINE # find the method configuration in the client_config dictionary.NEWLINE _INTERFACE_NAME = "google.cloud.irm.v1alpha2.IncidentService"NEWLINENEWLINE @classmethodNEWLINE def from_service_account_file(cls, filename, *args, **kwargs):NEWLINE """Creates an instance of this client using the provided credentialsNEWLINE file.NEWLINENEWLINE Args:NEWLINE filename (str): The path to the service account private key jsonNEWLINE file.NEWLINE args: Additional arguments to pass to the constructor.NEWLINE kwargs: Additional arguments to pass to the constructor.NEWLINENEWLINE Returns:NEWLINE IncidentServiceClient: The constructed client.NEWLINE """NEWLINE credentials = service_account.Credentials.from_service_account_file(filename)NEWLINE kwargs["credentials"] = credentialsNEWLINE return cls(*args, **kwargs)NEWLINENEWLINE from_service_account_json = from_service_account_fileNEWLINENEWLINE @classmethodNEWLINE def annotation_path(cls, project, incident, annotation):NEWLINE """Return a fully-qualified annotation string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/annotations/{annotation}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE annotation=annotation,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def artifact_path(cls, project, incident, artifact):NEWLINE """Return a fully-qualified artifact string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/artifacts/{artifact}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE artifact=artifact,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def incident_path(cls, project, incident):NEWLINE """Return a fully-qualified incident 
string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def incident_role_assignment_path(cls, project_id_or_number, incident_id, role_id):NEWLINE """Return a fully-qualified incident_role_assignment string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}/role_assignments/{role_id}",NEWLINE project_id_or_number=project_id_or_number,NEWLINE incident_id=incident_id,NEWLINE role_id=role_id,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def project_path(cls, project):NEWLINE """Return a fully-qualified project string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}", project=project,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def signal_path(cls, project, signal):NEWLINE """Return a fully-qualified signal string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/signals/{signal}", project=project, signal=signal,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def subscription_path(cls, project, incident, subscription):NEWLINE """Return a fully-qualified subscription string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/subscriptions/{subscription}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE subscription=subscription,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def tag_path(cls, project, incident, tag):NEWLINE """Return a fully-qualified tag string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/tags/{tag}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE tag=tag,NEWLINE )NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE transport=None,NEWLINE channel=None,NEWLINE credentials=None,NEWLINE client_config=None,NEWLINE client_info=None,NEWLINE 
client_options=None,NEWLINE ):NEWLINE """Constructor.NEWLINENEWLINE Args:NEWLINE transport (Union[~.IncidentServiceGrpcTransport,NEWLINE Callable[[~.Credentials, type], ~.IncidentServiceGrpcTransport]): A transportNEWLINE instance, responsible for actually making the API calls.NEWLINE The default transport uses the gRPC protocol.NEWLINE This argument may also be a callable which returns aNEWLINE transport instance. Callables will be sent the credentialsNEWLINE as the first argument and the default transport class asNEWLINE the second argument.NEWLINE channel (grpc.Channel): DEPRECATED. A ``Channel`` instanceNEWLINE through which to make calls. This argument is mutually exclusiveNEWLINE with ``credentials``; providing both will raise an exception.NEWLINE credentials (google.auth.credentials.Credentials): TheNEWLINE authorization credentials to attach to requests. TheseNEWLINE credentials identify this application to the service. If noneNEWLINE are specified, the client will attempt to ascertain theNEWLINE credentials from the environment.NEWLINE This argument is mutually exclusive with providing aNEWLINE transport instance to ``transport``; doing so will raiseNEWLINE an exception.NEWLINE client_config (dict): DEPRECATED. A dictionary of call options forNEWLINE each method. If not specified, the default configuration is used.NEWLINE client_info (google.api_core.gapic_v1.client_info.ClientInfo):NEWLINE The client info used to send a user-agent string along withNEWLINE API requests. If ``None``, then default info will be used.NEWLINE Generally, you only need to set this if you're developingNEWLINE your own client library.NEWLINE client_options (Union[dict, google.api_core.client_options.ClientOptions]):NEWLINE Client options used to set user options on the client. 
API EndpointNEWLINE should be set through client_options.NEWLINE """NEWLINE # Raise deprecation warnings for things we want to go away.NEWLINE if client_config is not None:NEWLINE warnings.warn(NEWLINE "The `client_config` argument is deprecated.",NEWLINE PendingDeprecationWarning,NEWLINE stacklevel=2,NEWLINE )NEWLINE else:NEWLINE client_config = incident_service_client_config.configNEWLINENEWLINE if channel:NEWLINE warnings.warn(NEWLINE "The `channel` argument is deprecated; use " "`transport` instead.",NEWLINE PendingDeprecationWarning,NEWLINE stacklevel=2,NEWLINE )NEWLINENEWLINE api_endpoint = self.SERVICE_ADDRESSNEWLINE if client_options:NEWLINE if type(client_options) == dict:NEWLINE client_options = google.api_core.client_options.from_dict(NEWLINE client_optionsNEWLINE )NEWLINE if client_options.api_endpoint:NEWLINE api_endpoint = client_options.api_endpointNEWLINENEWLINE # Instantiate the transport.NEWLINE # The transport is responsible for handling serialization andNEWLINE # deserialization and actually sending data to the service.NEWLINE if transport:NEWLINE if callable(transport):NEWLINE self.transport = transport(NEWLINE credentials=credentials,NEWLINE default_class=incident_service_grpc_transport.IncidentServiceGrpcTransport,NEWLINE address=api_endpoint,NEWLINE )NEWLINE else:NEWLINE if credentials:NEWLINE raise ValueError(NEWLINE "Received both a transport instance and "NEWLINE "credentials; these are mutually exclusive."NEWLINE )NEWLINE self.transport = transportNEWLINE else:NEWLINE self.transport = incident_service_grpc_transport.IncidentServiceGrpcTransport(NEWLINE address=api_endpoint, channel=channel, credentials=credentials,NEWLINE )NEWLINENEWLINE if client_info is None:NEWLINE client_info = google.api_core.gapic_v1.client_info.ClientInfo(NEWLINE gapic_version=_GAPIC_LIBRARY_VERSION,NEWLINE )NEWLINE else:NEWLINE client_info.gapic_version = _GAPIC_LIBRARY_VERSIONNEWLINE self._client_info = client_infoNEWLINENEWLINE # Parse out the default settings 
for retry and timeout for each RPCNEWLINE # from the client configuration.NEWLINE # (Ordinarily, these are the defaults specified in the `*_config.py`NEWLINE # file next to this one.)NEWLINE self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(NEWLINE client_config["interfaces"][self._INTERFACE_NAME],NEWLINE )NEWLINENEWLINE # Save a dictionary of cached API call functions.NEWLINE # These are the actual callables which invoke the properNEWLINE # transport methods, wrapped with `wrap_method` to add retry,NEWLINE # timeout, and the like.NEWLINE self._inner_api_calls = {}NEWLINENEWLINE # Service callsNEWLINE def delete_artifact(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Deletes an existing artifact.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.artifact_path('[PROJECT]', '[INCIDENT]', '[ARTIFACT]')NEWLINE >>>NEWLINE >>> client.delete_artifact(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the artifact.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "delete_artifact" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "delete_artifact"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.delete_artifact,NEWLINE default_retry=self._method_configs["DeleteArtifact"].retry,NEWLINE default_timeout=self._method_configs["DeleteArtifact"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.DeleteArtifactRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE self._inner_api_calls["delete_artifact"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def request_incident_role_handover(NEWLINE self,NEWLINE name,NEWLINE new_assignee,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Starts a role handover. The proposed assignee will receive an emailNEWLINE notifying them of the assignment. 
This will fail if a role handover isNEWLINE already pending.NEWLINE Handover to an oncall ladder is not permitted. UseNEWLINE CreateIncidentRoleAssignment instead.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `name`:NEWLINE >>> name = ''NEWLINE >>>NEWLINE >>> # TODO: Initialize `new_assignee`:NEWLINE >>> new_assignee = {}NEWLINE >>>NEWLINE >>> response = client.request_incident_role_handover(name, new_assignee)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. The proposed assignee.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.User`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "request_incident_role_handover" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "request_incident_role_handover"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.request_incident_role_handover,NEWLINE default_retry=self._method_configs["RequestIncidentRoleHandover"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "RequestIncidentRoleHandover"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.RequestIncidentRoleHandoverRequest(NEWLINE name=name, new_assignee=new_assignee,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["request_incident_role_handover"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def confirm_incident_role_handover(NEWLINE self,NEWLINE name,NEWLINE new_assignee,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE 
timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Confirms a role handover. This will fail if the 'proposed_assignee'NEWLINE field of the IncidentRoleAssignment is not equal to the 'new_assignee'NEWLINE field of the request. If the caller is not the new_assignee,NEWLINE ForceIncidentRoleHandover should be used instead.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_role_assignment_path('[PROJECT_ID_OR_NUMBER]', '[INCIDENT_ID]', '[ROLE_ID]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `new_assignee`:NEWLINE >>> new_assignee = {}NEWLINE >>>NEWLINE >>> response = client.confirm_incident_role_handover(name, new_assignee)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. The proposed assignee, who will now be the assignee. This should be theNEWLINE current user; otherwise ForceRoleHandover should be called.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.User`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "confirm_incident_role_handover" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "confirm_incident_role_handover"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.confirm_incident_role_handover,NEWLINE default_retry=self._method_configs["ConfirmIncidentRoleHandover"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "ConfirmIncidentRoleHandover"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ConfirmIncidentRoleHandoverRequest(NEWLINE name=name, new_assignee=new_assignee,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["confirm_incident_role_handover"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def force_incident_role_handover(NEWLINE self,NEWLINE name,NEWLINE new_assignee,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE 
timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Forces a role handover. This will fail if the 'proposed_assignee'NEWLINE field of the IncidentRoleAssignment is not equal to the 'new_assignee'NEWLINE field of the request. If the caller is the new_assignee,NEWLINE ConfirmIncidentRoleHandover should be used instead.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_role_assignment_path('[PROJECT_ID_OR_NUMBER]', '[INCIDENT_ID]', '[ROLE_ID]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `new_assignee`:NEWLINE >>> new_assignee = {}NEWLINE >>>NEWLINE >>> response = client.force_incident_role_handover(name, new_assignee)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. The proposed assignee, who will now be the assignee. This should not beNEWLINE the current user; otherwise ConfirmRoleHandover should be called.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.User`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "force_incident_role_handover" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "force_incident_role_handover"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.force_incident_role_handover,NEWLINE default_retry=self._method_configs["ForceIncidentRoleHandover"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "ForceIncidentRoleHandover"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ForceIncidentRoleHandoverRequest(NEWLINE name=name, new_assignee=new_assignee,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["force_incident_role_handover"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def create_incident(NEWLINE self,NEWLINE incident,NEWLINE parent,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE 
metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a new incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `incident`:NEWLINE >>> incident = {}NEWLINE >>> parent = client.project_path('[PROJECT]')NEWLINE >>>NEWLINE >>> response = client.create_incident(incident, parent)NEWLINENEWLINE Args:NEWLINE incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): Required. The incident to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Incident`NEWLINE parent (str): Required. The resource name of the hosting Stackdriver project whichNEWLINE the incident belongs to. The name is of the formNEWLINE ``projects/{project_id_or_number}`` .NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Incident` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_incident" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_incident"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_incident,NEWLINE default_retry=self._method_configs["CreateIncident"].retry,NEWLINE default_timeout=self._method_configs["CreateIncident"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateIncidentRequest(NEWLINE incident=incident, parent=parent,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_incident"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def get_incident(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Returns an incident by name.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE 
>>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> response = client.get_incident(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Incident` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "get_incident" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "get_incident"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.get_incident,NEWLINE default_retry=self._method_configs["GetIncident"].retry,NEWLINE default_timeout=self._method_configs["GetIncident"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.GetIncidentRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata 
= google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["get_incident"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def search_incidents(NEWLINE self,NEWLINE parent,NEWLINE query=None,NEWLINE page_size=None,NEWLINE time_zone=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Returns a list of incidents.NEWLINE Incidents are ordered by start time, with the most recent incidents first.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.project_path('[PROJECT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.search_incidents(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.search_incidents(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. The resource name of the hosting Stackdriver project which requestedNEWLINE incidents belong to.NEWLINE query (str): An expression that defines which incidents to return.NEWLINENEWLINE Search atoms can be used to match certain specific fields. Otherwise,NEWLINE plain text will match text fields in the incident.NEWLINENEWLINE Search atoms:NEWLINENEWLINE - ``start`` - (timestamp) The time the incident started.NEWLINE - ``stage`` - The stage of the incident, one of detected, triaged,NEWLINE mitigated, resolved, documented, or duplicate (which correspond toNEWLINE values in the Incident.Stage enum). 
These are ordered, soNEWLINE ``stage<resolved`` is equivalent toNEWLINE ``stage:detected OR stage:triaged OR stage:mitigated``.NEWLINE - ``severity`` - (Incident.Severity) The severity of the incident.NEWLINENEWLINE - Supports matching on a specific severity (for example,NEWLINE ``severity:major``) or on a range (for example,NEWLINE ``severity>medium``, ``severity<=minor``, etc.).NEWLINENEWLINE Timestamp formats:NEWLINENEWLINE - yyyy-MM-dd - an absolute date, treated as a calendar-day-wide window.NEWLINE In other words, the "<" operator will match dates before that date,NEWLINE the ">" operator will match dates after that date, and the ":" or "="NEWLINE operators will match the entire day.NEWLINE - Nd (for example, 7d) - a relative number of days ago, treated as aNEWLINE moment in time (as opposed to a day-wide span). A multiple of 24NEWLINE hours ago (as opposed to calendar days). In the case of daylightNEWLINE savings time, it will apply the current timezone to both ends of theNEWLINE range. 
Note that exact matching (for example, ``start:7d``) isNEWLINE unlikely to be useful because that would only match incidents createdNEWLINE precisely at a particular instant in time.NEWLINENEWLINE Examples:NEWLINENEWLINE - ``foo`` - matches incidents containing the word "foo"NEWLINE - ``"foo bar"`` - matches incidents containing the phrase "foo bar"NEWLINE - ``foo bar`` or ``foo AND bar`` - matches incidents containing theNEWLINE words "foo" and "bar"NEWLINE - ``foo -bar`` or ``foo AND NOT bar`` - matches incidents containingNEWLINE the word "foo" but not the word "bar"NEWLINE - ``foo OR bar`` - matches incidents containing the word "foo" or theNEWLINE word "bar"NEWLINE - ``start>2018-11-28`` - matches incidents which started after NovemberNEWLINE 11, 2018.NEWLINE - ``start<=2018-11-28`` - matches incidents which started on or beforeNEWLINE November 11, 2018.NEWLINE - ``start:2018-11-28`` - matches incidents which started on NovemberNEWLINE 11, 2018.NEWLINE - ``start>7d`` - matches incidents which started after the point inNEWLINE time 7*24 hours agoNEWLINE - ``start>180d`` - similar to 7d, but likely to cross the daylightNEWLINE savings time boundary, so the end time will be 1 hour different fromNEWLINE "now."NEWLINE - ``foo AND start>90d AND stage<resolved`` - unresolved incidents fromNEWLINE the past 90 days containing the word "foo"NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE time_zone (str): The time zone name. It should be an IANA TZ name, such asNEWLINE "America/Los_Angeles". For more information, seeNEWLINE https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. 
If no timeNEWLINE zone is specified, the default is UTC.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Incident` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "search_incidents" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "search_incidents"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.search_incidents,NEWLINE default_retry=self._method_configs["SearchIncidents"].retry,NEWLINE default_timeout=self._method_configs["SearchIncidents"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.SearchIncidentsRequest(NEWLINE parent=parent, query=query, page_size=page_size, time_zone=time_zone,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = 
google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["search_incidents"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE items_field="incidents",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def update_incident(NEWLINE self,NEWLINE incident,NEWLINE update_mask=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Updates an existing incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `incident`:NEWLINE >>> incident = {}NEWLINE >>>NEWLINE >>> response = client.update_incident(incident)NEWLINENEWLINE Args:NEWLINE incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): Required. The incident to update with the new values.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Incident`NEWLINE update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Incident` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "update_incident" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "update_incident"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.update_incident,NEWLINE default_retry=self._method_configs["UpdateIncident"].retry,NEWLINE default_timeout=self._method_configs["UpdateIncident"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.UpdateIncidentRequest(NEWLINE incident=incident, update_mask=update_mask,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("incident.name", incident.name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["update_incident"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def search_similar_incidents(NEWLINE self,NEWLINE name,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Returns a list of incidents that are "similar" to 
the specified incidentNEWLINE or signal. This functionality is provided on a best-effort basis and theNEWLINE definition of "similar" is subject to change.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.search_similar_incidents(name):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.search_similar_incidents(name).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the incident or signal, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Result` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "search_similar_incidents" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "search_similar_incidents"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.search_similar_incidents,NEWLINE default_retry=self._method_configs["SearchSimilarIncidents"].retry,NEWLINE default_timeout=self._method_configs["SearchSimilarIncidents"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.SearchSimilarIncidentsRequest(NEWLINE name=name, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["search_similar_incidents"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE 
metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE items_field="results",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def create_annotation(NEWLINE self,NEWLINE parent,NEWLINE annotation,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates an annotation on an existing incident. Only 'text/plain' andNEWLINE 'text/markdown' annotations can be created via this method.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `annotation`:NEWLINE >>> annotation = {}NEWLINE >>>NEWLINE >>> response = client.create_annotation(parent, annotation)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE annotation (Union[dict, ~google.cloud.irm_v1alpha2.types.Annotation]): Required. Only annotation.content is an input argument.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Annotation`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Annotation` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_annotation" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_annotation"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_annotation,NEWLINE default_retry=self._method_configs["CreateAnnotation"].retry,NEWLINE default_timeout=self._method_configs["CreateAnnotation"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateAnnotationRequest(NEWLINE parent=parent, annotation=annotation,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_annotation"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def list_annotations(NEWLINE self,NEWLINE parent,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Lists annotations that are part of an incident. 
No assumptions should beNEWLINE made on the content-type of the annotation returned.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.list_annotations(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.list_annotations(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Annotation` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "list_annotations" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "list_annotations"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.list_annotations,NEWLINE default_retry=self._method_configs["ListAnnotations"].retry,NEWLINE default_timeout=self._method_configs["ListAnnotations"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ListAnnotationsRequest(NEWLINE parent=parent, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["list_annotations"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE 
request=request,NEWLINE items_field="annotations",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def create_tag(NEWLINE self,NEWLINE parent,NEWLINE tag,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a tag on an existing incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `tag`:NEWLINE >>> tag = {}NEWLINE >>>NEWLINE >>> response = client.create_tag(parent, tag)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE tag (Union[dict, ~google.cloud.irm_v1alpha2.types.Tag]): Required. Tag to create. Only tag.display_name is an input argument.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Tag`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Tag` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_tag" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_tag"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_tag,NEWLINE default_retry=self._method_configs["CreateTag"].retry,NEWLINE default_timeout=self._method_configs["CreateTag"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateTagRequest(parent=parent, tag=tag,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_tag"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def delete_tag(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Deletes an existing tag.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE 
>>>NEWLINE >>> name = client.tag_path('[PROJECT]', '[INCIDENT]', '[TAG]')NEWLINE >>>NEWLINE >>> client.delete_tag(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the tag.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "delete_tag" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "delete_tag"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.delete_tag,NEWLINE default_retry=self._method_configs["DeleteTag"].retry,NEWLINE default_timeout=self._method_configs["DeleteTag"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.DeleteTagRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE self._inner_api_calls["delete_tag"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE 
)NEWLINENEWLINE def list_tags(NEWLINE self,NEWLINE parent,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Lists tags that are part of an incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.list_tags(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.list_tags(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Tag` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "list_tags" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "list_tags"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.list_tags,NEWLINE default_retry=self._method_configs["ListTags"].retry,NEWLINE default_timeout=self._method_configs["ListTags"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ListTagsRequest(NEWLINE parent=parent, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["list_tags"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE items_field="tags",NEWLINE 
request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def create_signal(NEWLINE self,NEWLINE parent,NEWLINE signal,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a new signal.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.project_path('[PROJECT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `signal`:NEWLINE >>> signal = {}NEWLINE >>>NEWLINE >>> response = client.create_signal(parent, signal)NEWLINENEWLINE Args:NEWLINE parent (str): Required. The resource name of the hosting Stackdriver project which requestedNEWLINE signal belongs to.NEWLINE signal (Union[dict, ~google.cloud.irm_v1alpha2.types.Signal]): Required. The signal to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Signal`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_signal" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_signal"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_signal,NEWLINE default_retry=self._method_configs["CreateSignal"].retry,NEWLINE default_timeout=self._method_configs["CreateSignal"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateSignalRequest(NEWLINE parent=parent, signal=signal,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_signal"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def search_signals(NEWLINE self,NEWLINE parent,NEWLINE query=None,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Lists signals that are part of an incident.NEWLINE Signals are returned in reverse 
chronological order.NEWLINE Note that search should not be relied on for critical functionality. ItNEWLINE has lower availability guarantees and might fail to return valid results.NEWLINE Returned results might include stale or extraneous entries.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.project_path('[PROJECT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.search_signals(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.search_signals(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. The resource name of the hosting Stackdriver project which requestedNEWLINE incidents belong to.NEWLINE query (str): An expression that defines which signals to return.NEWLINENEWLINE Search atoms can be used to match certain specific fields. Otherwise,NEWLINE plain text will match text fields in the signal.NEWLINENEWLINE Search atoms:NEWLINENEWLINE - ``start`` - (timestamp) The time the signal was created.NEWLINE - ``title`` - The title of the signal.NEWLINE - ``signal_state`` - ``open`` or ``closed``. State of the signal.NEWLINE (e.g., ``signal_state:open``)NEWLINENEWLINE Timestamp formats:NEWLINENEWLINE - yyyy-MM-dd - an absolute date, treated as a calendar-day-wide window.NEWLINE In other words, the "<" operator will match dates before that date,NEWLINE the ">" operator will match dates after that date, and the ":"NEWLINE operator will match the entire day.NEWLINE - yyyy-MM-ddTHH:mm - Same as above, but with minute resolution.NEWLINE - yyyy-MM-ddTHH:mm:ss - Same as above, but with second resolution.NEWLINE - Nd (e.g. 
7d) - a relative number of days ago, treated as a moment inNEWLINE time (as opposed to a day-wide span) a multiple of 24 hours ago (asNEWLINE opposed to calendar days). In the case of daylight savings time, itNEWLINE will apply the current timezone to both ends of the range. Note thatNEWLINE exact matching (e.g. ``start:7d``) is unlikely to be useful becauseNEWLINE that would only match signals created precisely at a particularNEWLINE instant in time.NEWLINENEWLINE The absolute timestamp formats (everything starting with a year) canNEWLINE optionally be followed with a UTC offset in +/-hh:mm format. Also, theNEWLINE 'T' separating dates and times can optionally be replaced with a space.NEWLINE Note that any timestamp containing a space or colon will need to beNEWLINE quoted.NEWLINENEWLINE Examples:NEWLINENEWLINE - ``foo`` - matches signals containing the word "foo"NEWLINE - ``"foo bar"`` - matches signals containing the phrase "foo bar"NEWLINE - ``foo bar`` or ``foo AND bar`` - matches signals containing the wordsNEWLINE "foo" and "bar"NEWLINE - ``foo -bar`` or ``foo AND NOT bar`` - matches signals containing theNEWLINE word "foo" but not the word "bar"NEWLINE - ``foo OR bar`` - matches signals containing the word "foo" or theNEWLINE word "bar"NEWLINE - ``start>2018-11-28`` - matches signals which started after NovemberNEWLINE 11, 2018.NEWLINE - ``start<=2018-11-28`` - matches signals which started on or beforeNEWLINE November 11, 2018.NEWLINE - ``start:2018-11-28`` - matches signals which started on November 11,NEWLINE 2018.NEWLINE - ``start>"2018-11-28 01:02:03+04:00"`` - matches signals which startedNEWLINE after November 11, 2018 at 1:02:03 AM according to the UTC+04 timeNEWLINE zone.NEWLINE - ``start>7d`` - matches signals which started after the point in timeNEWLINE 7*24 hours agoNEWLINE - ``start>180d`` - similar to 7d, but likely to cross the daylightNEWLINE savings time boundary, so the end time will be 1 hour different fromNEWLINE "now."NEWLINE - ``foo 
AND start>90d AND stage<resolved`` - unresolved signals fromNEWLINE the past 90 days containing the word "foo"NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Signal` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "search_signals" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "search_signals"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.search_signals,NEWLINE default_retry=self._method_configs["SearchSignals"].retry,NEWLINE default_timeout=self._method_configs["SearchSignals"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = 
incidents_service_pb2.SearchSignalsRequest(NEWLINE parent=parent, query=query, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["search_signals"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE items_field="signals",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def lookup_signal(NEWLINE self,NEWLINE cscc_finding=None,NEWLINE stackdriver_notification_id=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Finds a signal by other unique IDs.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> response = client.lookup_signal()NEWLINENEWLINE Args:NEWLINE cscc_finding (str): Required. Full resource name of the CSCC finding id this signal refers to (e.g.NEWLINE "organizations/abc/sources/123/findings/xyz")NEWLINE stackdriver_notification_id (str): The ID from the Stackdriver Alerting notification.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "lookup_signal" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "lookup_signal"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.lookup_signal,NEWLINE default_retry=self._method_configs["LookupSignal"].retry,NEWLINE default_timeout=self._method_configs["LookupSignal"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE # Sanity check: We have some fields which are mutually exclusive;NEWLINE # raise ValueError if more than one is sent.NEWLINE google.api_core.protobuf_helpers.check_oneof(NEWLINE cscc_finding=cscc_finding,NEWLINE stackdriver_notification_id=stackdriver_notification_id,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.LookupSignalRequest(NEWLINE cscc_finding=cscc_finding,NEWLINE stackdriver_notification_id=stackdriver_notification_id,NEWLINE )NEWLINE return self._inner_api_calls["lookup_signal"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def get_signal(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Returns a signal by name.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = 
irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.signal_path('[PROJECT]', '[SIGNAL]')NEWLINE >>>NEWLINE >>> response = client.get_signal(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the Signal resource, for example,NEWLINE "projects/{project_id_or_number}/signals/{signal_id}".NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "get_signal" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "get_signal"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.get_signal,NEWLINE default_retry=self._method_configs["GetSignal"].retry,NEWLINE default_timeout=self._method_configs["GetSignal"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.GetSignalRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = 
google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["get_signal"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def update_signal(NEWLINE self,NEWLINE signal,NEWLINE update_mask=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Updates an existing signal (for example, to assign/unassign it to anNEWLINE incident).NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `signal`:NEWLINE >>> signal = {}NEWLINE >>>NEWLINE >>> response = client.update_signal(signal)NEWLINENEWLINE Args:NEWLINE signal (Union[dict, ~google.cloud.irm_v1alpha2.types.Signal]): Required. The signal to update with the new values.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Signal`NEWLINE update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "update_signal" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "update_signal"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.update_signal,NEWLINE default_retry=self._method_configs["UpdateSignal"].retry,NEWLINE default_timeout=self._method_configs["UpdateSignal"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.UpdateSignalRequest(NEWLINE signal=signal, update_mask=update_mask,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("signal.name", signal.name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["update_signal"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def escalate_incident(NEWLINE self,NEWLINE incident,NEWLINE update_mask=None,NEWLINE subscriptions=None,NEWLINE tags=None,NEWLINE roles=None,NEWLINE artifacts=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE 
):NEWLINE """NEWLINE Escalates an incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `incident`:NEWLINE >>> incident = {}NEWLINE >>>NEWLINE >>> response = client.escalate_incident(incident)NEWLINENEWLINE Args:NEWLINE incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): Required. The incident to escalate with the new values.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Incident`NEWLINE update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`NEWLINE subscriptions (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Subscription]]): Subscriptions to add or update. Existing subscriptions with the sameNEWLINE channel and address as a subscription in the list will be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Subscription`NEWLINE tags (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Tag]]): Tags to add. Tags identical to existing tags will be ignored.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Tag`NEWLINE roles (list[Union[dict, ~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment]]): Roles to add or update. 
Existing roles with the same type (andNEWLINE title, for TYPE_OTHER roles) will be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment`NEWLINE artifacts (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Artifact]]): Artifacts to add. All artifacts are added without checking for duplicates.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Artifact`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.EscalateIncidentResponse` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "escalate_incident" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "escalate_incident"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.escalate_incident,NEWLINE default_retry=self._method_configs["EscalateIncident"].retry,NEWLINE default_timeout=self._method_configs["EscalateIncident"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = 
incidents_service_pb2.EscalateIncidentRequest(NEWLINE incident=incident,NEWLINE update_mask=update_mask,NEWLINE subscriptions=subscriptions,NEWLINE tags=tags,NEWLINE roles=roles,NEWLINE artifacts=artifacts,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("incident.name", incident.name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["escalate_incident"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def create_artifact(NEWLINE self,NEWLINE parent,NEWLINE artifact,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a new artifact.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `artifact`:NEWLINE >>> artifact = {}NEWLINE >>>NEWLINE >>> response = client.create_artifact(parent, artifact)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE artifact (Union[dict, ~google.cloud.irm_v1alpha2.types.Artifact]): Required. The artifact to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Artifact`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. 
If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Artifact` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_artifact" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_artifact"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_artifact,NEWLINE default_retry=self._method_configs["CreateArtifact"].retry,NEWLINE default_timeout=self._method_configs["CreateArtifact"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateArtifactRequest(NEWLINE parent=parent, artifact=artifact,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_artifact"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def list_artifacts(NEWLINE self,NEWLINE parent,NEWLINE page_size=None,NEWLINE 
retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Returns a list of artifacts for an incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.list_artifacts(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.list_artifacts(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Artifact` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "list_artifacts" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "list_artifacts"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.list_artifacts,NEWLINE default_retry=self._method_configs["ListArtifacts"].retry,NEWLINE default_timeout=self._method_configs["ListArtifacts"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ListArtifactsRequest(NEWLINE parent=parent, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["list_artifacts"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE 
items_field="artifacts",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def update_artifact(NEWLINE self,NEWLINE artifact,NEWLINE update_mask=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Updates an existing artifact.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `artifact`:NEWLINE >>> artifact = {}NEWLINE >>>NEWLINE >>> response = client.update_artifact(artifact)NEWLINENEWLINE Args:NEWLINE artifact (Union[dict, ~google.cloud.irm_v1alpha2.types.Artifact]): Required. The artifact to update with the new values.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Artifact`NEWLINE update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Artifact` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "update_artifact" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "update_artifact"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.update_artifact,NEWLINE default_retry=self._method_configs["UpdateArtifact"].retry,NEWLINE default_timeout=self._method_configs["UpdateArtifact"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.UpdateArtifactRequest(NEWLINE artifact=artifact, update_mask=update_mask,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("artifact.name", artifact.name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["update_artifact"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def send_shift_handoff(NEWLINE self,NEWLINE parent,NEWLINE recipients,NEWLINE subject,NEWLINE cc=None,NEWLINE notes_content_type=None,NEWLINE notes_content=None,NEWLINE incidents=None,NEWLINE preview_only=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE 
timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Sends a summary of the shift for oncall handoff.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.project_path('[PROJECT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `recipients`:NEWLINE >>> recipients = []NEWLINE >>>NEWLINE >>> # TODO: Initialize `subject`:NEWLINE >>> subject = ''NEWLINE >>>NEWLINE >>> response = client.send_shift_handoff(parent, recipients, subject)NEWLINENEWLINE Args:NEWLINE parent (str): Required. The resource name of the Stackdriver project that theNEWLINE handoff is being sent from. for example,NEWLINE ``projects/{project_id_or_number}``NEWLINE recipients (list[str]): Required. Email addresses of the recipients of the handoff, for example,NEWLINE "user@example.com". Must contain at least one entry.NEWLINE subject (str): Required. The subject of the email.NEWLINE cc (list[str]): Optional. Email addresses that should be CC'd on the handoff.NEWLINE notes_content_type (str): Content type string, for example, 'text/plain' or 'text/html'.NEWLINE notes_content (str): Optional. Additional notes to be included in the handoff.NEWLINE incidents (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]]): Optional. The set of incidents that should be included in the handoff.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Incident`NEWLINE preview_only (bool): If set to true a ShiftHandoffResponse will be returned but the handoffNEWLINE will not actually be sent.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. 
If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.SendShiftHandoffResponse` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "send_shift_handoff" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "send_shift_handoff"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.send_shift_handoff,NEWLINE default_retry=self._method_configs["SendShiftHandoff"].retry,NEWLINE default_timeout=self._method_configs["SendShiftHandoff"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.SendShiftHandoffRequest(NEWLINE parent=parent,NEWLINE recipients=recipients,NEWLINE subject=subject,NEWLINE cc=cc,NEWLINE notes_content_type=notes_content_type,NEWLINE notes_content=notes_content,NEWLINE incidents=incidents,NEWLINE preview_only=preview_only,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return 
self._inner_api_calls["send_shift_handoff"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def create_subscription(NEWLINE self,NEWLINE parent,NEWLINE subscription,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a new subscription.NEWLINE This will fail if:NEWLINE a. there are too many (50) subscriptions in the incident alreadyNEWLINE b. a subscription using the given channel already existsNEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `subscription`:NEWLINE >>> subscription = {}NEWLINE >>>NEWLINE >>> response = client.create_subscription(parent, subscription)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE subscription (Union[dict, ~google.cloud.irm_v1alpha2.types.Subscription]): Required. The subscription to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Subscription`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Subscription` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_subscription" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_subscription"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_subscription,NEWLINE default_retry=self._method_configs["CreateSubscription"].retry,NEWLINE default_timeout=self._method_configs["CreateSubscription"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateSubscriptionRequest(NEWLINE parent=parent, subscription=subscription,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_subscription"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def update_subscription(NEWLINE self,NEWLINE subscription,NEWLINE update_mask=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Updates a 
subscription.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `subscription`:NEWLINE >>> subscription = {}NEWLINE >>>NEWLINE >>> response = client.update_subscription(subscription)NEWLINENEWLINE Args:NEWLINE subscription (Union[dict, ~google.cloud.irm_v1alpha2.types.Subscription]): Required. The subscription to update, with new values.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Subscription`NEWLINE update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Subscription` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "update_subscription" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "update_subscription"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.update_subscription,NEWLINE default_retry=self._method_configs["UpdateSubscription"].retry,NEWLINE default_timeout=self._method_configs["UpdateSubscription"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.UpdateSubscriptionRequest(NEWLINE subscription=subscription, update_mask=update_mask,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("subscription.name", subscription.name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["update_subscription"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def list_subscriptions(NEWLINE self,NEWLINE parent,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE 
Returns a list of subscriptions for an incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.list_subscriptions(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.list_subscriptions(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Subscription` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "list_subscriptions" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "list_subscriptions"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.list_subscriptions,NEWLINE default_retry=self._method_configs["ListSubscriptions"].retry,NEWLINE default_timeout=self._method_configs["ListSubscriptions"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ListSubscriptionsRequest(NEWLINE parent=parent, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["list_subscriptions"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE 
),NEWLINE request=request,NEWLINE items_field="subscriptions",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def delete_subscription(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Deletes an existing subscription.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.subscription_path('[PROJECT]', '[INCIDENT]', '[SUBSCRIPTION]')NEWLINE >>>NEWLINE >>> client.delete_subscription(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the subscription.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "delete_subscription" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "delete_subscription"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.delete_subscription,NEWLINE default_retry=self._method_configs["DeleteSubscription"].retry,NEWLINE default_timeout=self._method_configs["DeleteSubscription"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.DeleteSubscriptionRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE self._inner_api_calls["delete_subscription"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def create_incident_role_assignment(NEWLINE self,NEWLINE parent,NEWLINE incident_role_assignment,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a role assignment on an existing incident. 
Normally, the user fieldNEWLINE will be set when assigning a role to oneself, and the next field will beNEWLINE set when proposing another user as the assignee. Setting the next fieldNEWLINE directly to a user other than oneself is equivalent to proposing andNEWLINE force-assigning the role to the user.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `incident_role_assignment`:NEWLINE >>> incident_role_assignment = {}NEWLINE >>>NEWLINE >>> response = client.create_incident_role_assignment(parent, incident_role_assignment)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE incident_role_assignment (Union[dict, ~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment]): Required. Role assignment to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_incident_role_assignment" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_incident_role_assignment"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_incident_role_assignment,NEWLINE default_retry=self._method_configs[NEWLINE "CreateIncidentRoleAssignment"NEWLINE ].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "CreateIncidentRoleAssignment"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateIncidentRoleAssignmentRequest(NEWLINE parent=parent, incident_role_assignment=incident_role_assignment,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_incident_role_assignment"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def delete_incident_role_assignment(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE 
timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Deletes an existing role assignment.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> client.delete_incident_role_assignment(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "delete_incident_role_assignment" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "delete_incident_role_assignment"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.delete_incident_role_assignment,NEWLINE default_retry=self._method_configs[NEWLINE "DeleteIncidentRoleAssignment"NEWLINE ].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "DeleteIncidentRoleAssignment"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.DeleteIncidentRoleAssignmentRequest(name=name,)NEWLINE if 
metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        self._inner_api_calls["delete_incident_role_assignment"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )

    def list_incident_role_assignments(
        self,
        parent,
        page_size=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Lists role assignments that are part of an incident.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.list_incident_role_assignments(parent):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.list_incident_role_assignments(parent).pages:
            ...     for element in page:
            ...         # process element
            ...         pass

        Args:
            parent (str): Required. Resource name of the incident, for example,
                "projects/{project_id_or_number}/incidents/{incident_id}".
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instances.
            You can also iterate over the pages of the response
            using its `pages` property.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        # The wrapped callable is cached in ``self._inner_api_calls`` so the
        # wrapping only happens on the first invocation of this method.
        if "list_incident_role_assignments" not in self._inner_api_calls:
            self._inner_api_calls[
                "list_incident_role_assignments"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.list_incident_role_assignments,
                default_retry=self._method_configs["ListIncidentRoleAssignments"].retry,
                default_timeout=self._method_configs[
                    "ListIncidentRoleAssignments"
                ].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.ListIncidentRoleAssignmentsRequest(
            parent=parent, page_size=page_size,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        # Attach a gRPC routing header derived from the request's resource
        # name; generated boilerplate — the except guards the (unlikely) case
        # that the field accessor raises AttributeError.
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        # Lazily pages through results; nothing is sent until iteration begins.
        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["list_incident_role_assignments"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="incident_role_assignments",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator

    def cancel_incident_role_handover(
        self,
        name,
        new_assignee,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Cancels a role handover. This will fail if the 'proposed_assignee'
        field of the IncidentRoleAssignment is not equal to the 'new_assignee'
        field of the request.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> name = client.incident_role_assignment_path('[PROJECT_ID_OR_NUMBER]', '[INCIDENT_ID]', '[ROLE_ID]')
            >>>
            >>> # TODO: Initialize `new_assignee`:
            >>> new_assignee = {}
            >>>
            >>> response = client.cancel_incident_role_handover(name, new_assignee)

        Args:
            name (str): Required. Resource name of the role assignment.
            new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. Person who was proposed as the next assignee (i.e.
                IncidentRoleAssignment.proposed_assignee) and whose proposal is being
                cancelled.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.User`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "cancel_incident_role_handover" not in self._inner_api_calls:
            self._inner_api_calls[
                "cancel_incident_role_handover"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.cancel_incident_role_handover,
                default_retry=self._method_configs["CancelIncidentRoleHandover"].retry,
                default_timeout=self._method_configs[
                    "CancelIncidentRoleHandover"
                ].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.CancelIncidentRoleHandoverRequest(
            name=name, new_assignee=new_assignee,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        # Attach the resource-name routing header (same generated pattern as above).
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["cancel_incident_role_handover"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
import os
import matplotlib.pyplot as plt
import numpy as np
import subprocess
from tempfile import TemporaryDirectory

# Location of this file; the compiled sampler binary is expected in build/.
SEMIHDP_HOME_DIR = os.path.dirname(os.path.realpath(__file__))
SEMIHDP_EXEC = os.path.join(SEMIHDP_HOME_DIR, 'build/run_from_file')
BASE_CMD = SEMIHDP_EXEC + ' ' + "{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11}"
PARAMS_FILE = os.path.join(SEMIHDP_HOME_DIR, 'semihdp_params.asciipb')


def run_mcmc_from_files(data_path, dens_grid_path, output_path, seed,
                        niter, nburn, thin, update_c="full"):
    """Invoke the semihdp sampler executable on data already written to disk.

    Args:
        data_path: path to the CSV of (group_id, value) rows.
        dens_grid_path: path to the CSV grid on which densities are evaluated.
        output_path: directory where the sampler writes its output files.
        seed: RNG seed forwarded to the executable.
        niter, nburn, thin: MCMC iteration, burn-in and thinning settings.
        update_c: update rule for the restaurant allocations
            ("full", "metro_base" or "metro_dist").
    """
    chainfile = os.path.join(output_path, "chains.recordio")
    c_file = os.path.join(output_path, "c.txt")
    latent_vars_file = os.path.join(output_path, "latent_vars.txt")
    dens_path = os.path.join(output_path, "dens/")
    os.makedirs(dens_path)
    cmd = BASE_CMD.format(
        data_path, PARAMS_FILE, chainfile, c_file,
        latent_vars_file, dens_grid_path,
        dens_path, seed, niter, nburn, thin, update_c)

    cmd = cmd.split(" ")
    subprocess.call(cmd, cwd=SEMIHDP_HOME_DIR)
    return


def load_output(output_path, ngroups):
    """Load the sampler's output files.

    Returns the restaurant allocations, the latent variables and a list of
    per-group log-density evaluations (one array per group).
    """
    c_file = os.path.join(output_path, "c.txt")
    latent_vars_file = os.path.join(output_path, "latent_vars.txt")
    dens_path = os.path.join(output_path, "dens/")

    c = np.loadtxt(c_file, delimiter=",")
    latent_vars = np.loadtxt(latent_vars_file, delimiter=",")
    log_dens = []
    for i in range(ngroups):
        fname = os.path.join(dens_path, "group_{0}.csv".format(i))
        log_dens.append(np.loadtxt(fname, delimiter=","))
    return c, latent_vars, log_dens


def run_mcmc(data: list, dens_grid: np.ndarray, seed: int,
             niter=1000, nburn=1000, thin=10, update_c="full"):
    """
    Runs the semihpd sampler by calling the executable from a subprocess.
    Arguments
    ---------
    data: list of np.arrays, each entry is the data in one of the groups
    dens_grid: np.array, the grid on which to evaluate the density of all the groups
    seed: int, the seed for the random number generator
    niter: int, number of iterations to run the sampler
    nburn: int, number of burn-in iterations
    thin: int, thinning factor
    update_c: str, either "full", "metro_base" or "metro_dist".
        The update rule for the restourants allocations

    The sampler will be ran for niter + nburn iterations.

    Returns
    -------
    rest_allocs: np.array, of dimension [num_iter, num_groups]
        The parameters c_i's for each iteration
    latent_vars: np.array, of dimension [num_iter, 4] the colums are
        group_id, datum_id, mean (resp. variance) of the latent variable
        associated to the observation
    log_dens: list of np.arrays, each entry is the evaluation of log_density of
        one of the groups in each of the mcmc iterations
    """
    ngroups = len(data)
    # Stack all groups into (group_id, value) rows for the executable.
    data_ = np.vstack([
        np.column_stack([np.ones_like(x) * i, x]) for i, x in enumerate(data)])
    with TemporaryDirectory(prefix=SEMIHDP_HOME_DIR + "/") as tmpdir:
        data_path = os.path.join(tmpdir, 'data.txt')
        np.savetxt(data_path, data_, delimiter=",")
        grid_path = os.path.join(tmpdir, 'grid.txt')
        np.savetxt(grid_path, dens_grid, delimiter=",")

        # BUG FIX: `seed` was previously omitted from this call, shifting
        # every following positional argument by one slot (niter was consumed
        # as the seed, ..., update_c as thin) and discarding the user's seed.
        run_mcmc_from_files(data_path, grid_path, tmpdir, seed,
                            niter, nburn, thin, update_c)

        return load_output(tmpdir, ngroups)


if __name__ == "__main__":
    seed = 132312
    data = [np.random.normal(0, 1, size=100),
            np.random.normal(0, 1, size=100)]
    dens_grid = np.linspace(-5, 5, 100)
    c, latent_vars, log_dens = run_mcmc(data, dens_grid, seed)
    plt.plot(dens_grid, np.exp(np.mean(log_dens[0], axis=0)))
    plt.show()
#!/usr/bin/env python3

"""
If we list all the natural numbers below 10 that are multiples of 3 or 5,
we get 3, 5, 6 and 9. The sum of these multiples is 23.

Find the sum of all the multiples of 3 or 5 below 1000.

https://projecteuler.net/problem=1

"""


def multiple3or5(numbers):
    """Return the sum of the values in *numbers* divisible by 3 or 5.

    Accepts any iterable of ints; an empty iterable sums to 0.
    (Parameter renamed from ``input`` to stop shadowing the builtin;
    positional callers are unaffected.)
    """
    return sum(n for n in numbers if n % 3 == 0 or n % 5 == 0)


def sum_of_sequence(n):
    """Return 1 + 2 + ... + n using the closed form N(N+1)/2.

    Fixes two defects of the original: the guard tested the *global*
    ``input`` instead of ``n``, and true division returned a float;
    ``//`` keeps the (always exact) result an int.
    """
    return n * (n + 1) // 2


first_ten = list(range(10))
below_1000 = list(range(1000))

# Brute force: 23 for numbers below 10, 233168 below 1000.
print(multiple3or5(first_ten))
print(multiple3or5(below_1000))

# Inclusion-exclusion: multiples of 3 plus multiples of 5, minus the
# multiples of 15 counted twice.
print("Using Arithmetic")
print(3 * sum_of_sequence(999 // 3) + 5 * sum_of_sequence(999 // 5)
      - 15 * sum_of_sequence(999 // 15))
# Test repository of dagstermill solids/pipelines used by the dagstermill
# test suite. Each `test_nb_solid`/`test_nb_op` wraps a notebook from the
# local notebooks/ directory; pipelines exercise different combinations of
# outputs, configs, resources and failure modes.
import os
import pickle
import uuid

import dagstermill
from dagstermill.io_managers import local_output_notebook_io_manager

from dagster import (
    Field,
    FileHandle,
    InputDefinition,
    Int,
    List,
    ModeDefinition,
    OutputDefinition,
    ResourceDefinition,
    String,
    composite_solid,
    fs_io_manager,
    job,
    pipeline,
    repository,
    resource,
    solid,
)
from dagster.core.storage.file_manager import local_file_manager
from dagster.utils import PICKLE_PROTOCOL, file_relative_path

# Optional dependencies: the tutorial pipeline below is only defined when
# all three are importable.
try:
    from dagster_pandas import DataFrame

    DAGSTER_PANDAS_PRESENT = True
except ImportError:
    DAGSTER_PANDAS_PRESENT = False

try:
    import sklearn as _

    SKLEARN_PRESENT = True
except ImportError:
    SKLEARN_PRESENT = False

try:
    import matplotlib as _

    MATPLOTLIB_PRESENT = True
except ImportError:
    MATPLOTLIB_PRESENT = False


class BasicTest:
    # Minimal picklable object used as a round-trip payload in notebook tests.
    def __init__(self, x):
        self.x = x

    def __repr__(self):
        return "BasicTest: {x}".format(x=str(self.x))


def nb_test_path(name):
    # Resolve a notebook name to its .ipynb path relative to this file.
    return file_relative_path(__file__, f"notebooks/{name}.ipynb")


def test_nb_solid(name, **kwargs):
    # Helper: build a dagstermill solid for notebook `name`; defaults to a
    # single optional output unless the caller supplies output_defs.
    output_defs = kwargs.pop("output_defs", [OutputDefinition(is_required=False)])

    return dagstermill.define_dagstermill_solid(
        name=name,
        notebook_path=nb_test_path(name),
        output_notebook_name="notebook",
        output_defs=output_defs,
        **kwargs,
    )


def test_nb_op(name, path, **kwargs):
    # Same as test_nb_solid but for the op-based API; the notebook path is
    # passed explicitly rather than derived from the name.
    output_defs = kwargs.pop("output_defs", [OutputDefinition(is_required=False)])

    return dagstermill.define_dagstermill_op(
        name=name,
        notebook_path=path,
        output_notebook_name="notebook",
        output_defs=output_defs,
        **kwargs,
    )


default_mode_defs = [
    ModeDefinition(
        resource_defs={
            "output_notebook_io_manager": local_output_notebook_io_manager,
            "io_manager": fs_io_manager,
        }
    )
]


hello_world = test_nb_solid("hello_world", output_defs=[])


@pipeline(mode_defs=default_mode_defs)
def hello_world_pipeline():
    hello_world()


hello_world_op = test_nb_op(
    "hello_world_op",
    nb_test_path("hello_world"),
    output_defs=[],
)


def build_hello_world_job():
    # Job-API equivalent of hello_world_pipeline; built lazily so the job
    # is only constructed when a test asks for it.
    @job(
        resource_defs={
            "output_notebook_io_manager": local_output_notebook_io_manager,
        }
    )
    def hello_world_job():
        hello_world_op()

    return hello_world_job


hello_world_with_custom_tags_and_description = dagstermill.define_dagstermill_solid(
    name="hello_world_custom",
    notebook_path=nb_test_path("hello_world"),
    output_notebook_name="notebook",
    tags={"foo": "bar"},
    description="custom description",
)


@pipeline(mode_defs=default_mode_defs)
def hello_world_with_custom_tags_and_description_pipeline():
    hello_world_with_custom_tags_and_description()


hello_world_config = test_nb_solid(
    "hello_world_config",
    config_schema={"greeting": Field(String, is_required=False, default_value="hello")},
)


goodbye_config = dagstermill.define_dagstermill_solid(
    name="goodbye_config",
    notebook_path=nb_test_path("print_dagstermill_context_solid_config"),
    output_notebook_name="notebook",
    config_schema={"farewell": Field(String, is_required=False, default_value="goodbye")},
)


@pipeline(mode_defs=default_mode_defs)
def hello_world_config_pipeline():
    hello_world_config()
    goodbye_config()


@pipeline(mode_defs=default_mode_defs)
def alias_config_pipeline():
    hello_world_config.alias("aliased_greeting")()
    goodbye_config.alias("aliased_goodbye")()


@solid(input_defs=[InputDefinition("notebook")])
def load_notebook(notebook):
    # Pass-through solid used to consume the output-notebook artifact.
    return notebook


@pipeline(mode_defs=default_mode_defs)
def hello_world_with_output_notebook_pipeline():
    notebook = hello_world()
    load_notebook(notebook)


hello_world_no_output_notebook_no_file_manager = dagstermill.define_dagstermill_solid(
    name="hello_world_no_output_notebook_no_file_manager",
    notebook_path=nb_test_path("hello_world"),
)


@pipeline
def hello_world_no_output_notebook_no_file_manager_pipeline():
    hello_world_no_output_notebook_no_file_manager()


hello_world_no_output_notebook = dagstermill.define_dagstermill_solid(
    name="hello_world_no_output_notebook",
    notebook_path=nb_test_path("hello_world"),
)


@pipeline(mode_defs=default_mode_defs)
def hello_world_no_output_notebook_pipeline():
    hello_world_no_output_notebook()


hello_world_output = test_nb_solid("hello_world_output", output_defs=[OutputDefinition(str)])


@pipeline(mode_defs=default_mode_defs)
def hello_world_output_pipeline():
    hello_world_output()


hello_world_explicit_yield = test_nb_solid(
    "hello_world_explicit_yield", output_defs=[OutputDefinition(str)]
)


@pipeline(mode_defs=default_mode_defs)
def hello_world_explicit_yield_pipeline():
    hello_world_explicit_yield()


hello_logging = test_nb_solid("hello_logging")


@pipeline(mode_defs=default_mode_defs)
def hello_logging_pipeline():
    hello_logging()


add_two_numbers = test_nb_solid(
    "add_two_numbers",
    input_defs=[
        InputDefinition(name="a", dagster_type=Int),
        InputDefinition(name="b", dagster_type=Int),
    ],
    output_defs=[OutputDefinition(Int)],
)


mult_two_numbers = test_nb_solid(
    "mult_two_numbers",
    input_defs=[
        InputDefinition(name="a", dagster_type=Int),
        InputDefinition(name="b", dagster_type=Int),
    ],
    output_defs=[OutputDefinition(Int)],
)


@solid
def return_one():
    return 1


@solid
def return_two():
    return 2


@solid
def return_three():
    return 3


@solid
def return_four():
    return 4


@pipeline(mode_defs=default_mode_defs)
def add_pipeline():
    add_two_numbers(return_one(), return_two())


@pipeline(mode_defs=default_mode_defs)
def double_add_pipeline():
    add_two_numbers.alias("add_two_numbers_1")(return_one(), return_two())
    add_two_numbers.alias("add_two_numbers_2")(return_three(), return_four())


@solid(input_defs=[], config_schema=Int)
def load_constant(context):
    # The constant comes from run config rather than from an input.
    return context.solid_config


@pipeline(mode_defs=default_mode_defs)
def notebook_dag_pipeline():
    # Diamond-shaped DAG of notebook solids fed by configured constants.
    a = load_constant.alias("load_a")()
    b = load_constant.alias("load_b")()
    num, _ = add_two_numbers(a, b)
    mult_two_numbers(num, b)


error_notebook = test_nb_solid("error_notebook")


@pipeline(mode_defs=default_mode_defs)
def error_pipeline():
    error_notebook()


if DAGSTER_PANDAS_PRESENT and SKLEARN_PRESENT and MATPLOTLIB_PRESENT:

    clean_data = test_nb_solid("clean_data", output_defs=[OutputDefinition(DataFrame)])

    # FIXME add an output to this
    tutorial_LR = test_nb_solid(
        "tutorial_LR",
        input_defs=[InputDefinition(name="df", dagster_type=DataFrame)],
    )

    tutorial_RF = test_nb_solid(
        "tutorial_RF",
        input_defs=[InputDefinition(name="df", dagster_type=DataFrame)],
    )

    @pipeline(mode_defs=default_mode_defs)
    def tutorial_pipeline():
        dfr, _ = clean_data()
        # FIXME get better names for these
        tutorial_LR(dfr)
        tutorial_RF(dfr)


@solid("resource_solid", required_resource_keys={"list"})
def resource_solid(context):
    # Appends to the shared "list" resource so tests can observe ordering.
    context.resources.list.append("Hello, solid!")
    return True


hello_world_resource = test_nb_solid(
    "hello_world_resource",
    input_defs=[InputDefinition("nonce")],
    required_resource_keys={"list"},
)

hello_world_resource_with_exception = test_nb_solid(
    "hello_world_resource_with_exception",
    input_defs=[InputDefinition("nonce")],
    required_resource_keys={"list"},
)


class FilePickleList:
    """A list persisted to a pickle file; every append re-reads then rewrites.

    Used as the backing store for the "list" resource in the "prod" mode so
    that appends survive across processes.
    """

    # This is not thread- or anything else-safe
    def __init__(self, path):
        self.closed = False
        self.id = str(uuid.uuid4())[-6:]  # short tag distinguishing instances
        self.path = path
        self.list = []
        if not os.path.exists(self.path):
            self.write()
        self.read()
        self.open()

    def open(self):
        self.read()
        self.append("Opened")

    def append(self, obj):
        # Read-modify-write so concurrent appenders at least see prior data
        # (still racy — see class note).
        self.read()
        self.list.append(self.id + ": " + obj)
        self.write()

    def read(self):
        with open(self.path, "rb") as fd:
            self.list = pickle.load(fd)
        return self.list

    def write(self):
        with open(self.path, "wb") as fd:
            pickle.dump(self.list, fd, protocol=PICKLE_PROTOCOL)

    def close(self):
        self.append("Closed")
        self.closed = True


@resource(config_schema=Field(String))
def filepicklelist_resource(init_context):
    # Resource lifecycle: yield the store, guarantee "Closed" is recorded.
    filepicklelist = FilePickleList(init_context.resource_config)
    try:
        yield filepicklelist
    finally:
        filepicklelist.close()


@pipeline(
    mode_defs=[
        ModeDefinition(
            name="test",
            resource_defs={
                "list": ResourceDefinition(lambda _: []),
                "io_manager": fs_io_manager,
                "output_notebook_io_manager": local_output_notebook_io_manager,
            },
        ),
        ModeDefinition(
            name="prod",
            resource_defs={
                "list": filepicklelist_resource,
                "output_notebook_io_manager": local_output_notebook_io_manager,
                "io_manager": fs_io_manager,
            },
        ),
    ]
)
def resource_pipeline():
    hello_world_resource(resource_solid())


@pipeline(
    mode_defs=[
        ModeDefinition(
            resource_defs={
                "list": filepicklelist_resource,
                "output_notebook_io_manager": local_output_notebook_io_manager,
                "io_manager": fs_io_manager,
            }
        )
    ]
)
def resource_with_exception_pipeline():
    hello_world_resource_with_exception(resource_solid())


bad_kernel = test_nb_solid("bad_kernel")


@pipeline(mode_defs=default_mode_defs)
def bad_kernel_pipeline():
    bad_kernel()


reimport = test_nb_solid(
    "reimport", input_defs=[InputDefinition("l", List[int])], output_defs=[OutputDefinition(int)]
)


@solid
def lister():
    return [1, 2, 3]


@pipeline(mode_defs=default_mode_defs)
def reimport_pipeline():
    reimport(lister())


yield_3 = test_nb_solid("yield_3", output_defs=[OutputDefinition(Int)])


@pipeline(mode_defs=default_mode_defs)
def yield_3_pipeline():
    yield_3()


yield_obj = test_nb_solid("yield_obj")


@pipeline(mode_defs=default_mode_defs)
def yield_obj_pipeline():
    yield_obj()


@pipeline(mode_defs=default_mode_defs)
def retries_pipeline():
    test_nb_solid("raise_retry")()
    test_nb_solid("yield_retry")()


@pipeline(mode_defs=default_mode_defs)
def failure_pipeline():
    test_nb_solid("raise_failure")()
    test_nb_solid("yield_failure")()


yield_something = test_nb_solid(
    "yield_something",
    input_defs=[InputDefinition("obj", str)],
    output_defs=[OutputDefinition(str, "result")],
)


@solid
def fan_in(a, b):
    return f"{a} {b}"


@pipeline(
    mode_defs=[
        ModeDefinition(
            resource_defs={
                "io_manager": fs_io_manager,
                "output_notebook_io_manager": local_output_notebook_io_manager,
            }
        )
    ]
)
def fan_in_notebook_pipeline():
    val_a, _ = yield_something.alias("solid_1")()
    val_b, _ = yield_something.alias("solid_2")()
    fan_in(val_a, val_b)


@pipeline(
    mode_defs=[
        ModeDefinition(
            resource_defs={
                "output_notebook_io_manager": local_output_notebook_io_manager,
            }
        )
    ]
)
def fan_in_notebook_pipeline_in_mem():
    # Same DAG as fan_in_notebook_pipeline but with the default in-memory
    # io manager (no "io_manager" resource supplied).
    val_a, _ = yield_something.alias("solid_1")()
    val_b, _ = yield_something.alias("solid_2")()
    fan_in(val_a, val_b)


@composite_solid
def outer():
    yield_something()


@pipeline(
    mode_defs=[
        ModeDefinition(
            resource_defs={
                "io_manager": fs_io_manager,
                "output_notebook_io_manager": local_output_notebook_io_manager,
            }
        )
    ]
)
def composite_pipeline():
    outer()


###################################################################################################
# Back compat
###################################################################################################

hello_world_legacy = dagstermill.define_dagstermill_solid(
    name="hello_world_legacy",
    notebook_path=nb_test_path("hello_world"),
    output_notebook="notebook",
)


@solid(input_defs=[InputDefinition("notebook", dagster_type=FileHandle)])
def load_notebook_legacy(notebook):
    # Legacy path: the output notebook arrives as a FileHandle, not a stream.
    return os.path.exists(notebook.path_desc)


@pipeline(
    mode_defs=[
        ModeDefinition(
            resource_defs={
                "io_manager": fs_io_manager,
                "file_manager": local_file_manager,
            }
        )
    ]
)
def hello_world_with_output_notebook_pipeline_legacy():
    notebook = hello_world_legacy()
    load_notebook_legacy(notebook)


@repository
def notebook_repo():
    # NOTE(review): not every pipeline defined above is registered here
    # (e.g. alias_config_pipeline, double_add_pipeline) — presumably
    # intentional, since some are exercised directly by tests; confirm.
    pipelines = [
        bad_kernel_pipeline,
        error_pipeline,
        hello_world_pipeline,
        hello_world_with_custom_tags_and_description_pipeline,
        hello_world_config_pipeline,
        hello_world_explicit_yield_pipeline,
        hello_world_output_pipeline,
        hello_world_with_output_notebook_pipeline,
        hello_logging_pipeline,
        resource_pipeline,
        resource_with_exception_pipeline,
        add_pipeline,
        notebook_dag_pipeline,
        reimport_pipeline,
        yield_3_pipeline,
        yield_obj_pipeline,
        retries_pipeline,
        failure_pipeline,
        fan_in_notebook_pipeline_in_mem,
        fan_in_notebook_pipeline,
        hello_world_no_output_notebook_no_file_manager_pipeline,
        hello_world_with_output_notebook_pipeline_legacy,
    ]
    if DAGSTER_PANDAS_PRESENT and SKLEARN_PRESENT and MATPLOTLIB_PRESENT:
        pipelines += [tutorial_pipeline]

    return pipelines
# -*- coding: utf-8 -*-
"""
Tests for the Py2-like class:`basestring` type.
"""

from __future__ import absolute_import, unicode_literals, print_function
import os

from past import utils
from future.tests.base import unittest
from past.builtins import basestring, str as oldstr


class TestBaseString(unittest.TestCase):
    """Checks that Py2-style string values are recognized as ``basestring``."""

    def test_isinstance(self):
        # Both a native bytes literal and an oldstr wrapper should satisfy
        # the basestring check, exactly as on Python 2.
        for candidate in (b'abc', oldstr(b'abc')):
            self.assertTrue(isinstance(candidate, basestring))


if __name__ == '__main__':
    unittest.main()
from django.core.files.base import ContentFile

from readable.models import Documents

from .utils import TestCase


class TestDocuments(TestCase):
    """Tests for the Documents model's upload naming and availability flag."""

    def setUp(self) -> None:
        # A staff user plus a small in-memory text file to upload.
        super(TestDocuments, self).setUp()
        self.user = self.create_user("staff", self.get_random_string())
        self.staff = self.create_staff(self.user)
        self.lorem = ContentFile("Lorem ipsum dolor sit amet, consectetur adipiscing elit.", "lorem.txt")

    def test_upload_directory(self) -> None:
        document: Documents = Documents.objects.create(filename=self.lorem, uploaded_by=self.staff)
        # The original upload name is kept in `realname`, while the stored
        # filename is rewritten to "<uuid-pk><original suffix>"
        # (presumably via the model's upload_to handler — confirm in models).
        self.assertEqual(document.realname, self.lorem.name)
        self.assertEqual(document.filename, f"{document.id!s}{document.path.suffix}")

        # A document is unavailable until its status reaches FINISHED.
        self.assertTrue(document.unavailable)
        document.status = Documents.Status.FINISHED
        document.save(update_fields=["status"])
        self.assertFalse(document.unavailable)
# coding=utf-8
# Copyright 2020 The HuggingFace NLP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" BLEU metric. """

import nlp

from .nmt_bleu import compute_bleu  # From: https://github.com/tensorflow/nmt/blob/master/nmt/scripts/bleu.py


_CITATION = """\
@INPROCEEDINGS{Papineni02bleu:a,
    author = {Kishore Papineni and Salim Roukos and Todd Ward and Wei-jing Zhu},
    title = {BLEU: a Method for Automatic Evaluation of Machine Translation},
    booktitle = {},
    year = {2002},
    pages = {311--318}
}
@inproceedings{lin-och-2004-orange,
    title = "{ORANGE}: a Method for Evaluating Automatic Evaluation Metrics for Machine Translation",
    author = "Lin, Chin-Yew  and
      Och, Franz Josef",
    booktitle = "{COLING} 2004: Proceedings of the 20th International Conference on Computational Linguistics",
    month = "aug 23{--}aug 27",
    year = "2004",
    address = "Geneva, Switzerland",
    publisher = "COLING",
    url = "https://www.aclweb.org/anthology/C04-1072",
    pages = "501--507",
}
"""

_DESCRIPTION = """\
BLEU (bilingual evaluation understudy) is an algorithm for evaluating the quality of text which has been machine-translated from one natural language to another.
Quality is considered to be the correspondence between a machine's output and that of a human: "the closer a machine translation is to a professional human translation,
the better it is" – this is the central idea behind BLEU. BLEU was one of the first metrics to claim a high correlation with human judgements of quality, and
remains one of the most popular automated and inexpensive metrics.

Scores are calculated for individual translated segments—generally sentences—by comparing them with a set of good quality reference translations.
Those scores are then averaged over the whole corpus to reach an estimate of the translation's overall quality. Intelligibility or grammatical correctness
are not taken into account[citation needed].

BLEU's output is always a number between 0 and 1. This value indicates how similar the candidate text is to the reference texts, with values closer to 1
representing more similar texts. Few human translations will attain a score of 1, since this would indicate that the candidate is identical to one of the
reference translations. For this reason, it is not necessary to attain a score of 1. Because there are more opportunities to match, adding additional
reference translations will increase the BLEU score.
"""

_KWARGS_DESCRIPTION = """
Computes BLEU score of translated segments against one or more references.
Args:
    predictions: list of translations to score.
        Each translation should be tokenized into a list of tokens.
    references: list of lists of references for each translation.
        Each reference should be tokenized into a list of tokens.
    max_order: Maximum n-gram order to use when computing BLEU score.
    smooth: Whether or not to apply Lin et al. 2004 smoothing.
Returns:
    'bleu': bleu score,
    'precisions': geometric mean of n-gram precisions,
    'brevity_penalty': brevity penalty,
    'length_ratio': ratio of lengths,
    'translation_length': translation_length,
    'reference_length': reference_length
"""

class Bleu(nlp.Metric):
    """`nlp.Metric` wrapper around the tensorflow/nmt reference BLEU implementation."""

    def _info(self):
        # Declares the expected feature schema: tokenized predictions and,
        # per prediction, a list of tokenized references.
        return nlp.MetricInfo(
            description=_DESCRIPTION,
            citation=_CITATION,
            inputs_description=_KWARGS_DESCRIPTION,
            features=nlp.Features({
                'predictions': nlp.Sequence(nlp.Value('string', id='token'), id='sequence'),
                'references': nlp.Sequence(nlp.Sequence(nlp.Value('string', id='token'), id='sequence'), id='references'),
            }),
            codebase_urls=["https://github.com/tensorflow/nmt/blob/master/nmt/scripts/bleu.py"],
            reference_urls=["https://en.wikipedia.org/wiki/BLEU",
                            "https://towardsdatascience.com/evaluating-text-output-in-nlp-bleu-at-your-own-risk-e8609665a213"]
        )

    def _compute(self, predictions, references, max_order=4, smooth=False):
        # Delegates to compute_bleu, then unpacks its 6-tuple into the
        # documented result dict (see _KWARGS_DESCRIPTION).
        score = compute_bleu(reference_corpus=references, translation_corpus=predictions, max_order=max_order, smooth=smooth)
        (bleu, precisions, bp, ratio, translation_length, reference_length) = score
        return {'bleu': bleu,
                'precisions': precisions,
                'brevity_penalty': bp,
                'length_ratio': ratio,
                'translation_length': translation_length,
                'reference_length': reference_length}
import rlkit.misc.hyperparameter as hyp
from rlkit.demos.source.dict_to_mdp_path_loader import EncoderDictToMDPPathLoader
from rlkit.launchers.experiments.ashvin.awac_rig import awac_rig_experiment
from rlkit.launchers.launcher_util import run_experiment
from rlkit.launchers.arglauncher import run_variants
from rlkit.torch.sac.policies import GaussianPolicy, GaussianMixturePolicy
from rlkit.envs.encoder_wrappers import ConditionalEncoderWrappedEnv
from sawyer_control.envs.sawyer_grip import SawyerGripEnv
#from sawyer_control.envs.sawyer_grip_stub import SawyerGripEnv
from rlkit.torch.networks import Clamp
from rlkit.torch.vae.vq_vae import VQ_VAE
from rlkit.torch.vae.vq_vae_trainer import VQ_VAETrainer
from rlkit.torch.grill.common import train_vqvae

# Resolve demo/checkpoint file names against the data root.
path_func = lambda name: '/media/ashvin/data2/data/baseline/' + name

# NOTE(review): this first definition is dead code — it is immediately
# overwritten by the second `all_demos` below (which drops `data_split`).
# Kept verbatim so the intended variant can be restored by deleting one.
all_demos = [
    dict(path=path_func('fixed_drawer_demos.npy'), obs_dict=True, is_demo=True, data_split=0.2),
    dict(path=path_func('fixed_pot_demos.npy'), obs_dict=True, is_demo=True, data_split=0.2),
    dict(path=path_func('fixed_pot_extra1_demos.npy'), obs_dict=True, is_demo=True, data_split=0.2),
    dict(path=path_func('fixed_pnp_demos.npy'), obs_dict=True, is_demo=True, data_split=0.2),
    dict(path=path_func('fixed_tray_demos.npy'), obs_dict=True, is_demo=True, data_split=0.2),
]


all_demos = [
    dict(path=path_func('fixed_drawer_demos.npy'), obs_dict=True, is_demo=True,),
    dict(path=path_func('fixed_pot_demos.npy'), obs_dict=True, is_demo=True,),
    dict(path=path_func('fixed_pot_extra1_demos.npy'), obs_dict=True, is_demo=True,),
    dict(path=path_func('fixed_pnp_demos.npy'), obs_dict=True, is_demo=True,),
    dict(path=path_func('fixed_tray_demos.npy'), obs_dict=True, is_demo=True,),
]


if __name__ == "__main__":
    # Default hyperparameters for the AWAC-RIG experiment; individual entries
    # are overridden per-run by `search_space` below.
    variant = dict(
        imsize=48,
        env_class=SawyerGripEnv,
        env_kwargs=dict(
            action_mode='position',
            config_name='ashvin_config',
            reset_free=False,
            position_action_scale=0.05,
            max_speed=0.4,
            step_sleep_time=0.2,
            crop_version_str="crop_val_torch",
        ),
        policy_class=GaussianPolicy,
        policy_kwargs=dict(
            hidden_sizes=[256, 256, 256, 256, ],
            max_log_std=0,
            min_log_std=-6,
            std_architecture="values",
        ),

        qf_kwargs=dict(
            hidden_sizes=[256, 256],
        ),

        trainer_kwargs=dict(
            discount=0.99,
            soft_target_tau=5e-3,
            target_update_period=1,
            policy_lr=3e-4,
            qf_lr=3E-4,
            reward_scale=1,
            beta=1,
            use_automatic_entropy_tuning=False,
            alpha=0,

            bc_num_pretrain_steps=0,
            q_num_pretrain1_steps=0,
            q_num_pretrain2_steps=25001,  # 25001 #HERE
            policy_weight_decay=1e-4,
            q_weight_decay=0,

            rl_weight=1.0,
            use_awr_update=True,
            use_reparam_update=False,
            compute_bc=True,
            reparam_weight=0.0,
            awr_weight=1.0,
            bc_weight=0.0,

            reward_transform_kwargs=None,
            terminal_transform_kwargs=None,
        ),

        max_path_length=75,  # 50
        algo_kwargs=dict(
            batch_size=1024,  # 1024
            num_epochs=101,  # 1001
            num_eval_steps_per_epoch=150,  # 500
            num_expl_steps_per_train_loop=600,  # 500
            num_trains_per_train_loop=600,  # 500
            min_num_steps_before_training=150,  # 4000
        ),
        replay_buffer_kwargs=dict(
            fraction_future_context=0.6,
            fraction_distribution_context=0.1,  # TODO: Try less?
            max_size=int(5E5),  # HERE (DOUBLE CHECK THAT DEMOS FIT!!!!)
        ),
        demo_replay_buffer_kwargs=dict(
            fraction_future_context=0.6,
            fraction_distribution_context=0.1,  # TODO: Try less?
        ),
        reward_kwargs=dict(
            reward_type='sparse',
            epsilon=1.0,
        ),

        observation_key='latent_observation',
        desired_goal_key='latent_desired_goal',
        save_video=True,
        save_video_kwargs=dict(
            save_video_period=1,
            pad_color=0,
        ),

        encoder_wrapper=ConditionalEncoderWrappedEnv,
        reset_keys_map=dict(
            image_observation="initial_latent_state"
        ),

        path_loader_class=EncoderDictToMDPPathLoader,
        path_loader_kwargs=dict(
            recompute_reward=True,
        ),

        renderer_kwargs=dict(
            create_image_format='HWC',
            output_image_format='CWH',
            flatten_image=True,
            width=48,
            height=48,
        ),

        add_env_demos=False,
        add_env_offpolicy_data=False,

        load_demos=True,
        pretrain_policy=True,
        pretrain_rl=True,

        evaluation_goal_sampling_mode="presampled_images",
        exploration_goal_sampling_mode="conditional_vae_prior",
        train_vae_kwargs=dict(
            imsize=48,
            beta=1,
            beta_schedule_kwargs=dict(
                x_values=(0, 250),
                y_values=(0, 100),
            ),
            num_epochs=1501,  # 1501
            embedding_dim=5,
            dump_skew_debug_plots=False,
            decoder_activation='sigmoid',
            use_linear_dynamics=False,
            generate_vae_dataset_kwargs=dict(
                N=1000,
                n_random_steps=2,
                test_p=.9,
                dataset_path={
                    'train': 'demos/icra2021/dataset_v1_train.npy',
                    'test': 'demos/icra2021/dataset_v1_test.npy',
                },
                augment_data=False,
                use_cached=False,
                show=False,
                oracle_dataset=False,
                oracle_dataset_using_set_to_goal=False,
                non_presampled_goal_img_is_garbage=False,
                random_rollout_data=True,
                random_rollout_data_set_to_goal=True,
                conditional_vae_dataset=True,
                save_trajectories=False,
                enviorment_dataset=False,
                tag="ccrig_tuning_orig_network",
            ),
            vae_trainer_class=VQ_VAETrainer,
            vae_class=VQ_VAE,
            vae_kwargs=dict(
                input_channels=3,
                imsize=48,
            ),
            algo_kwargs=dict(
                key_to_reconstruct='x_t',
                start_skew_epoch=5000,
                is_auto_encoder=False,
                batch_size=128,
                lr=1e-3,
                skew_config=dict(
                    method='vae_prob',
                    power=0,
                ),
                weight_decay=0.0,
                skew_dataset=False,
                priority_function_kwargs=dict(
                    decoder_distribution='gaussian_identity_variance',
                    sampling_method='importance_sampling',
                    num_latents_to_sample=10,
                ),
                use_parallel_dataloading=False,
            ),

            save_period=10,
        ),
        train_model_func=train_vqvae,

        presampled_goal_kwargs=dict(
            eval_goals='/media/ashvin/data2/data/val/v1/ccvae_pot1_eval_goals.pkl',
            expl_goals=None,
        ),
        launcher_config=dict(
            unpack_variant=True,
            region='us-west-1',
        ),
        logger_config=dict(
            snapshot_mode='gap',
            snapshot_gap=1,
        ),
        ccvae_or_cbigan_exp=True,
        pickle_paths=True,

        pretrained_vae_path=path_func('vae.pt'),
        pretrained_algo_path=path_func('agent_sparse_1.pt'),  # agent_sparse_1.pt, agent_sparse_2.pt, agent_dense.pt
    )

    # Dotted keys override the corresponding nested entries of `variant`.
    search_space = {
        "seed": range(1),
        'path_loader_kwargs.demo_paths': [all_demos],  # CHANGED

        'reward_kwargs.reward_type': ['sparse', ],  # TRY SPARSE (EPS=1), SPARSE (EPS=2), DENSE (PROB NOT GONNA WORK)
        'trainer_kwargs.beta': [0.3],
        'num_pybullet_objects': [None],

        'policy_kwargs.min_log_std': [-6],
        'trainer_kwargs.awr_weight': [1.0],
        'trainer_kwargs.awr_use_mle_for_vf': [True],
        'trainer_kwargs.awr_sample_actions': [False],
        'trainer_kwargs.clip_score': [2],
        'trainer_kwargs.awr_min_q': [True],
        'trainer_kwargs.reward_transform_kwargs': [None, ],
        'trainer_kwargs.terminal_transform_kwargs': [dict(m=0, b=0)],
        #'qf_kwargs.output_activation': [Clamp(max=0)],
    }
    sweeper = hyp.DeterministicHyperparameterSweeper(
        search_space, default_parameters=variant,
    )

    variants = []
    for variant in sweeper.iterate_hyperparameters():
        if 'sparse' in variant['pretrained_algo_path']:
            variant['qf_kwargs']['output_activation'] = Clamp(max=0)

        # BUG FIX: these three statements previously used '==' — a no-op
        # comparison whose result was discarded — so reward_kwargs was never
        # actually overridden per pretrained checkpoint. '=' assigns as
        # intended.
        if variant['pretrained_algo_path'] == path_func('agent_sparse_1.pt'):
            variant['reward_kwargs'] = dict(reward_type='sparse', epsilon=1.0)
        if variant['pretrained_algo_path'] == path_func('agent_sparse_2.pt'):
            variant['reward_kwargs'] = dict(reward_type='sparse', epsilon=2.0)
        if variant['pretrained_algo_path'] == path_func('agent_dense.pt'):
            variant['reward_kwargs'] = dict(reward_type='dense', epsilon=1.0)

        variants.append(variant)

    run_variants(awac_rig_experiment, variants, run_id=10)  # HERE
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file '/media/raul/OS/Users/king_/Desktop/carrera/curso2018-2019/2oCuatri/TFG/gui_dbjudge/sql_judge/view/qt_view/custom_types/custom_type_row.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!


from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_Form(object):
    # Generated UI scaffold for one "custom type" row: a data label, a
    # horizontal spacer, and a trash-icon delete button.
    # NOTE(review): auto-generated by pyuic5 — regenerate from the .ui file
    # instead of editing by hand (see the WARNING header above).

    def setupUi(self, CustomTypeRow):
        # Build the widget hierarchy and layouts on `CustomTypeRow`.
        CustomTypeRow.setObjectName("CustomTypeRow")
        CustomTypeRow.setGeometry(QtCore.QRect(0, 0, 195, 61))
        # Expand horizontally, keep the minimum height vertically.
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(CustomTypeRow.sizePolicy().hasHeightForWidth())
        CustomTypeRow.setSizePolicy(sizePolicy)
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(CustomTypeRow)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.data_label = QtWidgets.QLabel(CustomTypeRow)
        self.data_label.setObjectName("data_label")
        self.horizontalLayout.addWidget(self.data_label)
        # The expanding spacer pushes the delete button to the right edge.
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.delete_sample_button = QtWidgets.QPushButton(CustomTypeRow)
        self.delete_sample_button.setText("")
        # Icon is loaded from the compiled Qt resource module imported at the
        # bottom of this file (':/icons/trash-26.png').
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/icons/trash-26.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.delete_sample_button.setIcon(icon)
        self.delete_sample_button.setObjectName("delete_sample_button")
        self.horizontalLayout.addWidget(self.delete_sample_button)
        self.horizontalLayout_2.addLayout(self.horizontalLayout)

        self.retranslateUi(CustomTypeRow)
        QtCore.QMetaObject.connectSlotsByName(CustomTypeRow)

    def retranslateUi(self, CustomTypeRow):
        # Set user-visible strings through Qt's translation layer.
        _translate = QtCore.QCoreApplication.translate
        CustomTypeRow.setWindowTitle(_translate("Form", "Form"))
        self.data_label.setText(_translate("Form", "data"))
# Imported for its side effect of registering the compiled Qt resources
# (presumably providing ':/icons/trash-26.png' above — pyuic5 emits this
# import at the end of the generated file).
from . import resources
"""This module contains miscellaneous utilities."""NEWLINENEWLINE__author__ = "Damián Silvani"NEWLINE__copyright__ = "Dymaxion Labs"NEWLINE__license__ = "MIT"NEWLINENEWLINENEWLINEdef flatten(list):NEWLINE return [item for sublist in list for item in sublist]NEWLINE |
import argparse
import torch
import numpy as np
import os
import pickle
from post_process.kcenter_greedy import kCenterGreedy
from sklearn.random_projection import SparseRandomProjection

from data_loader.one_class_dataset import get_train_dataloader
from model.one_class.models import STPM
from inference import OneClassInference
from utils import reshape_embedding


class OneClassTest():
    """Runs one-class inference over the training set and distills the
    collected embeddings into a coreset that is pickled to disk."""

    def __init__(self, args):
        # FIX: keep the parsed CLI args on the instance. computer_embedding()
        # previously read the module-level global `args`, which only existed
        # when this file was executed as a script (NameError otherwise).
        self.args = args
        self.embedding_dir_path = "./embeddings"
        if not os.path.exists(self.embedding_dir_path):
            os.mkdir(self.embedding_dir_path)

        self.embedding_list = []
        self.train_loader = get_train_dataloader(args.dataset_path, args.load_size, args.input_size, args.batch_size)
        self.model = STPM()
        self.model.eval()  # inference only
        self.inference = OneClassInference(args)

    def test(self):
        """Collect embeddings for every training batch, then build the coreset."""
        for index, batch_data in enumerate(self.train_loader):
            prediction = self.inference.infer(batch_data)
            self.embedding_list.extend(reshape_embedding(np.array(prediction)))
        self.computer_embedding()

    # NOTE(review): method name looks like a typo for `compute_embedding`;
    # kept as-is to preserve the public interface.
    def computer_embedding(self):
        """Subsample the collected embeddings via greedy k-center selection
        and pickle the resulting coreset to the embeddings directory."""
        total_embeddings = np.array(self.embedding_list)
        # Random projection ('auto' => Johnson-Lindenstrauss lemma) to speed
        # up the distance computations used by the coreset selector.
        randomprojector = SparseRandomProjection(n_components='auto', eps=0.9)
        randomprojector.fit(total_embeddings)
        # Coreset Subsampling
        selector = kCenterGreedy(total_embeddings, 0, 0)
        selected_idx = selector.select_batch(
            model=randomprojector, already_selected=[],
            N=int(total_embeddings.shape[0] * float(self.args.coreset_sampling_ratio)))
        self.embedding_coreset = total_embeddings[selected_idx]

        print('initial embedding size : ', total_embeddings.shape)
        print('final embedding size : ', self.embedding_coreset.shape)
        with open(os.path.join(self.embedding_dir_path, 'embedding.pickle'), 'wb') as f:
            pickle.dump(self.embedding_coreset, f)


def get_args():
    """Parse command-line arguments for the one-class coreset run."""
    parser = argparse.ArgumentParser(description='ANOMALYDETECTION')
    parser.add_argument('--dataset_path',
                        default=r'/home/changwoo/hdd/datasets/mvtec_anomaly_detection')  # 'D:\Dataset\mvtec_anomaly_detection')#
    parser.add_argument('--batch_size', default=32)
    parser.add_argument('--load_size', default=256)  # 256
    parser.add_argument('--input_size', default=224)
    parser.add_argument('--coreset_sampling_ratio', default=0.01)
    parser.add_argument('--project_root_path',
                        default=r'/home/changwoo/hdd/project_results/patchcore/test')
    parser.add_argument('--method', default=r'NN')
    parser.add_argument('--n_neighbors', type=int, default=9)
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    # NOTE(review): the original also created an unused `device` variable
    # (torch.device(...)) here; removed as dead code.
    args = get_args()

    one_class_test = OneClassTest(args)
    one_class_test.test()
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Graz University of Technology.
# Copyright (C) 2021 CERN.
# Copyright (C) 2021 TU Wien.
#
# Invenio-Records-Permissions is free software; you can redistribute it
# and/or modify it under the terms of the MIT License; see LICENSE file for
# more details.

"""Pytest configuration.

See https://pytest-invenio.readthedocs.io/ for documentation on which test
fixtures are available.
"""

from typing import Pattern

import pytest
from flask_principal import Identity, UserNeed
from invenio_access.permissions import any_user, authenticated_user, \
    system_process
from invenio_db import db
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_records_permissions.generators import AnyUser, \
    AuthenticatedUser, SystemProcess

from invenio_rdm_records.records import RDMParent, RDMRecord
from invenio_rdm_records.services.generators import IfRestricted, RecordOwners


def _public_record():
    """Build a record whose record and files protection are both public."""
    rec = RDMRecord({}, access={})
    rec.access.protection.set("public", "public")
    return rec


def _restricted_record():
    """Build a record whose record and files protection are both restricted."""
    rec = RDMRecord({}, access={})
    rec.access.protection.set("restricted", "restricted")
    return rec


def _owned_record():
    """Build a record owned by users 16 and 17."""
    parent = RDMParent.create({})
    for user_id in (16, 17):
        parent.access.owners.add({"user": user_id})
    return RDMRecord.create({}, parent=parent)


def _then_needs():
    """Needs expected from the restricted (then_) branch."""
    return {authenticated_user, system_process}


def _else_needs():
    """Needs expected from the public (else_) branch."""
    return {any_user, system_process}


#
# Tests
#
@pytest.mark.parametrize(
    "field,record_fun,expected_needs_fun", [
        ("record", _public_record, _else_needs),
        ("record", _restricted_record, _then_needs),
        ("files", _public_record, _else_needs),
        ("files", _restricted_record, _then_needs),
    ]
)
def test_ifrestricted_needs(field, record_fun, expected_needs_fun):
    """IfRestricted yields then_/else_ needs based on the record's protection."""
    generator = IfRestricted(
        field,
        then_=[AuthenticatedUser(), SystemProcess()],
        else_=[AnyUser(), SystemProcess()],
    )
    assert generator.needs(record=record_fun()) == expected_needs_fun()
    assert generator.excludes(record=record_fun()) == set()


def test_ifrestricted_query():
    """IfRestricted builds a should-query over both protection levels."""
    generator = IfRestricted(
        "record",
        then_=[AuthenticatedUser()],
        else_=[AnyUser()],
    )
    expected = {
        'bool': {
            'should': [
                {'match': {'access.record': 'restricted'}},
                {'match': {'access.record': 'public'}}
            ]
        }
    }
    assert generator.query_filter(identity=any_user).to_dict() == expected


def test_record_owner(app, mocker):
    """RecordOwners yields the owners' UserNeeds and an owner-scoped query."""
    generator = RecordOwners()
    record = _owned_record()

    assert generator.needs(record=record) == [UserNeed(16), UserNeed(17)]
    assert generator.excludes(record=record) == []

    # An anonymous identity produces no query filter.
    anonymous = mocker.Mock(provides=[])
    assert not generator.query_filter(identity=anonymous)

    # An authenticated identity is restricted to the records it owns.
    authenticated = mocker.Mock(
        provides=[mocker.Mock(method='id', value=15)]
    )
    query_filter = generator.query_filter(identity=authenticated)

    expected_query_filter = {
        "terms": {
            "parent.access.owned_by.user": [15]
        }
    }
    assert query_filter.to_dict() == expected_query_filter
import sys
import os
import re
import time

sys.path.append("lib")
import utils

import requests
from bs4 import BeautifulSoup

# Parameterized FAA registry-inquiry URL; loop-invariant, so defined once.
BASE_URL = 'http://registry.faa.gov/aircraftinquiry/NNum_Results.aspx?NNumbertxt={}'

tail_number_records = utils.read_json_lines_file('data/tail_numbers.jsonl')

aircraft_records = []
# Loop through the tail numbers, fetching
for tail_number_record in tail_number_records:
    time.sleep(0.1)  # essential to sleep FIRST in loop or you will flood sites

    tail_number = tail_number_record['TailNum']
    url = BASE_URL.format(tail_number)

    # Fetch the page, parse the HTML.
    r = requests.get(url)
    # FIX: name the parser explicitly — bs4 warns on a bare BeautifulSoup(html)
    # and would otherwise pick whichever parser happens to be installed.
    soup = BeautifulSoup(r.text, 'html.parser')

    # The table structure is constant for all pages that contain data
    try:
        # Parse the page's tables once instead of re-walking the tree for
        # each of the three sections below.
        tables = soup.find_all('table')

        # Aircraft description (table 4).
        craft_tds = tables[4].find_all('td')
        serial_number = craft_tds[1].text.strip()
        manufacturer = craft_tds[5].text.strip()
        model = craft_tds[9].text.strip()
        mfr_year = craft_tds[25].text.strip()

        # Registered owner (table 5).
        reg_tds = tables[5].find_all('td')
        owner = reg_tds[1].text.strip()
        owner_state = reg_tds[9].text.strip()

        # Airworthiness / engine (table 6).
        worthy_tds = tables[6].find_all('td')
        engine_manufacturer = worthy_tds[1].text.strip()
        engine_model = worthy_tds[5].text.strip()

        aircraft_record = {
            'TailNum': tail_number,
            'serial_number': serial_number,
            'manufacturer': manufacturer,
            'model': model,
            'mfr_year': mfr_year,
            'owner': owner,
            'owner_state': owner_state,
            'engine_manufacturer': engine_manufacturer,
            'engine_model': engine_model,
        }
        aircraft_records.append(
            aircraft_record
        )
        print(aircraft_record)

    except IndexError as e:
        # Pages without registry data have fewer tables/cells; skip them.
        print("Missing {} record: {}".format(tail_number, e))

utils.write_json_lines_file(
    aircraft_records, 'data/faa_tail_number_inquiry.jsonl'
)
import importlib
import inspect
import os
import pathlib

import pkg_resources
from clvm_tools.clvmc import compile_clvm as compile_clvm_py
from flax.types.blockchain_format.program import Program, SerializedProgram

# Default to the pure-Python compiler; optionally replaced by the rust
# implementation below when CLVM_TOOLS_RS is set.
compile_clvm = compile_clvm_py

# Handle optional use of clvm_tools_rs if available and requested
if "CLVM_TOOLS_RS" in os.environ:
    try:

        def sha256file(f):
            """Return the hex SHA-256 of a text file's UTF-8 bytes."""
            import hashlib

            m = hashlib.sha256()
            # FIX: close the handle deterministically and read with an
            # explicit encoding instead of the locale default.
            with open(f, encoding="utf8") as handle:
                m.update(handle.read().encode("utf8"))
            return m.hexdigest()

        from clvm_tools_rs import compile_clvm as compile_clvm_rs

        def translate_path(p_):
            """Map a directory path or importable module name to a directory
            usable as an include path."""
            p = str(p_)
            if os.path.isdir(p):
                return p
            else:
                module_object = importlib.import_module(p)
                return os.path.dirname(inspect.getfile(module_object))

        def rust_compile_clvm(full_path, output, search_paths=None):
            """Compile with clvm_tools_rs; with CLVM_TOOLS_RS=check, also
            compile with the Python toolchain and assert identical output.

            FIX: the default was a mutable list literal (``[]``); ``None``
            avoids the shared-mutable-default pitfall. Callers are unaffected.
            """
            search_paths = list(search_paths) if search_paths else []
            treated_include_paths = list(map(translate_path, search_paths))
            print("compile_clvm_rs", full_path, output, treated_include_paths)
            compile_clvm_rs(str(full_path), str(output), treated_include_paths)

            if os.environ["CLVM_TOOLS_RS"] == "check":
                # Cross-check the rust output against the Python compiler.
                orig = str(output) + ".orig"
                compile_clvm_py(full_path, orig, search_paths=search_paths)
                orig256 = sha256file(orig)
                rs256 = sha256file(output)

                if orig256 != rs256:
                    print("Compiled %s: %s vs %s\n" % (full_path, orig256, rs256))
                    print("Aborting compilation due to mismatch with rust")
                    assert orig256 == rs256

        compile_clvm = rust_compile_clvm
    finally:
        # NOTE(review): `finally: pass` is a no-op — an ImportError from
        # `clvm_tools_rs` still propagates. If a silent fallback to the
        # Python compiler was intended, this should be `except ImportError:
        # pass`; behavior is deliberately left unchanged here.
        pass


def load_serialized_clvm(clvm_filename, package_or_requirement=__name__) -> SerializedProgram:
    """
    This function takes a .clvm file in the given package and compiles it to a
    .clvm.hex file if the .hex file is missing or older than the .clvm file, then
    returns the contents of the .hex file as a `Program`.

    clvm_filename: file name
    package_or_requirement: usually `__name__` if the clvm file is in the same package
    """

    hex_filename = f"{clvm_filename}.hex"

    try:
        if pkg_resources.resource_exists(package_or_requirement, clvm_filename):
            full_path = pathlib.Path(pkg_resources.resource_filename(package_or_requirement, clvm_filename))
            output = full_path.parent / hex_filename
            compile_clvm(full_path, output, search_paths=[full_path.parent])
    except NotImplementedError:
        # pyinstaller doesn't support `pkg_resources.resource_exists`
        # so we just fall through to loading the hex clvm
        pass

    clvm_hex = pkg_resources.resource_string(package_or_requirement, hex_filename).decode("utf8")
    clvm_blob = bytes.fromhex(clvm_hex)
    return SerializedProgram.from_bytes(clvm_blob)


def load_clvm(clvm_filename, package_or_requirement=__name__) -> Program:
    """Like load_serialized_clvm, but deserialized into a full `Program`."""
    return Program.from_bytes(bytes(load_serialized_clvm(clvm_filename, package_or_requirement=package_or_requirement)))
"""NEWLINEtorch.multiprocessing is a wrapper around the native :mod:`multiprocessing`NEWLINEmodule. It registers custom reducers, that use shared memory to provide sharedNEWLINEviews on the same data in different processes. Once the tensor/storage is movedNEWLINEto shared_memory (see :func:`~torch.Tensor.share_memory_`), it will be possibleNEWLINEto send it to other processes without making any copies.NEWLINENEWLINEThe API is 100% compatible with the original module - it's enough to changeNEWLINE``import multiprocessing`` to ``import torch.multiprocessing`` to have all theNEWLINEtensors sent through the queues or shared via other mechanisms, moved to sharedNEWLINEmemory.NEWLINENEWLINEBecause of the similarity of APIs we do not document most of this packageNEWLINEcontents, and we recommend referring to very good docs of the original module.NEWLINE"""NEWLINEimport torchNEWLINEimport sysNEWLINEfrom .reductions import init_reductionsNEWLINEimport multiprocessingNEWLINENEWLINE__all__ = ['set_sharing_strategy', 'get_sharing_strategy',NEWLINE 'get_all_sharing_strategies']NEWLINENEWLINENEWLINEfrom multiprocessing import *NEWLINENEWLINENEWLINE__all__ += multiprocessing.__all__NEWLINENEWLINENEWLINE# This call adds a Linux specific prctl(2) wrapper function to this module.NEWLINE# See https://github.com/pytorch/pytorch/pull/14391 for more information.NEWLINEtorch._C._multiprocessing_init()NEWLINENEWLINENEWLINEif sys.version_info < (3, 3):NEWLINE """Override basic classes in Python 2.7 and Python 3.3 to use ForkingPicklerNEWLINE for serialization. Later versions of Python already use ForkingPickler."""NEWLINE from .queue import Queue, SimpleQueueNEWLINE from .pool import PoolNEWLINENEWLINENEWLINE"""Add helper function to spawn N processes and wait for completion of any ofNEWLINEthem. 
This depends `mp.get_context` which was added in Python 3.4."""NEWLINEfrom .spawn import spawn, SpawnContext, _supports_context, start_processes, ProcessContextNEWLINENEWLINENEWLINEif sys.platform == 'darwin' or sys.platform == 'win32':NEWLINE _sharing_strategy = 'file_system'NEWLINE _all_sharing_strategies = {'file_system'}NEWLINEelse:NEWLINE _sharing_strategy = 'file_descriptor'NEWLINE _all_sharing_strategies = {'file_descriptor', 'file_system'}NEWLINENEWLINENEWLINEdef set_sharing_strategy(new_strategy):NEWLINE """Sets the strategy for sharing CPU tensors.NEWLINENEWLINE Arguments:NEWLINE new_strategy (str): Name of the selected strategy. Should be one ofNEWLINE the values returned by :func:`get_all_sharing_strategies()`.NEWLINE """NEWLINE global _sharing_strategyNEWLINE assert new_strategy in _all_sharing_strategiesNEWLINE _sharing_strategy = new_strategyNEWLINENEWLINENEWLINEdef get_sharing_strategy():NEWLINE """Returns the current strategy for sharing CPU tensors."""NEWLINE return _sharing_strategyNEWLINENEWLINENEWLINEdef get_all_sharing_strategies():NEWLINE """Returns a set of sharing strategies supported on a current system."""NEWLINE return _all_sharing_strategiesNEWLINENEWLINENEWLINEinit_reductions()NEWLINE |
import argparse
import json
import pandas as pd
pd.options.display.float_format = '{:,.2f}'.format
import random
import numpy as np
import tqdm

from src.sim import Sim


def run(params):
    """simulates the investment on the S&P500 index similar to
    investing on index funds

    Parameters
    ----------
    params : dict
        contains the parameters to run the simulation
        ('lengths', 'years', 'months', 'buy', 'dividends')
    """
    # load data source
    data = pd.read_csv('./data/sp500.csv')

    # create empty dataframes to store per-length and per-run results
    res = pd.DataFrame(
        columns=['length', 'mean', 'median', 'std', 'iqr',
                 'wins', 'losses', 'zero', 'total', 'wins/losses',
                 'a_r_mean', 'a_r_median', 'a_r_std'])
    res_all = pd.DataFrame(
        columns=['len', 'year', 'month',
                 'gain', 'annualized_returns'])

    for i_l, length in enumerate(params['lengths']):
        for i_y, year in enumerate(params['years']):
            for i_m, month in enumerate(params['months']):
                # FIX: calculate the destination row index BEFORE the try
                # block. It was previously assigned inside `try` after the
                # simulation ran, so the except handler referenced a variable
                # that was unbound on the first failure (NameError) and stale
                # on later failures (silently overwriting an earlier row).
                i_res_all = i_l * len(params['years']) * len(params['months']) + \
                    i_y * len(params['months']) + i_m
                try:
                    config = {'buy': params['buy'],
                              'buy_year': year,
                              'buy_month': month,
                              'sell_year': year + length,
                              'sell_month': month,
                              'dividends': params['dividends'],
                              'inflation_corrected': False}

                    sim = Sim(config, data)
                    sim.run()
                    res_all.at[i_res_all, 'len'] = length
                    res_all.at[i_res_all, 'year'] = year
                    res_all.at[i_res_all, 'month'] = month
                    res_all.at[i_res_all, 'gain'] = sim.gain
                    res_all.at[i_res_all, 'annualized_returns'] = sim.annualized_returns
                except Exception as e:
                    # happens usually when the length goes beyond the data (2021+)
                    print(length, year, month, e)
                    res_all.at[i_res_all, :] = np.nan

        # Aggregate statistics for this investment length (filter once
        # instead of re-selecting the subset for every statistic).
        subset = res_all[res_all['len'] == length]
        gains = subset['gain']
        res.at[i_l, 'length'] = length
        res.at[i_l, 'mean'] = np.mean(gains)
        res.at[i_l, 'median'] = np.median(gains)
        res.at[i_l, 'std'] = np.std(gains)
        res.at[i_l, 'iqr'] = np.quantile(gains, 0.75) - np.quantile(gains, 0.25)
        res.at[i_l, 'wins'] = np.sum(gains > 0)
        res.at[i_l, 'losses'] = np.sum(gains < 0)
        res.at[i_l, 'zero'] = np.sum(gains == 0)
        res.at[i_l, 'total'] = res.at[i_l, 'wins'] + res.at[i_l, 'losses'] + res.at[i_l, 'zero']
        res.at[i_l, 'wins/losses'] = res.at[i_l, 'wins'] / res.at[i_l, 'losses']
        a_r = np.vstack(subset['annualized_returns'])
        res.at[i_l, 'a_r_mean'] = np.mean(a_r)
        res.at[i_l, 'a_r_median'] = np.median(a_r)
        res.at[i_l, 'a_r_std'] = np.std(a_r)
        # Written every outer iteration so partial results survive interruption.
        res_all.to_csv(f'./results/res_all_buy_{params["buy"]}_dividends_{params["dividends"]}.csv')
        res.to_csv(f'./results/res_buy_{params["buy"]}_dividends_{params["dividends"]}.csv')


if __name__ == '__main__':

    parser = argparse.ArgumentParser()
    parser.add_argument("config_file", help="path to config file")
    args = parser.parse_args()
    # FIX: close the config file deterministically instead of relying on GC.
    with open('./config/' + args.config_file + '.json', 'r') as f:
        params = json.load(f)
    run(params)
import osNEWLINEimport matplotlib.pyplot as pltNEWLINEimport numpy as npNEWLINEimport subprocessNEWLINEfrom tempfile import TemporaryDirectoryNEWLINENEWLINESEMIHDP_HOME_DIR = os.path.dirname(os.path.realpath(__file__))NEWLINESEMIHDP_EXEC = os.path.join(SEMIHDP_HOME_DIR, 'build/run_from_file')NEWLINEBASE_CMD = SEMIHDP_EXEC + ' ' + "{0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11}"NEWLINEPARAMS_FILE = os.path.join(SEMIHDP_HOME_DIR, 'semihdp_params.asciipb')NEWLINENEWLINENEWLINEdef run_mcmc_from_files(data_path, dens_grid_path, output_path, seed,NEWLINE niter, nburn, thin, update_c="full"):NEWLINE chainfile = os.path.join(output_path, "chains.recordio")NEWLINE c_file = os.path.join(output_path, "c.txt")NEWLINE latent_vars_file = os.path.join(output_path, "latent_vars.txt")NEWLINE dens_path = os.path.join(output_path, "dens/")NEWLINE os.makedirs(dens_path)NEWLINE cmd = BASE_CMD.format(NEWLINE data_path, PARAMS_FILE, chainfile, c_file,NEWLINE latent_vars_file, dens_grid_path, NEWLINE dens_path, seed, niter, nburn, thin, update_c)NEWLINE NEWLINE cmd = cmd.split(" ")NEWLINE subprocess.call(cmd, cwd=SEMIHDP_HOME_DIR)NEWLINE return NEWLINENEWLINENEWLINEdef load_output(output_path, ngroups):NEWLINE c_file = os.path.join(output_path, "c.txt")NEWLINE latent_vars_file = os.path.join(output_path, "latent_vars.txt")NEWLINE dens_path = os.path.join(output_path, "dens/")NEWLINENEWLINE c = np.loadtxt(c_file, delimiter=",")NEWLINE latent_vars = np.loadtxt(latent_vars_file, delimiter=",")NEWLINE log_dens = []NEWLINE for i in range(ngroups):NEWLINE fname = os.path.join(dens_path, "group_{0}.csv".format(i))NEWLINE log_dens.append(np.loadtxt(fname, delimiter=","))NEWLINE return c, latent_vars, log_dens NEWLINENEWLINENEWLINEdef run_mcmc(data: list, dens_grid: np.array, seed: int,NEWLINE niter=1000, nburn=1000, thin=10, update_c="full"):NEWLINE """NEWLINE Runs the semihpd sampler by calling the executable from a subprocess.NEWLINE ArgumentsNEWLINE ---------NEWLINE data: list of np.arrays, 
each entry is the data in one of the groupsNEWLINE dens_grid: np.array, the grid on which to evaluate the density of all the groupsNEWLINE seed: int, the seed for the random number generatorNEWLINE niter: int, number of iterations to run the samplerNEWLINE nburn: int, number of burn-in iterations NEWLINE thin: int, thinning factorNEWLINE update_c: str, either "full", "metro_base" or "metro_dist". NEWLINE The update rule for the restourants allocationsNEWLINENEWLINE The sampler will be ran for niter + nburn iterations.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE rest_allocs: np.array, of dimension [num_iter, num_groups]NEWLINE The parameters c_i's for each iterationNEWLINE latent_vars: np.array, of dimension [num_iter, 4] the colums areNEWLINE group_id, datum_id, mean (resp. variance) of the latent variable NEWLINE associated to the observationNEWLINE log_dens: list of np.arrays, each entry is the evaluation of log_density of NEWLINE one of the groups in each of the mcmc iterationsNEWLINE """NEWLINENEWLINE ngroups = len(data)NEWLINE data_ = np.vstack([NEWLINE np.column_stack([np.ones_like(x) * i, x]) for i, x in enumerate(data)])NEWLINE with TemporaryDirectory(prefix=SEMIHDP_HOME_DIR+"/") as tmpdir:NEWLINE data_path = os.path.join(tmpdir, 'data.txt')NEWLINE np.savetxt(data_path, data_, delimiter=",")NEWLINE grid_path = os.path.join(tmpdir, 'grid.txt')NEWLINE np.savetxt(grid_path, dens_grid, delimiter=",")NEWLINENEWLINE run_mcmc_from_files(data_path, grid_path, tmpdir, niter, NEWLINE nburn, thin, update_c)NEWLINENEWLINE return load_output(tmpdir, ngroups)NEWLINENEWLINE NEWLINEif __name__ == "__main__":NEWLINE seed = 132312NEWLINE data = [np.random.normal(0, 1, size=100),NEWLINE np.random.normal(0, 1, size=100)]NEWLINE dens_grid = np.linspace(-5, 5, 100)NEWLINE c, latent_vars, log_dens = run_mcmc(data, dens_grid, seed)NEWLINE plt.plot(dens_grid, np.exp(np.mean(log_dens[0], axis=0)))NEWLINE plt.show()NEWLINENEWLINE |
from os import mkdirNEWLINEfrom os.path import existsNEWLINENEWLINEfrom .dist_tree import DistTreeNEWLINEfrom ...typehint import *NEWLINENEWLINENEWLINEdef main(conf: TConf):NEWLINE """ Create dist-side tree (all empty folders under `dst_root`) """NEWLINE _precheck(conf)NEWLINE NEWLINE # create main folders under dst_root.NEWLINE mkdir(conf['build']['dist_dir'])NEWLINE mkdir(conf['build']['dist_dir'] + '/' + 'build')NEWLINE mkdir(conf['build']['dist_dir'] + '/' + 'lib')NEWLINE mkdir(conf['build']['dist_dir'] + '/' + 'src')NEWLINE mkdir(conf['build']['dist_dir'] + '/' + 'src' + '/' + '.pylauncher_conf')NEWLINE NEWLINE dist_tree = DistTree()NEWLINE NEWLINE """NEWLINE Add to source dirs list:NEWLINE conf:NEWLINE build:NEWLINE + proj_dirNEWLINE + targetNEWLINE + attachmentsNEWLINENEWLINE Do not add to source dirs list:NEWLINE conf:NEWLINE build:NEWLINE - dist_dirNEWLINE - iconNEWLINE - readmeNEWLINE - module_pathsNEWLINE """NEWLINE dist_tree.add_src_dirs(NEWLINE conf['build']['proj_dir'],NEWLINE *(v['file'] for v in conf['build']['launchers'].values()),NEWLINE *(k for k, v in conf['build']['attachments'].items()NEWLINE if v['path'] == ''),NEWLINE )NEWLINE NEWLINE src_root = dist_tree.suggest_src_root()NEWLINE dst_root = conf['build']['dist_dir']NEWLINE print(f'the suggested source root directory is: {src_root}', ':v2')NEWLINE NEWLINE dist_tree.build_dst_dirs(src_root, f'{dst_root}/src')NEWLINE NEWLINE # init global path modelsNEWLINE _init_path_models(src_root, dst_root, conf)NEWLINE NEWLINE return src_root, dst_rootNEWLINENEWLINENEWLINEdef _precheck(conf: TConf):NEWLINE assert not exists(d := conf['build']['dist_dir']), (NEWLINE 'The target distribution directory ({}) already exists, please appoint 'NEWLINE 'another (non-existent) folder to distribute.'.format(d)NEWLINE )NEWLINE NEWLINE paths_not_exist = []NEWLINE for src_path in conf['build']['attachments']:NEWLINE if not exists(src_path):NEWLINE paths_not_exist.append(src_path)NEWLINE if paths_not_exist:NEWLINE 
print(':l', paths_not_exist)NEWLINE raise FileNotFoundError(NEWLINE 'Please make sure all required paths in `conf["build"]'NEWLINE '["attachments"]` are existed.'NEWLINE )NEWLINE NEWLINE # if conf['build']['venv']['enabled']:NEWLINE # from .embed_python import EmbedPythonManagerNEWLINE # builder = EmbedPythonManager(NEWLINE # pyversion=conf['build']['venv']['python_version']NEWLINE # )NEWLINE # # try to get a valid embed python path, if failed, this method willNEWLINE # # raise an exception to terminate process.NEWLINE # builder.get_embed_python_dir()NEWLINE #NEWLINE # mode = conf['build']['venv']['mode']NEWLINE # if mode == 'source_venv':NEWLINE # if venv_path := conf['build']['venv']['options'][mode]['path']:NEWLINE # if venv_path.startswith(src_path := conf['build']['proj_dir']):NEWLINE # lk.logt('[W2015]', f'''NEWLINE # Please do not put the Python virtual environment folderNEWLINE # in your source code folder! This will make the third-NEWLINE # party libraries to be encrypted, which usually leads toNEWLINE # unpredicatable errors.NEWLINE # You can put venv aside with the source code dir, thisNEWLINE # is the recommended parctice.NEWLINE #NEWLINE # Current venv dir: {venv_path}NEWLINE # Suggest moved to: {ospath.dirname(src_path)}/venvNEWLINE # ''')NEWLINE # if input('Continue the process? (y/n): ').lower() != 'y':NEWLINE # raise SystemExitNEWLINENEWLINENEWLINEdef _init_path_models(src_root, dst_root, conf: TConf):NEWLINE from ...path_model import dst_modelNEWLINE from ...path_model import src_modelNEWLINE from ...path_model import relpathNEWLINE NEWLINE src_model.init(NEWLINE src_root=src_root, prj_root=conf['build']['proj_dir'],NEWLINE readme=conf['build']['readme']NEWLINE )NEWLINE dst_model.init(NEWLINE dst_root=dst_root,NEWLINE prj_relroot=relpath(src_model.prj_root, src_model.src_root),NEWLINE launcher_name=conf['build']['launcher_name'],NEWLINE readme=conf['build']['readme']NEWLINE )NEWLINE |
NEWLINE"""NEWLINEPERIODSNEWLINE"""NEWLINENEWLINEnumPeriods = 60NEWLINENEWLINE"""NEWLINESTOPSNEWLINE"""NEWLINENEWLINEnumStations = 6NEWLINENEWLINEstation_names = (NEWLINE "Hamburg Hbf", # 0NEWLINE "Landwehr", # 1NEWLINE "Hasselbrook", # 2NEWLINE "Wansbeker Chaussee*", # 3NEWLINE "Friedrichsberg*", # 4NEWLINE "Barmbek*", # 5NEWLINE )NEWLINENEWLINEnumStops = 12NEWLINENEWLINEstops_position = (NEWLINE (0, 0), # Stop 0NEWLINE (2, 0), # Stop 1NEWLINE (3, 0), # Stop 2NEWLINE (4, 0), # Stop 3NEWLINE (5, 0), # Stop 4NEWLINE (7, 0), # Stop 5NEWLINE (7, 1), # Stop 6NEWLINE (15, 1), # Stop 7NEWLINE (13, 1), # Stop 8NEWLINE (12, 1), # Stop 9NEWLINE (11, 1), # Stop 10NEWLINE (10, 1), # Stop 11NEWLINE )NEWLINENEWLINEstops_distance = (NEWLINE (0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 0NEWLINE (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 1NEWLINE (0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 2NEWLINE (0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0), # Stop 3NEWLINE (0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0), # Stop 4NEWLINE (0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0), # Stop 5NEWLINE (0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0), # Stop 6NEWLINE (0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0), # Stop 7NEWLINE (0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0), # Stop 8NEWLINE (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0), # Stop 9NEWLINE (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2), # Stop 10NEWLINE (1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # Stop 11NEWLINE )NEWLINENEWLINEstation_start = 0NEWLINENEWLINE"""NEWLINETRAMSNEWLINE"""NEWLINENEWLINEnumTrams = 6NEWLINENEWLINEtram_capacity = 514NEWLINENEWLINEtram_capacity_cargo = 304NEWLINENEWLINEtram_capacity_min_passenger = 208NEWLINENEWLINEtram_capacity_min_cargo = 0NEWLINENEWLINEtram_speed = 1NEWLINENEWLINEtram_headway = 1NEWLINENEWLINEtram_min_service = 1NEWLINENEWLINEtram_max_service = 10NEWLINENEWLINEmin_time_next_tram = 0.333NEWLINENEWLINEtram_travel_deviation = 0.167NEWLINENEWLINE"""NEWLINEPASSENGERSNEWLINE"""NEWLINENEWLINEpassenger_set = 
"pas-20210421-2109-int6000000000000001e-1"NEWLINENEWLINEpassenger_service_time_board = 0.0145NEWLINENEWLINEpassenger_service_time_alight = 0.0145NEWLINENEWLINE"""NEWLINECARGONEWLINE"""NEWLINENEWLINEnumCargo = 50NEWLINENEWLINEcargo_size = 4NEWLINENEWLINEcargo_station_destination = (NEWLINE 4, # 0NEWLINE 4, # 1NEWLINE 4, # 2NEWLINE 4, # 3NEWLINE 3, # 4NEWLINE 3, # 5NEWLINE 4, # 6NEWLINE 4, # 7NEWLINE 3, # 8NEWLINE 3, # 9NEWLINE 5, # 10NEWLINE 4, # 11NEWLINE 5, # 12NEWLINE 4, # 13NEWLINE 4, # 14NEWLINE 5, # 15NEWLINE 3, # 16NEWLINE 5, # 17NEWLINE 3, # 18NEWLINE 4, # 19NEWLINE 4, # 20NEWLINE 3, # 21NEWLINE 5, # 22NEWLINE 5, # 23NEWLINE 4, # 24NEWLINE 3, # 25NEWLINE 4, # 26NEWLINE 5, # 27NEWLINE 4, # 28NEWLINE 3, # 29NEWLINE 3, # 30NEWLINE 5, # 31NEWLINE 5, # 32NEWLINE 5, # 33NEWLINE 5, # 34NEWLINE 3, # 35NEWLINE 3, # 36NEWLINE 3, # 37NEWLINE 3, # 38NEWLINE 5, # 39NEWLINE 3, # 40NEWLINE 4, # 41NEWLINE 3, # 42NEWLINE 5, # 43NEWLINE 4, # 44NEWLINE 5, # 45NEWLINE 5, # 46NEWLINE 4, # 47NEWLINE 5, # 48NEWLINE 3, # 49NEWLINE )NEWLINENEWLINEcargo_release = (NEWLINE 0, # 0NEWLINE 0, # 1NEWLINE 0, # 2NEWLINE 1, # 3NEWLINE 1, # 4NEWLINE 1, # 5NEWLINE 1, # 6NEWLINE 1, # 7NEWLINE 1, # 8NEWLINE 1, # 9NEWLINE 2, # 10NEWLINE 2, # 11NEWLINE 2, # 12NEWLINE 2, # 13NEWLINE 2, # 14NEWLINE 3, # 15NEWLINE 3, # 16NEWLINE 4, # 17NEWLINE 4, # 18NEWLINE 4, # 19NEWLINE 5, # 20NEWLINE 5, # 21NEWLINE 5, # 22NEWLINE 5, # 23NEWLINE 6, # 24NEWLINE 6, # 25NEWLINE 6, # 26NEWLINE 6, # 27NEWLINE 7, # 28NEWLINE 7, # 29NEWLINE 7, # 30NEWLINE 7, # 31NEWLINE 7, # 32NEWLINE 9, # 33NEWLINE 9, # 34NEWLINE 9, # 35NEWLINE 9, # 36NEWLINE 9, # 37NEWLINE 9, # 38NEWLINE 9, # 39NEWLINE 10, # 40NEWLINE 10, # 41NEWLINE 10, # 42NEWLINE 10, # 43NEWLINE 11, # 44NEWLINE 11, # 45NEWLINE 11, # 46NEWLINE 11, # 47NEWLINE 11, # 48NEWLINE 11, # 49NEWLINE )NEWLINENEWLINEcargo_station_deadline = (NEWLINE 24, # 0NEWLINE 14, # 1NEWLINE 11, # 2NEWLINE 36, # 3NEWLINE 17, # 4NEWLINE 11, # 5NEWLINE 41, # 6NEWLINE 43, # 7NEWLINE 19, # 
8NEWLINE 49, # 9NEWLINE 46, # 10NEWLINE 39, # 11NEWLINE 49, # 12NEWLINE 46, # 13NEWLINE 58, # 14NEWLINE 13, # 15NEWLINE 35, # 16NEWLINE 45, # 17NEWLINE 19, # 18NEWLINE 14, # 19NEWLINE 29, # 20NEWLINE 48, # 21NEWLINE 44, # 22NEWLINE 22, # 23NEWLINE 16, # 24NEWLINE 16, # 25NEWLINE 46, # 26NEWLINE 40, # 27NEWLINE 29, # 28NEWLINE 17, # 29NEWLINE 25, # 30NEWLINE 17, # 31NEWLINE 50, # 32NEWLINE 56, # 33NEWLINE 32, # 34NEWLINE 37, # 35NEWLINE 33, # 36NEWLINE 39, # 37NEWLINE 19, # 38NEWLINE 19, # 39NEWLINE 20, # 40NEWLINE 20, # 41NEWLINE 57, # 42NEWLINE 57, # 43NEWLINE 22, # 44NEWLINE 56, # 45NEWLINE 21, # 46NEWLINE 21, # 47NEWLINE 21, # 48NEWLINE 51, # 49NEWLINE )NEWLINENEWLINEcargo_max_delay = 3NEWLINENEWLINEcargo_service_time_load = 0.3333333333333333NEWLINENEWLINEcargo_service_time_unload = 0.25NEWLINENEWLINE"""NEWLINEparameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.htmlNEWLINE"""NEWLINENEWLINE#initial entropyNEWLINEentropy = 258194110137029475889902652135037600173NEWLINENEWLINE#index for seed sequence childNEWLINEchild_seed_index = (NEWLINE 0, # 0NEWLINE )NEWLINENEWLINE"""NEWLINEResults from timetablingNEWLINE"""NEWLINENEWLINEscheme = "SV"NEWLINENEWLINEmethod = "timetabling_benchmark"NEWLINENEWLINEpassengerData = "0-rep"NEWLINENEWLINEdownstream_cargo = FalseNEWLINENEWLINEdelivery_optional = TrueNEWLINENEWLINEassignment_method = "timetabling_benchmark"NEWLINENEWLINEoperating = (NEWLINE True, # 0NEWLINE True, # 1NEWLINE True, # 2NEWLINE True, # 3NEWLINE True, # 4NEWLINE True, # 5NEWLINE )NEWLINENEWLINEtram_tour = (NEWLINE (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), # 0NEWLINE (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), # 1NEWLINE (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), # 2NEWLINE (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), # 3NEWLINE (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), # 4NEWLINE (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), # 5NEWLINE )NEWLINENEWLINEtram_time_arrival = (NEWLINE (2, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 37), # 
0NEWLINE (6, 10, 13, 16, 19, 22, 25, 28, 31, 34, 37, 41), # 1NEWLINE (10, 14, 17, 20, 23, 26, 29, 32, 35, 38, 41, 45), # 2NEWLINE (14, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 49), # 3NEWLINE (18, 22, 25, 28, 31, 34, 37, 40, 43, 46, 49, 53), # 4NEWLINE (22, 26, 29, 32, 35, 38, 41, 44, 47, 50, 53, 57), # 5NEWLINE )NEWLINENEWLINEtram_time_departure = (NEWLINE (4, 8, 11, 14, 17, 20, 23, 26, 29, 32, 35, 39), # 0NEWLINE (8, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 43), # 1NEWLINE (12, 16, 19, 22, 25, 28, 31, 34, 37, 40, 43, 47), # 2NEWLINE (16, 20, 23, 26, 29, 32, 35, 38, 41, 44, 47, 51), # 3NEWLINE (20, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51, 55), # 4NEWLINE (24, 28, 31, 34, 37, 40, 43, 46, 49, 52, 55, 59), # 5NEWLINE )NEWLINENEWLINEcargo_tram_assignment = (NEWLINE 0, # 0NEWLINE 0, # 1NEWLINE 0, # 2NEWLINE 0, # 3NEWLINE 0, # 4NEWLINE 0, # 5NEWLINE 0, # 6NEWLINE 0, # 7NEWLINE 0, # 8NEWLINE 0, # 9NEWLINE 0, # 10NEWLINE 0, # 11NEWLINE 0, # 12NEWLINE 0, # 13NEWLINE 0, # 14NEWLINE 0, # 15NEWLINE 0, # 16NEWLINE 1, # 17NEWLINE 1, # 18NEWLINE 1, # 19NEWLINE 1, # 20NEWLINE 1, # 21NEWLINE 1, # 22NEWLINE 1, # 23NEWLINE 1, # 24NEWLINE 1, # 25NEWLINE 1, # 26NEWLINE 1, # 27NEWLINE 1, # 28NEWLINE 1, # 29NEWLINE 1, # 30NEWLINE 1, # 31NEWLINE 1, # 32NEWLINE 2, # 33NEWLINE 2, # 34NEWLINE 2, # 35NEWLINE 2, # 36NEWLINE 2, # 37NEWLINE 2, # 38NEWLINE 2, # 39NEWLINE 2, # 40NEWLINE 2, # 41NEWLINE 2, # 42NEWLINE 2, # 43NEWLINE 2, # 44NEWLINE 2, # 45NEWLINE 2, # 46NEWLINE 2, # 47NEWLINE 2, # 48NEWLINE 2, # 49NEWLINE )NEWLINE |
#!/usr/bin/env/ pythonNEWLINEprint ("Hola mundo")NEWLINENEWLINE# TIPOS DE DATOSNEWLINENEWLINE# Esto e unha cadeaNEWLINEc = "Hola mundo"NEWLINENEWLINE# Esto e un enteiroNEWLINEe = 23NEWLINENEWLINE# Esto e un longNEWLINElong = 23NEWLINENEWLINE# Numero en octalNEWLINEoctal = 0o27NEWLINENEWLINE# Numero en HexadecimalNEWLINEhexDecimal = 0x3452334NEWLINENEWLINE# Numero con decimalesNEWLINEreal = 23.334223NEWLINENEWLINE# Numero con decimales en notacion cientificaNEWLINEcientifico = 0.1e-3NEWLINENEWLINE# Podese comprobar coa funcion typeNEWLINEprint(type(c))NEWLINEprint(type(e))NEWLINEprint(type(long))NEWLINEprint(octal)NEWLINEprint(hexDecimal)NEWLINENEWLINENEWLINE |
# Third-partyNEWLINEimport astropy.units as uNEWLINENEWLINENEWLINEdef quantity_from_hdf5(dset):NEWLINE """NEWLINE Return an Astropy Quantity object from a key in an HDF5 file,NEWLINE group, or dataset. This checks to see if the input file/group/datasetNEWLINE contains a ``'unit'`` attribute (e.g., in `f.attrs`).NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE dset : :class:`h5py.DataSet`NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE q : `astropy.units.Quantity`, `numpy.ndarray`NEWLINE If a unit attribute exists, this returns a Quantity. Otherwise, itNEWLINE returns a numpy array.NEWLINE """NEWLINE if 'unit' in dset.attrs and dset.attrs['unit'] is not None:NEWLINE unit = u.Unit(dset.attrs['unit'])NEWLINE else:NEWLINE unit = 1.NEWLINENEWLINE return dset[:] * unitNEWLINENEWLINENEWLINEdef quantity_to_hdf5(f, key, q):NEWLINE """NEWLINE Turn an Astropy Quantity object into something we can write out toNEWLINE an HDF5 file.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE f : :class:`h5py.File`, :class:`h5py.Group`, :class:`h5py.DataSet`NEWLINE key : strNEWLINE The name.NEWLINE q : float, `astropy.units.Quantity`NEWLINE The quantity.NEWLINENEWLINE """NEWLINENEWLINE if hasattr(q, 'unit'):NEWLINE f[key] = q.valueNEWLINE f[key].attrs['unit'] = str(q.unit)NEWLINENEWLINE else:NEWLINE f[key] = qNEWLINE f[key].attrs['unit'] = ""NEWLINE |
# Twitter AUTH:NEWLINEAPP_KEY = 'APP_KEY_HERE' NEWLINEAPP_SECRET = 'APP_SECRET_HERE' NEWLINEOAUTH_TOKEN = 'TOKEN_HERE'NEWLINEOAUTH_TOKEN_SECRET = 'TOKEN_SECRET_HERE'NEWLINENEWLINE# Telegram options:NEWLINETELEGRAM_CHANNEL = 'CHANNEL_NAME_HERE'NEWLINETELEGRAM_TOKEN = 'TOKEN_HERE'NEWLINENEWLINE# Misc:NEWLINETWITTER_USER_NAME = 'USER_NAME_HERE'NEWLINEMSG = '<b>{NAME}</b>:\n{TEXT}\n\n<a href="{URL}">Source</a>'NEWLINENEWLINE# Technical stuff:NEWLINETWEET_BASE_URL = 'https://twitter.com/i/web/status/'NEWLINESTATE_FILE = 'state.p'NEWLINESLEEP = 3NEWLINETG_LINK = 'https://api.telegram.org/bot{TOKEN}/sendMessage?chat_id=@{CHANNEL}&text={MESSAGE}&parse_mode=html'NEWLINEUNSUPPORTED_TAGS = ['<span class="twython-tweet-suffix">', '<span class="twython-tweet-prefix">', '</span>', 'class="twython-url"', 'class="twython-media"', 'class="twython-mention"', 'class="twython-hashtag"', 'class="twython-symbol"', ]NEWLINE |
# Copyright (c) 2017-present, Facebook, Inc.NEWLINE# All rights reserved.NEWLINE#NEWLINE# This source code is licensed under the license found in the LICENSE file inNEWLINE# the root directory of this source tree. An additional grant of patent rightsNEWLINE# can be found in the PATENTS file in the same directory.NEWLINENEWLINEfrom .dictionary import Dictionary, TruncatedDictionaryNEWLINEfrom .fairseq_dataset import FairseqDatasetNEWLINEfrom .backtranslation_dataset import BacktranslationDatasetNEWLINEfrom .concat_dataset import ConcatDatasetNEWLINEfrom .indexed_dataset import IndexedCachedDataset, IndexedDataset, IndexedRawTextDatasetNEWLINEfrom .language_pair_dataset import LanguagePairDataset, LanguagePairDatasetWithIndexNEWLINEfrom .lm_context_window_dataset import LMContextWindowDatasetNEWLINEfrom .monolingual_dataset import MonolingualDatasetNEWLINEfrom .noising import NoisingDatasetNEWLINEfrom .round_robin_zip_datasets import RoundRobinZipDatasetsNEWLINEfrom .token_block_dataset import TokenBlockDatasetNEWLINEfrom .transform_eos_dataset import TransformEosDatasetNEWLINEfrom .transform_eos_lang_pair_dataset import TransformEosLangPairDatasetNEWLINENEWLINEfrom .iterators import (NEWLINE CountingIterator,NEWLINE EpochBatchIterator,NEWLINE GroupedIterator,NEWLINE ShardedIterator,NEWLINE)NEWLINENEWLINE__all__ = [NEWLINE 'BacktranslationDataset',NEWLINE 'ConcatDataset',NEWLINE 'CountingIterator',NEWLINE 'Dictionary',NEWLINE 'EpochBatchIterator',NEWLINE 'FairseqDataset',NEWLINE 'GroupedIterator',NEWLINE 'IndexedCachedDataset',NEWLINE 'IndexedDataset',NEWLINE 'IndexedRawTextDataset',NEWLINE 'LanguagePairDataset',NEWLINE 'LanguagePairDatasetWithIndex'NEWLINE 'LMContextWindowDataset',NEWLINE 'MonolingualDataset',NEWLINE 'NoisingDataset',NEWLINE 'RoundRobinZipDatasets',NEWLINE 'ShardedIterator',NEWLINE 'TokenBlockDataset',NEWLINE 'TransformEosDataset',NEWLINE 'TransformEosLangPairDataset',NEWLINE]NEWLINE |
# encoding: utf-8NEWLINE"""Event loop integration for the ZeroMQ-based kernels."""NEWLINENEWLINE# Copyright (c) IPython Development Team.NEWLINE# Distributed under the terms of the Modified BSD License.NEWLINENEWLINEfrom functools import partialNEWLINEimport osNEWLINEimport sysNEWLINEimport platformNEWLINENEWLINEimport zmqNEWLINENEWLINEfrom distutils.version import LooseVersion as VNEWLINEfrom traitlets.config.application import ApplicationNEWLINENEWLINENEWLINEdef _use_appnope():NEWLINE """Should we use appnope for dealing with OS X app nap?NEWLINENEWLINE Checks if we are on OS X 10.9 or greater.NEWLINE """NEWLINE return sys.platform == 'darwin' and V(platform.mac_ver()[0]) >= V('10.9')NEWLINENEWLINENEWLINEdef _notify_stream_qt(kernel, stream):NEWLINENEWLINE from IPython.external.qt_for_kernel import QtCoreNEWLINENEWLINE def process_stream_events():NEWLINE """fall back to main loop when there's a socket event"""NEWLINE # call flush to ensure that the stream doesn't lose eventsNEWLINE # due to our consuming of the edge-triggered FDNEWLINE # flush returns the number of events consumed.NEWLINE # if there were any, wake it upNEWLINE if stream.flush(limit=1):NEWLINE notifier.setEnabled(False)NEWLINE kernel.app.quit()NEWLINENEWLINE fd = stream.getsockopt(zmq.FD)NEWLINE notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app)NEWLINE notifier.activated.connect(process_stream_events)NEWLINE # there may already be unprocessed events waiting.NEWLINE # these events will not wake zmq's edge-triggered FDNEWLINE # since edge-triggered notification only occurs on new i/o activity.NEWLINE # process all the waiting events immediatelyNEWLINE # so we start in a clean state ensuring that any new i/o events will notify.NEWLINE # schedule first call on the eventloop as soon as it's running,NEWLINE # so we don't block here processing eventsNEWLINE timer = QtCore.QTimer(kernel.app)NEWLINE timer.setSingleShot(True)NEWLINE 
timer.timeout.connect(process_stream_events)NEWLINE timer.start(0)NEWLINENEWLINE# mapping of keys to loop functionsNEWLINEloop_map = {NEWLINE 'inline': None,NEWLINE 'nbagg': None,NEWLINE 'notebook': None,NEWLINE 'ipympl': None,NEWLINE 'widget': None,NEWLINE None: None,NEWLINE}NEWLINENEWLINEdef register_integration(*toolkitnames):NEWLINE """Decorator to register an event loop to integrate with the IPython kernelNEWLINENEWLINE The decorator takes names to register the event loop as for the %gui magic.NEWLINE You can provide alternative names for the same toolkit.NEWLINENEWLINE The decorated function should take a single argument, the IPython kernelNEWLINE instance, arrange for the event loop to call ``kernel.do_one_iteration()``NEWLINE at least every ``kernel._poll_interval`` seconds, and start the event loop.NEWLINENEWLINE :mod:`ipykernel.eventloops` provides and registers such functionsNEWLINE for a few common event loops.NEWLINE """NEWLINE def decorator(func):NEWLINE for name in toolkitnames:NEWLINE loop_map[name] = funcNEWLINENEWLINE func.exit_hook = lambda kernel: NoneNEWLINENEWLINE def exit_decorator(exit_func):NEWLINE """@func.exit is now a decoratorNEWLINENEWLINE to register a function to be called on exitNEWLINE """NEWLINE func.exit_hook = exit_funcNEWLINE return exit_funcNEWLINENEWLINE func.exit = exit_decoratorNEWLINE return funcNEWLINENEWLINE return decoratorNEWLINENEWLINENEWLINEdef _loop_qt(app):NEWLINE """Inner-loop for running the Qt eventloopNEWLINENEWLINE Pulled from guisupport.start_event_loop in IPython < 5.2,NEWLINE since IPython 5.2 only checks `get_ipython().active_eventloop` is defined,NEWLINE rather than if the eventloop is actually running.NEWLINE """NEWLINE app._in_event_loop = TrueNEWLINE app.exec_()NEWLINE app._in_event_loop = FalseNEWLINENEWLINENEWLINE@register_integration('qt4')NEWLINEdef loop_qt4(kernel):NEWLINE """Start a kernel with PyQt4 event loop integration."""NEWLINENEWLINE from IPython.lib.guisupport import 
get_app_qt4NEWLINENEWLINE kernel.app = get_app_qt4([" "])NEWLINE kernel.app.setQuitOnLastWindowClosed(False)NEWLINENEWLINE # Only register the eventloop for the shell stream because doingNEWLINE # it for the control stream is generating a bunch of unnecessaryNEWLINE # warnings on Windows.NEWLINE _notify_stream_qt(kernel, kernel.shell_streams[0])NEWLINENEWLINE _loop_qt(kernel.app)NEWLINENEWLINENEWLINE@register_integration('qt', 'qt5')NEWLINEdef loop_qt5(kernel):NEWLINE """Start a kernel with PyQt5 event loop integration."""NEWLINE os.environ['QT_API'] = 'pyqt5'NEWLINE return loop_qt4(kernel)NEWLINENEWLINENEWLINE# exit and watch are the same for qt 4 and 5NEWLINE@loop_qt4.exitNEWLINE@loop_qt5.exitNEWLINEdef loop_qt_exit(kernel):NEWLINE kernel.app.exit()NEWLINENEWLINENEWLINEdef _loop_wx(app):NEWLINE """Inner-loop for running the Wx eventloopNEWLINENEWLINE Pulled from guisupport.start_event_loop in IPython < 5.2,NEWLINE since IPython 5.2 only checks `get_ipython().active_eventloop` is defined,NEWLINE rather than if the eventloop is actually running.NEWLINE """NEWLINE app._in_event_loop = TrueNEWLINE app.MainLoop()NEWLINE app._in_event_loop = FalseNEWLINENEWLINENEWLINE@register_integration('wx')NEWLINEdef loop_wx(kernel):NEWLINE """Start a kernel with wx event loop support."""NEWLINENEWLINE import wxNEWLINENEWLINE # Wx uses millisecondsNEWLINE poll_interval = int(1000 * kernel._poll_interval)NEWLINENEWLINE def wake():NEWLINE """wake from wx"""NEWLINE for stream in kernel.shell_streams:NEWLINE if stream.flush(limit=1):NEWLINE kernel.app.ExitMainLoop()NEWLINE returnNEWLINENEWLINE # We have to put the wx.Timer in a wx.Frame for it to fire properly.NEWLINE # We make the Frame hidden when we create it in the main app below.NEWLINE class TimerFrame(wx.Frame):NEWLINE def __init__(self, func):NEWLINE wx.Frame.__init__(self, None, -1)NEWLINE self.timer = wx.Timer(self)NEWLINE # Units for the timer are in millisecondsNEWLINE self.timer.Start(poll_interval)NEWLINE 
self.Bind(wx.EVT_TIMER, self.on_timer)NEWLINE self.func = funcNEWLINENEWLINE def on_timer(self, event):NEWLINE self.func()NEWLINENEWLINE # We need a custom wx.App to create our Frame subclass that has theNEWLINE # wx.Timer to defer back to the tornado event loop.NEWLINE class IPWxApp(wx.App):NEWLINE def OnInit(self):NEWLINE self.frame = TimerFrame(wake)NEWLINE self.frame.Show(False)NEWLINE return TrueNEWLINENEWLINE # The redirect=False here makes sure that wx doesn't replaceNEWLINE # sys.stdout/stderr with its own classes.NEWLINE if not (NEWLINE getattr(kernel, 'app', None)NEWLINE and isinstance(kernel.app, wx.App)NEWLINE ):NEWLINE kernel.app = IPWxApp(redirect=False)NEWLINENEWLINE # The import of wx on Linux sets the handler for signal.SIGINTNEWLINE # to 0. This is a bug in wx or gtk. We fix by just setting itNEWLINE # back to the Python default.NEWLINE import signalNEWLINE if not callable(signal.getsignal(signal.SIGINT)):NEWLINE signal.signal(signal.SIGINT, signal.default_int_handler)NEWLINENEWLINE _loop_wx(kernel.app)NEWLINENEWLINENEWLINE@loop_wx.exitNEWLINEdef loop_wx_exit(kernel):NEWLINE import wxNEWLINE wx.Exit()NEWLINENEWLINENEWLINE@register_integration('tk')NEWLINEdef loop_tk(kernel):NEWLINE """Start a kernel with the Tk event loop."""NEWLINENEWLINE from tkinter import Tk, READABLENEWLINENEWLINE app = Tk()NEWLINE # Capability detection:NEWLINE # per https://docs.python.org/3/library/tkinter.html#file-handlersNEWLINE # file handlers are not available on WindowsNEWLINE if hasattr(app, 'createfilehandler'):NEWLINE # A basic wrapper for structural similarity with the Windows versionNEWLINE class BasicAppWrapper(object):NEWLINE def __init__(self, app):NEWLINE self.app = appNEWLINE self.app.withdraw()NEWLINENEWLINE def process_stream_events(stream, *a, **kw):NEWLINE """fall back to main loop when there's a socket event"""NEWLINE if stream.flush(limit=1):NEWLINE app.tk.deletefilehandler(stream.getsockopt(zmq.FD))NEWLINE app.quit()NEWLINENEWLINE # For Tkinter, we 
create a Tk object and call its withdraw method.NEWLINE kernel.app_wrapper = BasicAppWrapper(app)NEWLINENEWLINE for stream in kernel.shell_streams:NEWLINE notifier = partial(process_stream_events, stream)NEWLINE # seems to be needed for tkNEWLINE notifier.__name__ = "notifier"NEWLINE app.tk.createfilehandler(stream.getsockopt(zmq.FD), READABLE, notifier)NEWLINE # schedule initial call after startNEWLINE app.after(0, notifier)NEWLINENEWLINE app.mainloop()NEWLINENEWLINE else:NEWLINE doi = kernel.do_one_iterationNEWLINE # Tk uses millisecondsNEWLINE poll_interval = int(1000 * kernel._poll_interval)NEWLINENEWLINE class TimedAppWrapper(object):NEWLINE def __init__(self, app, func):NEWLINE self.app = appNEWLINE self.app.withdraw()NEWLINE self.func = funcNEWLINENEWLINE def on_timer(self):NEWLINE self.func()NEWLINE self.app.after(poll_interval, self.on_timer)NEWLINENEWLINE def start(self):NEWLINE self.on_timer() # Call it once to get things going.NEWLINE self.app.mainloop()NEWLINENEWLINE kernel.app_wrapper = TimedAppWrapper(app, doi)NEWLINE kernel.app_wrapper.start()NEWLINENEWLINENEWLINE@loop_tk.exitNEWLINEdef loop_tk_exit(kernel):NEWLINE kernel.app_wrapper.app.destroy()NEWLINENEWLINENEWLINE@register_integration('gtk')NEWLINEdef loop_gtk(kernel):NEWLINE """Start the kernel, coordinating with the GTK event loop"""NEWLINE from .gui.gtkembed import GTKEmbedNEWLINENEWLINE gtk_kernel = GTKEmbed(kernel)NEWLINE gtk_kernel.start()NEWLINE kernel._gtk = gtk_kernelNEWLINENEWLINENEWLINE@loop_gtk.exitNEWLINEdef loop_gtk_exit(kernel):NEWLINE kernel._gtk.stop()NEWLINENEWLINENEWLINE@register_integration('gtk3')NEWLINEdef loop_gtk3(kernel):NEWLINE """Start the kernel, coordinating with the GTK event loop"""NEWLINE from .gui.gtk3embed import GTKEmbedNEWLINENEWLINE gtk_kernel = GTKEmbed(kernel)NEWLINE gtk_kernel.start()NEWLINE kernel._gtk = gtk_kernelNEWLINENEWLINENEWLINE@loop_gtk3.exitNEWLINEdef loop_gtk3_exit(kernel):NEWLINE 
kernel._gtk.stop()NEWLINENEWLINENEWLINE@register_integration('osx')NEWLINEdef loop_cocoa(kernel):NEWLINE """Start the kernel, coordinating with the Cocoa CFRunLoop event loopNEWLINE via the matplotlib MacOSX backend.NEWLINE """NEWLINE from ._eventloop_macos import mainloop, stopNEWLINENEWLINE real_excepthook = sys.excepthookNEWLINE def handle_int(etype, value, tb):NEWLINE """don't let KeyboardInterrupts look like crashes"""NEWLINE # wake the eventloop when we get a signalNEWLINE stop()NEWLINE if etype is KeyboardInterrupt:NEWLINE print("KeyboardInterrupt caught in CFRunLoop", file=sys.__stdout__)NEWLINE else:NEWLINE real_excepthook(etype, value, tb)NEWLINENEWLINE while not kernel.shell.exit_now:NEWLINE try:NEWLINE # double nested try/except, to properly catch KeyboardInterruptNEWLINE # due to pyzmq Issue #130NEWLINE try:NEWLINE # don't let interrupts during mainloop invoke crash_handler:NEWLINE sys.excepthook = handle_intNEWLINE mainloop(kernel._poll_interval)NEWLINE for stream in kernel.shell_streams:NEWLINE if stream.flush(limit=1):NEWLINE # events to process, return control to kernelNEWLINE returnNEWLINE except:NEWLINE raiseNEWLINE except KeyboardInterrupt:NEWLINE # Ctrl-C shouldn't crash the kernelNEWLINE print("KeyboardInterrupt caught in kernel", file=sys.__stdout__)NEWLINE finally:NEWLINE # ensure excepthook is restoredNEWLINE sys.excepthook = real_excepthookNEWLINENEWLINENEWLINE@loop_cocoa.exitNEWLINEdef loop_cocoa_exit(kernel):NEWLINE from ._eventloop_macos import stopNEWLINE stop()NEWLINENEWLINENEWLINE@register_integration('asyncio')NEWLINEdef loop_asyncio(kernel):NEWLINE '''Start a kernel with asyncio event loop support.'''NEWLINE import asyncioNEWLINE loop = asyncio.get_event_loop()NEWLINE # loop is already running (e.g. 
tornado 5), nothing left to doNEWLINE if loop.is_running():NEWLINE returnNEWLINENEWLINE if loop.is_closed():NEWLINE # main loop is closed, create a new oneNEWLINE loop = asyncio.new_event_loop()NEWLINE asyncio.set_event_loop(loop)NEWLINE loop._should_close = FalseNEWLINENEWLINE # pause eventloop when there's an event on a zmq socketNEWLINE def process_stream_events(stream):NEWLINE """fall back to main loop when there's a socket event"""NEWLINE if stream.flush(limit=1):NEWLINE loop.stop()NEWLINENEWLINE for stream in kernel.shell_streams:NEWLINE fd = stream.getsockopt(zmq.FD)NEWLINE notifier = partial(process_stream_events, stream)NEWLINE loop.add_reader(fd, notifier)NEWLINE loop.call_soon(notifier)NEWLINENEWLINE while True:NEWLINE error = NoneNEWLINE try:NEWLINE loop.run_forever()NEWLINE except KeyboardInterrupt:NEWLINE continueNEWLINE except Exception as e:NEWLINE error = eNEWLINE if loop._should_close:NEWLINE loop.close()NEWLINE if error is not None:NEWLINE raise errorNEWLINE breakNEWLINENEWLINENEWLINE@loop_asyncio.exitNEWLINEdef loop_asyncio_exit(kernel):NEWLINE """Exit hook for asyncio"""NEWLINE import asyncioNEWLINE loop = asyncio.get_event_loop()NEWLINENEWLINE @asyncio.coroutineNEWLINE def close_loop():NEWLINE if hasattr(loop, 'shutdown_asyncgens'):NEWLINE yield from loop.shutdown_asyncgens()NEWLINE loop._should_close = TrueNEWLINE loop.stop()NEWLINENEWLINE if loop.is_running():NEWLINE close_loop()NEWLINENEWLINE elif not loop.is_closed():NEWLINE loop.run_until_complete(close_loop)NEWLINE loop.close()NEWLINENEWLINENEWLINEdef enable_gui(gui, kernel=None):NEWLINE """Enable integration with a given GUI"""NEWLINE if gui not in loop_map:NEWLINE e = "Invalid GUI request %r, valid ones are:%s" % (gui, loop_map.keys())NEWLINE raise ValueError(e)NEWLINE if kernel is None:NEWLINE if Application.initialized():NEWLINE kernel = getattr(Application.instance(), 'kernel', None)NEWLINE if kernel is None:NEWLINE raise RuntimeError("You didn't specify a kernel,"NEWLINE " and no 
IPython Application with a kernel appears to be running."NEWLINE )NEWLINE loop = loop_map[gui]NEWLINE if loop and kernel.eventloop is not None and kernel.eventloop is not loop:NEWLINE raise RuntimeError("Cannot activate multiple GUI eventloops")NEWLINE kernel.eventloop = loopNEWLINE |
from __future__ import divisionNEWLINENEWLINEimport numpy as npNEWLINEfrom skimage.util.dtype import dtype_rangeNEWLINEfrom skimage import drawNEWLINEfrom skimage import measureNEWLINENEWLINEfrom .plotplugin import PlotPluginNEWLINEfrom ..canvastools import ThickLineToolNEWLINENEWLINENEWLINE__all__ = ['LineProfile']NEWLINENEWLINENEWLINEclass LineProfile(PlotPlugin):NEWLINE """Plugin to compute interpolated intensity under a scan line on an image.NEWLINENEWLINE See PlotPlugin and Plugin classes for additional details.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE maxdist : floatNEWLINE Maximum pixel distance allowed when selecting end point of scan line.NEWLINE limits : tuple or {None, 'image', 'dtype'}NEWLINE (minimum, maximum) intensity limits for plotted profile. The followingNEWLINE special values are defined:NEWLINENEWLINE None : rescale based on min/max intensity along selected scan line.NEWLINE 'image' : fixed scale based on min/max intensity in image.NEWLINE 'dtype' : fixed scale based on min/max intensity of image dtype.NEWLINE """NEWLINE name = 'Line Profile'NEWLINENEWLINE def __init__(self, maxdist=10, epsilon='deprecated',NEWLINE limits='image', **kwargs):NEWLINE super(LineProfile, self).__init__(**kwargs)NEWLINE self.maxdist = maxdistNEWLINE self._limit_type = limitsNEWLINE print(self.help())NEWLINENEWLINE def attach(self, image_viewer):NEWLINE super(LineProfile, self).attach(image_viewer)NEWLINENEWLINE image = image_viewer.original_imageNEWLINENEWLINE if self._limit_type == 'image':NEWLINE self.limits = (np.min(image), np.max(image))NEWLINE elif self._limit_type == 'dtype':NEWLINE self._limit_type = dtype_range[image.dtype.type]NEWLINE elif self._limit_type is None or len(self._limit_type) == 2:NEWLINE self.limits = self._limit_typeNEWLINE else:NEWLINE raise ValueError("Unrecognized `limits`: %s" % self._limit_type)NEWLINENEWLINE if not self._limit_type is None:NEWLINE self.ax.set_ylim(self.limits)NEWLINENEWLINE h, w = image.shape[0:2]NEWLINE x = 
[w / 3, 2 * w / 3]NEWLINE y = [h / 2] * 2NEWLINENEWLINE self.line_tool = ThickLineTool(self.image_viewer.ax,NEWLINE maxdist=self.maxdist,NEWLINE on_move=self.line_changed,NEWLINE on_change=self.line_changed)NEWLINE self.line_tool.end_points = np.transpose([x, y])NEWLINENEWLINE scan_data = measure.profile_line(image, NEWLINE *self.line_tool.end_points[:, ::-1])NEWLINE self.scan_data = scan_dataNEWLINE if scan_data.ndim == 1:NEWLINE scan_data = scan_data[:, np.newaxis]NEWLINENEWLINE self.reset_axes(scan_data)NEWLINENEWLINE self._autoscale_view()NEWLINENEWLINE def help(self):NEWLINE helpstr = ("Line profile tool",NEWLINE "+ and - keys or mouse scroll changes width of scan line.",NEWLINE "Select and drag ends of the scan line to adjust it.")NEWLINE return '\n'.join(helpstr)NEWLINENEWLINE def get_profiles(self):NEWLINE """Return intensity profile of the selected line.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE end_points: (2, 2) arrayNEWLINE The positions ((x1, y1), (x2, y2)) of the line ends.NEWLINE profile: list of 1d arraysNEWLINE Profile of intensity values. 
Length 1 (grayscale) or 3 (rgb).NEWLINE """NEWLINE profiles = [data.get_ydata() for data in self.profile]NEWLINE return self.line_tool.end_points, profilesNEWLINENEWLINE def _autoscale_view(self):NEWLINE if self.limits is None:NEWLINE self.ax.autoscale_view(tight=True)NEWLINE else:NEWLINE self.ax.autoscale_view(scaley=False, tight=True)NEWLINENEWLINE def line_changed(self, end_points):NEWLINE x, y = np.transpose(end_points)NEWLINE self.line_tool.end_points = end_pointsNEWLINE scan = measure.profile_line(self.image_viewer.original_image,NEWLINE *end_points[:, ::-1],NEWLINE linewidth=self.line_tool.linewidth)NEWLINE self.scan_data = scanNEWLINE if scan.ndim == 1:NEWLINE scan = scan[:, np.newaxis]NEWLINENEWLINE if scan.shape[1] != len(self.profile):NEWLINE self.reset_axes(scan)NEWLINENEWLINE for i in range(len(scan[0])):NEWLINE self.profile[i].set_xdata(np.arange(scan.shape[0]))NEWLINE self.profile[i].set_ydata(scan[:, i])NEWLINENEWLINE self.ax.relim()NEWLINENEWLINE self._autoscale_view()NEWLINE self.redraw()NEWLINENEWLINE def reset_axes(self, scan_data):NEWLINE # Clear lines outNEWLINE for line in self.ax.lines:NEWLINE self.ax.lines = []NEWLINENEWLINE if scan_data.shape[1] == 1:NEWLINE self.profile = self.ax.plot(scan_data, 'k-')NEWLINE else:NEWLINE self.profile = self.ax.plot(scan_data[:, 0], 'r-',NEWLINE scan_data[:, 1], 'g-',NEWLINE scan_data[:, 2], 'b-')NEWLINENEWLINE def output(self):NEWLINE """Return the drawn line and the resulting scan.NEWLINENEWLINE ReturnsNEWLINE -------NEWLINE line_image : (M, N) uint8 array, same shape as imageNEWLINE An array of 0s with the scanned line set to 255.NEWLINE If the linewidth of the line tool is greater than 1,NEWLINE sets the values within the profiled polygon to 128.NEWLINE scan : (P,) or (P, 3) array of int or floatNEWLINE The line scan values across the image.NEWLINE """NEWLINE end_points = self.line_tool.end_pointsNEWLINE line_image = np.zeros(self.image_viewer.original_image.shape[:2],NEWLINE np.uint8)NEWLINE width = 
self.line_tool.linewidthNEWLINE if width > 1:NEWLINE rp, cp = measure.profile._line_profile_coordinates(NEWLINE *end_points[:, ::-1], linewidth=width)NEWLINE # the points are aliased, so create a polygon using the cornersNEWLINE yp = np.rint(rp[[0, 0, -1, -1],[0, -1, -1, 0]]).astype(int)NEWLINE xp = np.rint(cp[[0, 0, -1, -1],[0, -1, -1, 0]]).astype(int)NEWLINE rp, cp = draw.polygon(yp, xp, line_image.shape)NEWLINE line_image[rp, cp] = 128NEWLINE (x1, y1), (x2, y2) = end_points.astype(int)NEWLINE rr, cc = draw.line(y1, x1, y2, x2)NEWLINE line_image[rr, cc] = 255NEWLINE return line_image, self.scan_dataNEWLINENEWLINE |
import numpy as np


class Variable:
    """A value node that records the operation which produced it, enabling
    reverse-mode automatic differentiation over a chain of creators.

    Parameters
    ----------
    data : array_like
        Value held by this variable; coerced to ``np.ndarray`` if needed.
    creator : optional
        The function object that produced this variable. It is expected to
        expose ``input``, ``output`` and ``backward(grad)`` attributes
        (assumes a single input/output per creator — TODO confirm).

    Raises
    ------
    ValueError
        If *data* is None.
    """

    def __init__(self, data, creator=None):
        if data is None:
            raise ValueError("data is not allowed to be None.")
        if not isinstance(data, np.ndarray):
            data = np.array(data)

        self.data = data
        # Gradient accumulator, same shape/dtype as ``data``.
        self.grad = np.zeros_like(self.data)
        self.creator = creator

    def backward(self, init=True):
        """Backpropagate gradients through the chain of creators.

        Parameters
        ----------
        init : bool
            When truthy, seed this variable's gradient with ones, i.e.
            treat it as the terminal output of the graph.
        """
        if init:
            self.grad = np.ones_like(self.data)

        # Walk the creator chain depth-first, accumulating gradients.
        # (Idiom fix: an empty list, not None, is the natural "nothing to
        # do" sentinel for the while-loop below.)
        funcs = [self.creator] if self.creator is not None else []
        while funcs:
            f = funcs.pop()
            y, x = f.output, f.input
            x.grad += f.backward(y.grad)
            if x.creator is not None:
                funcs.append(x.creator)
import pytest
import os
import numpy as np

from .utils import get_data
from ..wb_attack_data_generator import WBAttackGenerator


def test_data():
    """
    The Data Generator should not crash and should write its three output
    artifacts (the info JSON plus the target-train/target-test HDF5 files).
    """
    model, x_train, x_test, y_train, y_test = get_data()
    X = np.concatenate((x_train, x_test))
    Y = np.concatenate((y_train, y_test))

    # First half of the training set serves as "target train" indices,
    # the second half as "target test" indices.
    half = len(x_train) // 2
    wb_generator_train = WBAttackGenerator(model, X, Y,
                                           range(0, half),
                                           range(half, len(x_train)),
                                           10, 10, last_layer_only=True)

    # Hoisted: this path was previously repeated four times inline.
    fixture_dir = f'{os.getcwd()}/libs/MIA/tests/fixtures/'
    wb_generator_train.write_attack_info(fixture_dir, "mnist_train")

    assert os.path.exists(f'{fixture_dir}mnist_train_data_inf.json')
    assert os.path.exists(f'{fixture_dir}mnist_train_target_train_attack_data.h5')
    assert os.path.exists(f'{fixture_dir}mnist_train_target_test_attack_data.h5')