Dataset schema (field, dtype, observed range):

  field                                       dtype            range
  ------------------------------------------  ---------------  --------------
  hexsha                                      stringlengths    40 .. 40
  size                                        int64            5 .. 2.06M
  ext                                         stringclasses    10 values
  lang                                        stringclasses    1 value
  max_stars_repo_path                         stringlengths    3 .. 248
  max_stars_repo_name                         stringlengths    5 .. 125
  max_stars_repo_head_hexsha                  stringlengths    40 .. 78
  max_stars_repo_licenses                     sequencelengths  1 .. 10
  max_stars_count                             int64            1 .. 191k
  max_stars_repo_stars_event_min_datetime     stringlengths    24 .. 24
  max_stars_repo_stars_event_max_datetime     stringlengths    24 .. 24
  max_issues_repo_path                        stringlengths    3 .. 248
  max_issues_repo_name                        stringlengths    5 .. 125
  max_issues_repo_head_hexsha                 stringlengths    40 .. 78
  max_issues_repo_licenses                    sequencelengths  1 .. 10
  max_issues_count                            int64            1 .. 67k
  max_issues_repo_issues_event_min_datetime   stringlengths    24 .. 24
  max_issues_repo_issues_event_max_datetime   stringlengths    24 .. 24
  max_forks_repo_path                         stringlengths    3 .. 248
  max_forks_repo_name                         stringlengths    5 .. 125
  max_forks_repo_head_hexsha                  stringlengths    40 .. 78
  max_forks_repo_licenses                     sequencelengths  1 .. 10
  max_forks_count                             int64            1 .. 105k
  max_forks_repo_forks_event_min_datetime     stringlengths    24 .. 24
  max_forks_repo_forks_event_max_datetime     stringlengths    24 .. 24
  content                                     stringlengths    5 .. 2.06M
  avg_line_length                             float64          1 .. 1.02M
  max_line_length                             int64            3 .. 1.03M
  alphanum_fraction                           float64          0 .. 1
  count_classes                               int64            0 .. 1.6M
  score_classes                               float64          0 .. 1
  count_generators                            int64            0 .. 651k
  score_generators                            float64          0 .. 1
  count_decorators                            int64            0 .. 990k
  score_decorators                            float64          0 .. 1
  count_async_functions                       int64            0 .. 235k
  score_async_functions                       float64          0 .. 1
  count_documentation                         int64            0 .. 1.04M
  score_documentation                         float64          0 .. 1
----------------------------------------------------------------------
hexsha: 39ec9a70f64ddc65a70eb731b8421b2083d1e79f | size: 410 | ext: py | lang: Python
max_stars:  src/aerocloud/packages.py @ Aerometrex/aerocloud-python-client, head 0bd15432bb0f81fc5e9ca03c48b9b15c8e8ed438, licenses ["MIT"], count null, events null .. null
max_issues: src/aerocloud/packages.py @ Aerometrex/aerocloud-python-client, head 0bd15432bb0f81fc5e9ca03c48b9b15c8e8ed438, licenses ["MIT"], count null, events null .. null
max_forks:  src/aerocloud/packages.py @ Aerometrex/aerocloud-python-client, head 0bd15432bb0f81fc5e9ca03c48b9b15c8e8ed438, licenses ["MIT"], count null, events null .. null
content:
import os
from enum import Enum


class AppPackage(Enum):
    # Add packages here as required.
    LASTOOLS = "lastools"


def getPackageDirectory(package: AppPackage, version: str = None):
    "Gets the directory where the specified package is installed."
    varName = f'AZ_BATCH_APP_PACKAGE_{package.value}'

    if version is not None:
        varName = f'{varName}#{version}'

    return os.environ[varName]
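A minimal usage sketch (not part of the original file; the install path below is illustrative, simulating the environment variable Azure Batch would set on a compute node):

import os

# Simulate the variable Azure Batch sets for an installed application package.
os.environ['AZ_BATCH_APP_PACKAGE_lastools'] = '/opt/batch/apps/lastools'
print(getPackageDirectory(AppPackage.LASTOOLS))  # /opt/batch/apps/lastools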
stats: avg_line_length 21.578947, max_line_length 66, alphanum_fraction 0.702439, count_classes 86, score_classes 0.209756, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 165, score_documentation 0.402439
----------------------------------------------------------------------
hexsha: 39ef2ca7f17378b96bb6865f18c59fdf8633759c | size: 680 | ext: py | lang: Python
max_stars:  src/nodeforge/StartEngine.py @ nsk89/nodeforge, head 51e798092cfaf52112cfdc96af359633741da799, licenses ["BSD-3-Clause"], count null, events null .. null
max_issues: src/nodeforge/StartEngine.py @ nsk89/nodeforge, head 51e798092cfaf52112cfdc96af359633741da799, licenses ["BSD-3-Clause"], count 1, events 2018-10-21T05:30:32.000Z .. 2018-10-31T05:53:18.000Z
max_forks:  src/nodeforge/StartEngine.py @ nsk89/nodeforge, head 51e798092cfaf52112cfdc96af359633741da799, licenses ["BSD-3-Clause"], count 2, events 2018-10-31T05:56:34.000Z .. 2018-10-31T05:57:36.000Z
content:
""" This file should be imported at the bottom of configure.py TODO: All of this may be moved into a single function in the future so people can choose a reactor in configure.py """ from twisted.internet import reactor from twisted.internet.task import LoopingCall from threading import currentThread, Thread # Check to see if main thread is alive mainthread = currentThread() def checkExit(): if not mainthread.isAlive(): reactor.stop() # Every second, make sure that the interface thread is alive. LoopingCall(checkExit).start(1) # start the network loop in a new thread Thread(target=lambda : reactor.run(installSignalHandlers=0)).start()
stats: avg_line_length 29.565217, max_line_length 68, alphanum_fraction 0.744118, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 330, score_documentation 0.485294
----------------------------------------------------------------------
hexsha: 39ef5804d073f8e1a8698f5b8f98bbb0a09926ef | size: 7,170 | ext: py | lang: Python
max_stars:  src/asit.py @ 6H057WH1P3/Asit, head 4dce80e3c4c05c4f56563110c59bae55e61aeaae, licenses ["MIT"], count null, events null .. null
max_issues: src/asit.py @ 6H057WH1P3/Asit, head 4dce80e3c4c05c4f56563110c59bae55e61aeaae, licenses ["MIT"], count 3, events 2015-09-16T17:54:13.000Z .. 2015-09-18T06:54:33.000Z
max_forks:  src/asit.py @ 6H057WH1P3/Asit, head 4dce80e3c4c05c4f56563110c59bae55e61aeaae, licenses ["MIT"], count null, events null .. null
content:
import random
import time

import requests


class Account:
    # C'tor
    def __init__(self, language, world, user, password, ability):
        # standard class variables
        self.cookie = ""
        self.language = language
        self.world = world
        self.user = user
        self.password = password
        self.ability = ability

        # preparing header and basic url for get and post requests
        if language == "de":
            self.basic_url = "http://welt" + self.world + ".freewar.de/freewar/internal/"
            self.header = {"Host": "welt" + self.world + ".freewar.de",
                           "Connection": "keep-alive",
                           "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64)"}
        elif language == "en":
            self.basic_url = "http://world" + self.world + ".freewar.com/freewar/internal/"
            self.header = {"Host": "world" + self.world + ".freewar.com",
                           "Connection": "keep-alive",
                           "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64)"}

    def login(self):
        print("\t[*] Logging in")
        login_url = self.basic_url + "index.php"

        # really annoying
        if self.language == "de":
            login_submit = "Einloggen"
        elif self.language == "en":
            login_submit = "Login"

        # login payload / post parameters
        login_payload = {"name": self.user, "password": self.password, "submit": login_submit}

        # login request
        login_request = requests.post(login_url, data=login_payload, headers=self.header)

        # necessary for session management in other requests
        self.cookie = login_request.cookies
        print("\t[+] Login successful")
        return 0

    # necessary to access all other links in fw main window after login
    def redirect(self):
        print("\t[*] Redirecting")
        redirect_url = self.basic_url + "frset.php"
        requests.get(redirect_url, headers=self.header, cookies=self.cookie)
        print("\t[+] Redirect successful")
        return 0

    # function to train characters abilities
    def train(self):
        # the training sequence
        print("\t[*] Training")
        train_url = self.basic_url + "ability.php"
        train_payload = {"action": "train", "ability_id": self.ability}
        requests.get(train_url, params=train_payload, headers=self.header, cookies=self.cookie)
        print("\t[+] Training successful")

        # preparing for the training status request
        status_payload = {"action": "show_ability", "ability_id": self.ability}

        # requesting content of main frame
        status_request = requests.get(train_url, params=status_payload, headers=self.header, cookies=self.cookie)

        if self.language == "de":
            search_parameters = ["Aktuelle Stufe: ", "Maximale Stufe: "]  # TODO: look up the exact text online
        elif self.language == "en":
            search_parameters = ["actual level: ", "maximal level: "]

        output = "\t[*] Actual level: "
        first = True

        # looking for search parameters in http response
        for search_text in search_parameters:
            # exception handling
            try:
                position = status_request.text.find(search_text)
                if (position == -1):
                    raise RuntimeError("Bad Request")
            except RuntimeError:
                print("\t[-] Could not find ability level.")
                return 1

            # TODO: continue here
            text_length = len(search_text)
            ability_level = status_request.text[position + text_length : position + text_length + 3]

            # getting a clean output
            ability_level = ability_level.strip("<")
            ability_level = ability_level.strip("/")
            ability_level = ability_level.strip("b")
            output += ability_level
            if first:
                first = False
                output += " / "

        print(output)
        return 0

    # function to pick up accounts oil if he's on the right field for that
    def oil(self):
        print("\t[*] Picking up oil")

        # requesting content of main frame
        main_url = self.basic_url + "main.php"
        main_request = requests.get(main_url, headers=self.header, cookies=self.cookie)

        # something called exception handling
        try:
            position = main_request.text.find("checkid=")
            if (position == -1):
                raise RuntimeError("wrong position")
        except RuntimeError:
            print("\t[-] Oil isn't ready yet or account is on the wrong position.")
            return 1

        # picking up the oil
        oil_url = self.basic_url + "main.php"
        oil_payload = {"arrive_eval": "drink", "checkid": main_request.text[position + 8 : position + 15]}
        requests.get(oil_url, params=oil_payload, headers=self.header, cookies=self.cookie)
        return 0

    # for a clean session
    def logout(self):
        print("\t[*] Logging out")
        logout_url = self.basic_url + "logout.php"
        requests.get(logout_url, headers=self.header, cookies=self.cookie)
        print("\t[+] Logged out")
        return 0

    def automatic_sit(self):
        try:
            self.login()
            self.redirect()
            self.train()
            self.oil()
            self.logout()
        except:
            print("[!] Connection Error.")
            return 1


class ManageAccounts:
    def __init__(self, account_path):
        self.accounts = []
        self.later = []

        # filling the list of credentials
        with open(account_path, "r") as account_file:
            for line in account_file:
                splitted_line = line.strip("\n").split(", ")
                #print(splitted_line)
                if len(splitted_line) == 5:
                    self.accounts.append(splitted_line)

    def manage(self):
        while len(self.accounts) > 0:
            for language, world, user, password, ability in self.accounts:
                # skipping credentials of the same world
                skip = False
                for account in self.accounts:
                    if (account[1] == world) and (account[2] != user):
                        self.later.append(account)
                        self.accounts.remove(account)
                        skip = True
                if skip:
                    continue

                # if not skipped, handling the credential
                print("\n[*] World: " + world + " Account: " + user + " Server: " + language)
                FWAccount = Account(language, world, user, password, ability)
                if FWAccount.automatic_sit():
                    return 1

            # writing memorized credentials back to be handled
            if len(self.later) > 0:
                random_time = random.randint(180, 300)
                print("[*] Waiting " + str(random_time) + " Seconds to log other accounts safely.")
                time.sleep(random_time)
                self.accounts = self.later
                self.later.clear()
            else:
                self.accounts.clear()
stats: avg_line_length 39.61326, max_line_length 155, alphanum_fraction 0.565969, count_classes 7,122, score_classes 0.993305, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 2,033, score_documentation 0.283543
----------------------------------------------------------------------
hexsha: 39f17c6cf9e734ea907636289c61a9999dc0de12 | size: 251 | ext: py | lang: Python
max_stars:  src/core/views.py @ Ao99/django-boilerplate, head 7fa8078b67655698a4070ce58c10d2226fe1d59b, licenses ["MIT"], count null, events null .. null
max_issues: src/core/views.py @ Ao99/django-boilerplate, head 7fa8078b67655698a4070ce58c10d2226fe1d59b, licenses ["MIT"], count null, events null .. null
max_forks:  src/core/views.py @ Ao99/django-boilerplate, head 7fa8078b67655698a4070ce58c10d2226fe1d59b, licenses ["MIT"], count null, events null .. null
content:
from django.shortcuts import render
from django.views import View


# Create your views here.
class CoreView(View):
    template_name = 'core/home.html'

    def get(self, request, *args, **kwargs):
        return render(request, self.template_name, {})
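For orientation, a class-based view like this is normally wired up in the project's URLconf; the record does not include urls.py, so the following route is an illustrative sketch only:

# urls.py (illustrative, not part of this record)
from django.urls import path

from core.views import CoreView

urlpatterns = [
    path('', CoreView.as_view(), name='home'),
]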
stats: avg_line_length 31.375, max_line_length 54, alphanum_fraction 0.717131, count_classes 158, score_classes 0.629482, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 41, score_documentation 0.163347
----------------------------------------------------------------------
hexsha: 39f26329f53e08a2340c221abdf702988c619417 | size: 12,075 | ext: py | lang: Python
max_stars:  hashvis.py @ boredzo/hashvis, head 74a017c7fa9b6d48e43172ffd15fc19ccfb060e1, licenses ["BSD-3-Clause"], count 15, events 2015-12-02T14:26:52.000Z .. 2018-01-21T15:18:59.000Z
max_issues: hashvis.py @ boredzo/hashvis, head 74a017c7fa9b6d48e43172ffd15fc19ccfb060e1, licenses ["BSD-3-Clause"], count 10, events 2015-12-04T06:00:42.000Z .. 2016-07-09T21:40:53.000Z
max_forks:  hashvis.py @ boredzo/hashvis, head 74a017c7fa9b6d48e43172ffd15fc19ccfb060e1, licenses ["BSD-3-Clause"], count null, events null .. null
content:
#!/usr/bin/python
# -*- coding: utf-8 -*-

"""hashvis by Peter Hosey

Reads from standard input or files, and prints what it reads, along with
colorized versions of any hashes or signatures found in each line.

The goal here is visual comparability. You should be able to tell whether two
hashes are the same at a glance, rather than having to closely compare digits
(or, more probably, not bother and just assume the hashes match!).

The more obvious of the two methods used is shaping the output: Each hash will
be represented as a rectangle of an aspect ratio determined by the hash. You
may thus end up with one that's tall and one that's wide, or one that's square
(if the hash length is a square number) and one that isn't.

If two hashes are the same shape (or if you passed --oneline), another
difference is that each byte is represented by a different pair of foreground
and background colors. You should thus be able to compare the color-patterns
rather than having to look at individual digits.
"""

# #mark - Imports and utilities

import sys
import os
import re
import base64
import binascii
import cmath as math

range = xrange


def factors(n):
    "Yield every pair of factors of n (x,y where n/x == y and n/y == x), except for (1,n) and (n,1)."
    limit = math.sqrt(n).real
    if n == 1:
        yield (1, 1)
        return
    for i in range(1, int(limit + 1)):
        if n % i == 0:
            pair = (i, n/i)
            yield pair
            opposite_pair = (pair[1], pair[0])
            #If n is square, one of the pairs will be (sqrt, sqrt). We want to yield that only once. All other pairs, we want to yield both ways round.
            if pair != opposite_pair:
                yield opposite_pair


def except_one(pairs):
    "Given a sequence of pairs (x, y), yield every pair where neither x nor y is 1."
    for pair in pairs:
        if 1 not in pair:
            yield pair


# #mark - Parsing

MD5_exp = re.compile(r'^MD5 \(.*\) = ([0-9a-fA-F]+)')
fingerprint_exp = re.compile(r'^(?:R|ECD)SA key fingerprint is (?:(?:MD5:)?(?P<hex>[:0-9a-fA-F]+)|SHA256:(?P<base64>[+/0-9a-zA-Z]+))\.')
commit_exp = re.compile(r'^commit ([0-9a-fA-F]+)')

more_base64_padding_than_anybody_should_ever_need = '=' * 64


def extract_hash_from_line(input_line):
    "Returns a tuple of the extracted hash as hex, and whether it was originally hex (vs, say, base64). The hash may be None if none was found in the input."
    if input_line[:1] == 'M':
        match = MD5_exp.match(input_line)
        if match:
            return match.group(1), True
        else:
            return '', False
    elif input_line[:1] in 'RE':
        match = fingerprint_exp.match(input_line)
        if match:
            hex = match.group('hex')
            if hex:
                return hex, True
            b64str = match.group('base64')
            if b64str:
                # Pacify the base64 module, which wants *some* padding (at least sometimes) but doesn't care how much.
                b64str += more_base64_padding_than_anybody_should_ever_need
                # Re-encode to hex for processing downstream. Arguably a refactoring opportunity…
                return binascii.b2a_hex(base64.b64decode(b64str)), False
        return '', False
    elif input_line[:7] == 'commit ':
        match = commit_exp.match(input_line)
        if match:
            return match.group(1), True

    if input_line:
        try:
            hash, not_the_hash = input_line.split(None, 1)
        except ValueError:
            # Insufficient fields. This line doesn't contain any whitespace. Use the entire line.
            hash = input_line
        hash = hash.strip().replace('-', '')
        try:
            int(hash, 16)
        except ValueError:
            # Not a hex number.
            return None, False
        else:
            return hash, True


def parse_hex(hex):
    hex = hex.lstrip(':-')
    while hex:
        byte_hex, hex = hex[:2], hex[2:].lstrip(':-')
        yield int(byte_hex, 16)


# #mark - Representation

def fgcolor(idx, deep_color=False):
    if deep_color:
        return '\x1b[38;5;{0}m'.format(idx)
    idx = ((idx >> 4) & 0xf)
    # 90 is bright foreground; 30 is dull foreground.
    if idx < 0x8:
        base = 30
    else:
        base = 90
        idx = idx - 0x8
    return '\x1b[{0}m'.format(base + idx)


def bgcolor(idx, deep_color=False):
    if deep_color:
        idx = ((idx & 0xf) << 4) | ((idx & 0xf0) >> 4)
        # This add 128 and mod 256 is important, because it ensures double-digits such as 00 remain different colors.
        return '\x1b[48;5;{0}m'.format((idx + 128) % 256)
    else:
        idx = (idx & 0xf)
        # 100 is bright background; 40 is dull background.
        if idx < 0x8:
            base = 40
        else:
            base = 100
            idx = idx - 0x8
        return '\x1b[{0}m'.format(base + idx)


BOLD = '\x1b[1m'
RESET = '\x1b[0m'


def hash_to_pic(hash, only_ever_one_line=False, represent_as_hex=False, deep_color=False, _underlying_fgcolor=fgcolor, _underlying_bgcolor=bgcolor):
    def fgcolor(idx):
        return _underlying_fgcolor(idx, deep_color)

    def bgcolor(idx):
        return _underlying_bgcolor(idx, deep_color)

    bytes = parse_hex(hash)
    characters = list('0123456789abcdef') if represent_as_hex else [
        '▚',
        '▞',
        '▀',
        '▌',
    ]

    if not only_ever_one_line:
        pairs = list((w, h) for (w, h) in except_one(factors(len(hash) / 2)) if w >= h)
        if not pairs:
            # Prefer (w, 1) over (1, h) if we have that choice.
            pairs = list((w, h) for (w, h) in factors(len(hash) / 2) if w >= h)

    output_chunks = []
    last_byte = 0
    character_idx = None
    for b in bytes:
        def find_character(b):
            character_idx = b % len(characters)
            return characters[character_idx]
        if not represent_as_hex:
            output_chunks.append(fgcolor(b) + bgcolor(b) + find_character(b))
        else:
            output_chunks.append(fgcolor(b) + bgcolor(b) + find_character(b >> 4) + find_character(b & 0xf))
        last_byte = b

    if only_ever_one_line:
        pixels_per_row, num_rows = len(hash) / 2, 1
    else:
        pixels_per_row, num_rows = pairs[last_byte % len(pairs)]

    while output_chunks:
        yield BOLD + ''.join(output_chunks[:pixels_per_row]) + RESET
        del output_chunks[:pixels_per_row]


if __name__ == '__main__':
    # #mark - Self-tests

    run_tests = False
    if run_tests:
        # A square number. Should contain a diagonal pair (in this case, (16,16)).
        factors_of_256 = set(factors(256))
        assert factors_of_256 == set([(256, 1), (16, 16), (8, 32), (2, 128), (64, 4), (1, 256), (32, 8), (128, 2), (4, 64)])
        # A rectangular number: not square, but still composite. No diagonal pair here.
        factors_of_12 = set(factors(12))
        assert factors_of_12 == set([(2, 6), (12, 1), (1, 12), (6, 2), (4, 3), (3, 4)])

        assert (1, 256) in factors_of_256
        assert (256, 1) in factors_of_256
        assert (1, 256) not in except_one(factors_of_256)
        assert (256, 1) not in except_one(factors_of_256)

        # A prime number. Should have exactly one pair of factors.
        factors_of_5 = set(factors(5))
        assert factors_of_5 == set([(1, 5), (5, 1)])

        assert list(parse_hex('ab15e')) == [0xab, 0x15, 0x0e]
        assert list(parse_hex(':::ab:15:e')) == [0xab, 0x15, 0x0e]

        assert extract_hash_from_line('RSA key fingerprint is b8:79:03:7d:00:44:98:6e:67:a0:59:1a:01:21:36:38.\n') == ('b8:79:03:7d:00:44:98:6e:67:a0:59:1a:01:21:36:38', True)
        assert extract_hash_from_line('RSA key fingerprint is b8:79:03:7d:00:44:98:6e:67:a0:59:1a:01:21:36:38.') == ('b8:79:03:7d:00:44:98:6e:67:a0:59:1a:01:21:36:38', True)
        #Alternate output example from https://en.wikibooks.org/wiki/OpenSSH/Cookbook/Authentication_Keys :
        assert extract_hash_from_line('RSA key fingerprint is MD5:10:4a:ec:d2:f1:38:f7:ea:0a:a0:0f:17:57:ea:a6:16.') == ('10:4a:ec:d2:f1:38:f7:ea:0a:a0:0f:17:57:ea:a6:16', True)
        # Also from https://en.wikibooks.org/wiki/OpenSSH/Cookbook/Authentication_Keys :
        assert extract_hash_from_line('ECDSA key fingerprint is SHA256:LPFiMYrrCYQVsVUPzjOHv+ZjyxCHlVYJMBVFerVCP7k.\n') == ('2cf162318aeb098415b1550fce3387bfe663cb10879556093015457ab5423fb9', False), extract_hash_from_line('ECDSA key fingerprint is SHA256:LPFiMYrrCYQVsVUPzjOHv+ZjyxCHlVYJMBVFerVCP7k.\n')
        assert extract_hash_from_line('ECDSA key fingerprint is SHA256:LPFiMYrrCYQVsVUPzjOHv+ZjyxCHlVYJMBVFerVCP7k.') == ('2cf162318aeb098415b1550fce3387bfe663cb10879556093015457ab5423fb9', False), extract_hash_from_line('ECDSA key fingerprint is SHA256:LPFiMYrrCYQVsVUPzjOHv+ZjyxCHlVYJMBVFerVCP7k.')
        # Mix and match RSA and ECDSA with MD5 and SHA256:
        assert extract_hash_from_line('ECDSA key fingerprint is MD5:10:4a:ec:d2:f1:38:f7:ea:0a:a0:0f:17:57:ea:a6:16.') == ('10:4a:ec:d2:f1:38:f7:ea:0a:a0:0f:17:57:ea:a6:16', True)
        assert extract_hash_from_line('RSA key fingerprint is SHA256:LPFiMYrrCYQVsVUPzjOHv+ZjyxCHlVYJMBVFerVCP7k.\n') == ('2cf162318aeb098415b1550fce3387bfe663cb10879556093015457ab5423fb9', False), extract_hash_from_line('RSA key fingerprint is SHA256:LPFiMYrrCYQVsVUPzjOHv+ZjyxCHlVYJMBVFerVCP7k.\n')
        #UUID
        assert extract_hash_from_line('E6CD379E-12CD-4E00-A83A-B06E74CF03B8') == ('E6CD379E12CD4E00A83AB06E74CF03B8', True), extract_hash_from_line('E6CD379E-12CD-4E00-A83A-B06E74CF03B8')
        assert extract_hash_from_line('e6cd379e-12cd-4e00-a83a-b06e74cf03b8') == ('e6cd379e12cd4e00a83ab06e74cf03b8', True), extract_hash_from_line('e6cd379e-12cd-4e00-a83a-b06e74cf03b8')

        assert extract_hash_from_line('MD5 (hashvis.py) = e21c7b846f76826d52a0ade79ef9cb49\n') == ('e21c7b846f76826d52a0ade79ef9cb49', True)
        assert extract_hash_from_line('MD5 (hashvis.py) = e21c7b846f76826d52a0ade79ef9cb49') == ('e21c7b846f76826d52a0ade79ef9cb49', True)
        assert extract_hash_from_line('8b948e9c85fdf68f872017d7064e839c  hashvis.py\n') == ('8b948e9c85fdf68f872017d7064e839c', True)
        assert extract_hash_from_line('8b948e9c85fdf68f872017d7064e839c  hashvis.py') == ('8b948e9c85fdf68f872017d7064e839c', True)
        assert extract_hash_from_line('2c9997ce32cb35823b2772912e221b350717fcb2d782c667b8f808be44ae77ba1a7b94b4111e386c64a2e87d15c64a2fc2177cd826b9a0fba6b348b4352ed924  hashvis.py\n') == ('2c9997ce32cb35823b2772912e221b350717fcb2d782c667b8f808be44ae77ba1a7b94b4111e386c64a2e87d15c64a2fc2177cd826b9a0fba6b348b4352ed924', True)
        assert extract_hash_from_line('2c9997ce32cb35823b2772912e221b350717fcb2d782c667b8f808be44ae77ba1a7b94b4111e386c64a2e87d15c64a2fc2177cd826b9a0fba6b348b4352ed924  hashvis.py') == ('2c9997ce32cb35823b2772912e221b350717fcb2d782c667b8f808be44ae77ba1a7b94b4111e386c64a2e87d15c64a2fc2177cd826b9a0fba6b348b4352ed924', True)

        assert extract_hash_from_line('#!/usr/bin/python\n')[0] is None

        # Protip: Use vis -co to generate these.
        (line,) = hash_to_pic('78', represent_as_hex=True, deep_color=False)
        assert line == '\033[1m\033[37m\033[100m78\033[0m', repr(line)
        (line,) = hash_to_pic('7f', represent_as_hex=True, deep_color=False)
        assert line == '\033[1m\033[37m\033[107m7f\033[0m', repr(line)

        assert list(hash_to_pic('aebece', deep_color=False)) != list(hash_to_pic('deeefe', deep_color=False)), (list(hash_to_pic('aebece', deep_color=False)), list(hash_to_pic('deeefe', deep_color=False)))
        assert list(hash_to_pic('eaebec', deep_color=False)) != list(hash_to_pic('edeeef', deep_color=False)), (list(hash_to_pic('eaebec', deep_color=False)), list(hash_to_pic('edeeef', deep_color=False)))

        sys.exit(0)

    # #mark - Main

    use_256color = os.getenv('TERM') == 'xterm-256color'

    import argparse
    parser = argparse.ArgumentParser(description="Visualize hexadecimal input (hashes, UUIDs, etc.) as an arrangement of color blocks.")
    parser.add_argument('--one-line', '--oneline', action='store_true', help="Unconditionally produce a rectangle 1 character tall. The default is to choose a pair of width and height based upon one of the bytes of the input.")
    parser.add_argument('--color-test', '--colortest', action='store_true', help="Print the 16-color, 256-color foreground, and 256-color background color palettes, then exit.")
    options, args = parser.parse_known_args()

    if options.color_test:
        for x in range(16):
            print fgcolor(x, deep_color=False),
            print bgcolor(x, deep_color=False),
        else:
            print
        for x in range(256):
            sys.stdout.write(fgcolor(x, deep_color=True) + bgcolor(x, deep_color=True) + '%02x' % (x,))
        else:
            print RESET
        import sys
        sys.exit(0)

    import fileinput
    for input_line in fileinput.input(args):
        print input_line.rstrip('\n')
        hash, is_hex = extract_hash_from_line(input_line)
        if hash:
            for output_line in hash_to_pic(hash, only_ever_one_line=options.one_line, represent_as_hex=is_hex, deep_color=use_256color):
                print output_line
stats: avg_line_length 46.087786, max_line_length 319, alphanum_fraction 0.729441, count_classes 0, score_classes 0, count_generators 2,100, score_generators 0.173769, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 5,839, score_documentation 0.483161
----------------------------------------------------------------------
hexsha: 39f2718894e3565b21d9ad13de2638c2e9273b26 | size: 270 | ext: py | lang: Python
max_stars:  euler_7_nth_prime.py @ igorakkerman/euler-challenge, head 1fdedce439520fc31a2e5fb66abe23b6f99f04db, licenses ["MIT"], count null, events null .. null
max_issues: euler_7_nth_prime.py @ igorakkerman/euler-challenge, head 1fdedce439520fc31a2e5fb66abe23b6f99f04db, licenses ["MIT"], count null, events null .. null
max_forks:  euler_7_nth_prime.py @ igorakkerman/euler-challenge, head 1fdedce439520fc31a2e5fb66abe23b6f99f04db, licenses ["MIT"], count null, events null .. null
content:
# https://projecteuler.net/problem=7
import math


def sieve(xmax):
    p = {i for i in range(2, xmax + 1)}
    for i in range(2, xmax):
        r = {j * i for j in range(2, int(xmax / i) + 1)}
        p -= r
    return sorted(p)


print(sum(sieve(2000000)))
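A quick sanity check of the sieve on small bounds (not part of the original file):

assert sieve(30) == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
assert sum(sieve(10)) == 17  # 2 + 3 + 5 + 7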
stats: avg_line_length 20.769231, max_line_length 57, alphanum_fraction 0.533333, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 37, score_documentation 0.137037
----------------------------------------------------------------------
hexsha: 39f3a173967eb82662e3417309654bea4d1eda7a | size: 3,066 | ext: py | lang: Python
max_stars:  docker/ubuntu/16-04/ub_limonero/migrations/versions/32053847c4db_add_new_types.py @ eubr-atmosphere/jenkins, head a9065584d810238c6fa101d92d12c131d1d317cb, licenses ["Apache-2.0"], count null, events null .. null
max_issues: docker/ubuntu/16-04/ub_limonero/migrations/versions/32053847c4db_add_new_types.py @ eubr-atmosphere/jenkins, head a9065584d810238c6fa101d92d12c131d1d317cb, licenses ["Apache-2.0"], count null, events null .. null
max_forks:  docker/ubuntu/16-04/ub_limonero/migrations/versions/32053847c4db_add_new_types.py @ eubr-atmosphere/jenkins, head a9065584d810238c6fa101d92d12c131d1d317cb, licenses ["Apache-2.0"], count null, events null .. null
content:
"""Add new types Revision ID: 32053847c4db Revises: 05a62958a9cc Create Date: 2019-06-11 10:36:14.456629 """ from alembic import context from sqlalchemy.orm import sessionmaker # revision identifiers, used by Alembic. revision = '32053847c4db' down_revision = '05a62958a9cc' branch_labels = None depends_on = None all_commands = [ (""" ALTER TABLE data_source CHANGE `format` `format` ENUM( 'CSV','CUSTOM','GEO_JSON','HAR_IMAGE_FOLDER','HDF5','DATA_FOLDER', 'IMAGE_FOLDER', 'JDBC','JSON','NETCDF4','PARQUET','PICKLE','SHAPEFILE', 'TAR_IMAGE_FOLDER','TEXT', 'VIDEO_FOLDER', 'UNKNOWN','XML_FILE') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""", """ ALTER TABLE data_source CHANGE `format` `format` ENUM( 'CSV','CUSTOM','GEO_JSON','HDF5','JDBC','JSON', 'NETCDF4','PARQUET','PICKLE','SHAPEFILE','TEXT', 'UNKNOWN','XML_FILE') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""" ), (""" ALTER TABLE `storage` CHANGE `type` `type` ENUM( 'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE', 'CASSANDRA','JDBC','LOCAL') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""", """ ALTER TABLE `storage` CHANGE `type` `type` ENUM( 'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE', 'CASSANDRA','JDBC') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""", ), ( """ALTER TABLE `model` CHANGE `type` `type` ENUM( 'KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION', 'SPARK_ML_CLASSIFICATION','UNSPECIFIED') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL; """, """ALTER TABLE `model` CHANGE `type` `type` ENUM( 'KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION', 'SPARK_ML_CLASSIFICATION','UNSPECIFIED') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL; """ ) ] def upgrade(): ctx = context.get_context() session = sessionmaker(bind=ctx.bind)() connection = session.connection() try: for cmd in all_commands: if isinstance(cmd[0], (unicode, str)): connection.execute(cmd[0]) elif isinstance(cmd[0], list): for row in cmd[0]: connection.execute(row) else: cmd[0]() except: session.rollback() raise session.commit() def downgrade(): ctx = context.get_context() session = sessionmaker(bind=ctx.bind)() connection = session.connection() connection.execute('SET foreign_key_checks = 0;') try: for cmd in reversed(all_commands): if isinstance(cmd[1], (unicode, str)): connection.execute(cmd[1]) elif isinstance(cmd[1], list): for row in cmd[1]: connection.execute(row) else: cmd[1]() except: session.rollback() raise connection.execute('SET foreign_key_checks = 1;') session.commit()
stats: avg_line_length 32.967742, max_line_length 80, alphanum_fraction 0.599152, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 1,756, score_documentation 0.572733
----------------------------------------------------------------------
hexsha: 39f4f90e9b80ade83346acbec06fcedbaeda8cb3 | size: 88 | ext: py | lang: Python
max_stars:  advanced_tools/__init__.py @ kvdogan/advanced_tools, head 7e93232374980d83fda8051496a190188c11fe0d, licenses ["MIT"], count null, events null .. null
max_issues: advanced_tools/__init__.py @ kvdogan/advanced_tools, head 7e93232374980d83fda8051496a190188c11fe0d, licenses ["MIT"], count null, events null .. null
max_forks:  advanced_tools/__init__.py @ kvdogan/advanced_tools, head 7e93232374980d83fda8051496a190188c11fe0d, licenses ["MIT"], count null, events null .. null
content:
from advanced_tools.IO_path_utils import *
from advanced_tools.algorithm_utils import *
stats: avg_line_length 29.333333, max_line_length 44, alphanum_fraction 0.863636, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 0, score_documentation 0
----------------------------------------------------------------------
hexsha: 39f5a45cf3414a12f90b8d040d893593304736d0 | size: 2,836 | ext: py | lang: Python
max_stars:  sets-master/sets-master/sets/utility.py @ FedericoMolinaChavez/tesis-research, head d77cc621d452c9ecf48d9ac80349b41aeb842412, licenses ["MIT"], count null, events null .. null
max_issues: sets-master/sets-master/sets/utility.py @ FedericoMolinaChavez/tesis-research, head d77cc621d452c9ecf48d9ac80349b41aeb842412, licenses ["MIT"], count 4, events 2021-03-09T20:33:57.000Z .. 2022-02-18T12:56:32.000Z
max_forks:  sets-master/sets-master/sets/utility.py @ FedericoMolinaChavez/tesis-research, head d77cc621d452c9ecf48d9ac80349b41aeb842412, licenses ["MIT"], count null, events null .. null
content:
import os
import pickle
import functools
import errno
import shutil
from urllib.request import urlopen
#import definitions


def read_config(schema='data/schema.yaml', name='sets'):
    filename = '.{}rc'.format(name)
    paths = [
        os.path.join(os.curdir, filename),
        os.path.expanduser(os.path.join('~', filename)),
        os.environ.get('{}_CONFIG'.format(name.upper())),
    ]
    schema = os.path.join(os.path.dirname(__file__), schema)
    parser = definitions.Parser(schema)
    for path in paths:
        if path and os.path.isfile(path):
            return parser(path)
    return parser('{}')


def disk_cache(basename, directory, method=False):
    """
    Function decorator for caching pickleable return values on disk. Uses a
    hash computed from the function arguments for invalidation. If 'method',
    skip the first argument, usually being self or cls. The cache filepath is
    'directory/basename-hash.pickle'.
    """
    directory = os.path.expanduser(directory)
    ensure_directory(directory)

    def wrapper(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            key = (tuple(args), tuple(kwargs.items()))
            # Don't use self or cls for the invalidation hash.
            if method and key:
                key = key[1:]
            filename = '{}-{}.pickle'.format(basename, hash(key))
            filepath = os.path.join(directory, filename)
            if os.path.isfile(filepath):
                with open(filepath, 'rb') as handle:
                    return pickle.load(handle)
            result = func(*args, **kwargs)
            with open(filepath, 'wb') as handle:
                pickle.dump(result, handle)
            return result
        return wrapped
    return wrapper


def download(url, directory, filename=None):
    """
    Download a file and return its filename on the local file system. If the
    file is already there, it will not be downloaded again. The filename is
    derived from the url if not provided. Return the filepath.
    """
    if not filename:
        _, filename = os.path.split(url)
    directory = os.path.expanduser(directory)
    ensure_directory(directory)
    filepath = os.path.join(directory, filename)
    if os.path.isfile(filepath):
        return filepath
    print('Download', filepath)
    with urlopen(url) as response, open(filepath, 'wb') as file_:
        shutil.copyfileobj(response, file_)
    return filepath


def ensure_directory(directory):
    """
    Create the directories along the provided directory path that do not
    exist.
    """
    directory = os.path.expanduser(directory)
    try:
        os.makedirs(directory)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise e
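A minimal usage sketch of the disk_cache decorator (not part of the original file; the cache directory is illustrative):

@disk_cache('squares', directory='/tmp/sets-cache-demo')
def square(x):
    print('computing', x)
    return x * x

print(square(4))  # computes and writes /tmp/sets-cache-demo/squares-<hash>.pickle
print(square(4))  # loaded from disk; 'computing' is not printed again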
stats: avg_line_length 35.012346, max_line_length 80, alphanum_fraction 0.619182, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 679, score_decorators 0.239422, count_async_functions 0, score_async_functions 0, count_documentation 765, score_documentation 0.269746
----------------------------------------------------------------------
hexsha: f2cdba45917fad7ff9ab33f608fa9dbb603aec4b | size: 1,984 | ext: py | lang: Python
max_stars:  src/test_fps.py @ pjenpoomjai/tfpose-herokuNEW, head 7d1085a3fcb02c0f6d16ed7f2cf1ad8daff103ea, licenses ["Apache-2.0"], count null, events null .. null
max_issues: src/test_fps.py @ pjenpoomjai/tfpose-herokuNEW, head 7d1085a3fcb02c0f6d16ed7f2cf1ad8daff103ea, licenses ["Apache-2.0"], count null, events null .. null
max_forks:  src/test_fps.py @ pjenpoomjai/tfpose-herokuNEW, head 7d1085a3fcb02c0f6d16ed7f2cf1ad8daff103ea, licenses ["Apache-2.0"], count null, events null .. null
content:
import cv2
import time
import numpy as np
import imutils

camera = 0
cam = cv2.VideoCapture(camera)
fgbg = cv2.createBackgroundSubtractorMOG2(history=1000, varThreshold=0, detectShadows=False)
width = 600
height = 480
fps_time = 0

while True:
    ret_val, image = cam.read()
    image = cv2.resize(image, (width, height))
    image = cv2.GaussianBlur(image, (5, 5), 0)
    fgmask = fgbg.apply(image)
    # image = fgbg.apply(image,learningRate=0.001)
    # image = imutils.resize(image, width=500)
    # gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

    cnts = cv2.findContours(fgmask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts = cnts[0] if imutils.is_cv2() else cnts[1]

    # loop over the contours
    x_left = -1
    y_left = -1
    x_right = -1
    y_right = -1
    for c in cnts:
        # if the contour is too small, ignore it
        # if cv2.contourArea(c) > 500:
        #     continue

        # compute the bounding box for the contour, draw it on the frame,
        # and update the text
        (x, y, w, h) = cv2.boundingRect(c)
        if x_left == -1:
            x_left = x
            y_left = y
        if x < x_left:
            x_left = x
        if y < y_left:
            y_left = y
        if x + w > x_right:
            x_right = x + w
        if y + h > y_right:
            y_right = y + h
        # cv2.rectangle(image, (x, y), (x+w, y+h), (255, 0, 0), 2)

    if (x_left == 0 and y_left == 0 and x_right == width and y_right == height) == False:
        cv2.rectangle(image, (x_left, y_left), (x_right, y_right), (0, 255, 0), 2)

    # cv2.putText(image,
    #             "FPS: %f [press 'q'to quit]" % (1.0 / (time.time() - fps_time)),
    #             (10, 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
    #             (0, 255, 0), 2)
    cv2.imshow('tf-pose-estimation result', fgmask)
    cv2.imshow('tf-pose-estimation result2', image)
    fps_time = time.time()
    if cv2.waitKey(1) == ord('q'):
        cam.release()
        cv2.destroyAllWindows()
        break
stats: avg_line_length 28.342857, max_line_length 90, alphanum_fraction 0.579133, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 640, score_documentation 0.322581
----------------------------------------------------------------------
hexsha: f2ce254695f631034aa335be9147cb99e06d1cfc | size: 999 | ext: py | lang: Python
max_stars:  Python/367.ValidPerfectSquare.py @ nizD/LeetCode-Solutions, head 7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349, licenses ["MIT"], count 263, events 2020-10-05T18:47:29.000Z .. 2022-03-31T19:44:46.000Z
max_issues: Python/367.ValidPerfectSquare.py @ nizD/LeetCode-Solutions, head 7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349, licenses ["MIT"], count 1,264, events 2020-10-05T18:13:05.000Z .. 2022-03-31T23:16:35.000Z
max_forks:  Python/367.ValidPerfectSquare.py @ nizD/LeetCode-Solutions, head 7f4ca37bab795e0d6f9bfd9148a8fe3b62aa5349, licenses ["MIT"], count 760, events 2020-10-05T18:22:51.000Z .. 2022-03-29T06:06:20.000Z
content:
# Given a positive integer num, write a function which returns True if num is a perfect square else False.

class Solution(object):
    def isPerfectSquare(self, num):
        low = 0
        high = num  # Starting from zero till the number we need to check for perfect square
        while (low <= high):
            # Calculating middle value by using right shift operator
            mid = (low + high) >> 1
            # If the square of the middle value is equal to the number then it is a perfect square else not
            if (mid * mid == num):
                return True
            # If the square of the middle value is less than the number we increment the low variable else the high variable is decremented.
            # The loop will continue till the low value becomes more than the high value or the number is a perfect square then True will be returned
            elif (mid * mid < num):
                low = mid + 1
            else:
                high = mid - 1
        return False
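A brief check of the binary search (not part of the original file):

s = Solution()
assert s.isPerfectSquare(16)
assert not s.isPerfectSquare(14)
assert s.isPerfectSquare(1)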
stats: avg_line_length 47.571429, max_line_length 140, alphanum_fraction 0.617618, count_classes 884, score_classes 0.884885, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 588, score_documentation 0.588589
----------------------------------------------------------------------
hexsha: f2d302744caca38acace037f6391b1ffee2c8630 | size: 1,432 | ext: py | lang: Python
max_stars:  src/minescrubber/controller.py @ alok1974/minescrubber, head 0c18d960b385a4a59ac0cf38bc69271a23c667e7, licenses ["MIT"], count 1, events 2020-08-11T23:08:34.000Z .. 2020-08-11T23:08:34.000Z
max_issues: src/minescrubber/controller.py @ alok1974/minescrubber, head 0c18d960b385a4a59ac0cf38bc69271a23c667e7, licenses ["MIT"], count null, events null .. null
max_forks:  src/minescrubber/controller.py @ alok1974/minescrubber, head 0c18d960b385a4a59ac0cf38bc69271a23c667e7, licenses ["MIT"], count null, events null .. null
content:
from minescrubber_core import abstract

from . import mainwindow


class UI(abstract.UI):
    def __init__(self):
        self.main_window = mainwindow.MainWidget()

    def init_board(self, board):
        self.main_window.init_board(board)

    def refresh(self, board, init_image=True):
        self.main_window.refresh(board=board, init_image=init_image)

    def game_over(self, board):
        self.main_window.game_over(board=board)

    def game_solved(self, board):
        self.main_window.game_solved(board=board)

    def run(self):
        self.main_window.show()

    @property
    def new_game_signal(self):
        return self.main_window.NEW_GAME_SIGNAL

    @property
    def cell_selected_signal(self):
        return self.main_window.CELL_SELECTED_SIGNAL

    @property
    def cell_flagged_signal(self):
        return self.main_window.CELL_FLAGGED_SIGNAL

    @property
    def wiring_method_name(self):
        return 'connect'


class Controller(abstract.Controller):
    def pre_callback(self):
        import sys
        from PySide2 import QtWidgets
        QtWidgets.QApplication(sys.argv)

    def post_callback(self):
        import sys
        from PySide2 import QtWidgets
        app = (
            QtWidgets.QApplication.instance()
            or QtWidgets.QApplication(sys.argv)
        )
        sys.exit(app.exec_())


def run():
    controller = Controller()
    controller.run(ui_class=UI)
stats: avg_line_length 22.730159, max_line_length 68, alphanum_fraction 0.670391, count_classes 1,285, score_classes 0.897346, count_generators 0, score_generators 0, count_decorators 350, score_decorators 0.244413, count_async_functions 0, score_async_functions 0, count_documentation 9, score_documentation 0.006285
----------------------------------------------------------------------
hexsha: f2d339d173f754cc9a0dd3025640fbb292c58b5b | size: 36 | ext: py | lang: Python
max_stars:  CADRE/power_dymos/__init__.py @ johnjasa/CADRE, head a4ffd61582b8474953fc309aa540838a14f29dcf, licenses ["Apache-2.0"], count null, events null .. null
max_issues: CADRE/power_dymos/__init__.py @ johnjasa/CADRE, head a4ffd61582b8474953fc309aa540838a14f29dcf, licenses ["Apache-2.0"], count null, events null .. null
max_forks:  CADRE/power_dymos/__init__.py @ johnjasa/CADRE, head a4ffd61582b8474953fc309aa540838a14f29dcf, licenses ["Apache-2.0"], count null, events null .. null
content:
from .power_group import PowerGroup
stats: avg_line_length 18, max_line_length 35, alphanum_fraction 0.861111, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 0, score_documentation 0
----------------------------------------------------------------------
hexsha: f2d47c8b76e7230c4405127adcd43ba0cfb587fd | size: 2,386 | ext: py | lang: Python
max_stars:  client/elementtype.py @ Schille/weimar-graphstore, head 76b47f98fba419ec6290628b56a202c60d8f2d46, licenses ["MIT"], count 2, events 2016-08-27T04:51:01.000Z .. 2020-09-05T01:34:41.000Z
max_issues: client/elementtype.py @ Schille/weimar-graphstore, head 76b47f98fba419ec6290628b56a202c60d8f2d46, licenses ["MIT"], count null, events null .. null
max_forks:  client/elementtype.py @ Schille/weimar-graphstore, head 76b47f98fba419ec6290628b56a202c60d8f2d46, licenses ["MIT"], count null, events null .. null
content:
""" .. module:: elementtype.py :platform: Linux .. moduleauthor:: Michael Schilonka <michael@schilonka.de> """ import logging class ElementType(object): ''' The ElementType class is an in-memory representation of a graph element type. It provides some functions to operate on all entities of the same type and keeps the description of the structured attributes. ''' def __init__(self, type_svr, type_name, graph_name): self._type_svr = type_svr self._typename = type_name self._graphname = graph_name def get_type_definition(self): ''' Returns a dictionary comprising the structured attributes of this graph element type. Return: The created type declaration (dict). ''' return self._type_svr.get_type_definition(self._graphname, self._typename) def get_type_name(self): ''' Returns the type name of this object. Return: The type name (str). ''' return self._typename def count(self): ''' Returns the number of graph elements associated with this type. Return: Count of related graph elements (int). ''' return self._type_svr.count(self._graphname, self._typename) #TDOD provide search method on these elements class VertexType(ElementType, object): ''' The VertexType. ''' def __init__(self, type_svr, vertex_type, graph_name): vertex_type = 'vertex:' + vertex_type super(VertexType, self).__init__(type_svr, vertex_type, graph_name) def get_vertices(self): pass def remove(self): ''' Removes this element type and all associated elements. ''' self._type_svr.remove(self._graphname, self._typename) class EdgeType(ElementType, object): ''' The EdgeType. ''' def __init__(self, type_svr, edge_type, graph_name): edge_type = 'edge:' + edge_type super(EdgeType, self).__init__(type_svr, edge_type, graph_name) def get_edges(self): pass def remove(self): ''' Removes this element type and all associated elements. ''' self._type_svr.remove(self._graphname, self._typename)
stats: avg_line_length 24.854167, max_line_length 82, alphanum_fraction 0.60855, count_classes 2,229, score_classes 0.934199, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 1,095, score_documentation 0.458927
----------------------------------------------------------------------
hexsha: f2d4d9817772d3d480a3be486cdd4fa4ac3b04f2 | size: 672 | ext: py | lang: Python
max_stars:  src/OTLMOW/OTLModel/Classes/Infiltratievoorziening.py @ davidvlaminck/OTLClassPython, head 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc, licenses ["MIT"], count 2, events 2022-02-01T08:58:11.000Z .. 2022-02-08T13:35:17.000Z
max_issues: src/OTLMOW/OTLModel/Classes/Infiltratievoorziening.py @ davidvlaminck/OTLMOW, head 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc, licenses ["MIT"], count null, events null .. null
max_forks:  src/OTLMOW/OTLModel/Classes/Infiltratievoorziening.py @ davidvlaminck/OTLMOW, head 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc, licenses ["MIT"], count null, events null .. null
content:
# coding=utf-8
from OTLMOW.OTLModel.Classes.Put import Put
from OTLMOW.OTLModel.Classes.PutRelatie import PutRelatie
from OTLMOW.GeometrieArtefact.VlakGeometrie import VlakGeometrie


# Generated with OTLClassCreator. To modify: extend, do not edit
class Infiltratievoorziening(Put, PutRelatie, VlakGeometrie):
    """Voorziening voor infiltratie van onvervuild water."""
    # (Dutch: "Facility for the infiltration of unpolluted water.")

    typeURI = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Infiltratievoorziening'
    """De URI van het object volgens https://www.w3.org/2001/XMLSchema#anyURI."""
    # (Dutch: "The URI of the object according to https://www.w3.org/2001/XMLSchema#anyURI.")

    def __init__(self):
        Put.__init__(self)
        PutRelatie.__init__(self)
        VlakGeometrie.__init__(self)
stats: avg_line_length 37.333333, max_line_length 93, alphanum_fraction 0.763393, count_classes 422, score_classes 0.627976, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 290, score_documentation 0.431548
----------------------------------------------------------------------
hexsha: f2d563db44644c1403a6f057432f77eaa66bdff6 | size: 1,517 | ext: py | lang: Python
max_stars:  Chapter04/chapter4.py @ Kushalshingote/Hands-On-Generative-Adversarial-Networks-with-Keras, head fccada4810ba1fe8b79c5a74420a590c95623b52, licenses ["MIT"], count 76, events 2019-05-27T23:38:53.000Z .. 2021-12-19T00:31:13.000Z
max_issues: Chapter04/chapter4.py @ Kushalshingote/Hands-On-Generative-Adversarial-Networks-with-Keras, head fccada4810ba1fe8b79c5a74420a590c95623b52, licenses ["MIT"], count 9, events 2019-05-29T21:01:32.000Z .. 2020-07-30T12:00:02.000Z
max_forks:  Chapter04/chapter4.py @ Kushalshingote/Hands-On-Generative-Adversarial-Networks-with-Keras, head fccada4810ba1fe8b79c5a74420a590c95623b52, licenses ["MIT"], count 35, events 2019-05-12T04:20:54.000Z .. 2022-03-03T19:46:06.000Z
content:
import numpy as np
import scipy.linalg

# get the training data, sample the Generator with random z to produce r
# (X_train, G, z_dim, k and inception_model are defined earlier in the chapter)
N = X_train
z = np.random.uniform(-1, 1, (1, z_dim))
r = G.predict_on_batch(z)

# define our distance measure S to be L1
S = lambda n, r: np.sum(np.abs(n - r))

# compute the distances between the reference and the samples in N using the measure S
distances = [S(n, r) for n in N]

# find the indices of the k most similar samples and select them from N
nearest_neighbors_index = np.argpartition(distances, k)[:k]
nearest_neighbors_images = N[nearest_neighbors_index]

# generate fake images from the generator
n_fake_images = 5000
z = np.random.uniform(-1, 1, (n_fake_images, z_dim))
x = G.predict_on_batch(z)


def compute_inception_score(x, inception_model, n_fake_images, z_dim):
    # probability of y given x
    p_y_given_x = inception_model.predict_on_batch(x)
    # marginal probability of y
    q_y = np.mean(p_y_given_x, axis=0)
    inception_scores = p_y_given_x * (np.log(p_y_given_x) - np.log(q_y))
    inception_score = np.exp(np.mean(inception_scores))
    return inception_score


def get_mean_and_covariance(data):
    mean = np.mean(data, axis=0)
    # rowvar=False: rows are observations (samples), columns are variables (features)
    covariance = np.cov(data, rowvar=False)
    return mean, covariance


def compute_frechet_inception_distance(mean_r, mean_f, cov_r, cov_f):
    l2_mean = np.sum((mean_r - mean_f) ** 2)
    # with disp=False, sqrtm returns the matrix square root and an error estimate
    cov_mean, _ = scipy.linalg.sqrtm(np.dot(cov_r, cov_f), disp=False)
    return l2_mean + np.trace(cov_r) + np.trace(cov_f) - 2 * np.trace(cov_mean)
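The FID helpers above can be exercised on their own; a minimal, self-contained check with random stand-in features (all data below is illustrative, not from the book):

import numpy as np
import scipy.linalg

# Hypothetical stand-ins for Inception features of real and generated images.
real_features = np.random.randn(256, 64)
fake_features = np.random.randn(256, 64) + 0.5  # shifted distribution, so FID > 0

mean_r, cov_r = get_mean_and_covariance(real_features)
mean_f, cov_f = get_mean_and_covariance(fake_features)
fid = compute_frechet_inception_distance(mean_r, mean_f, cov_r, cov_f)
print(np.real(fid))  # any tiny imaginary part from sqrtm is numerical noise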
stats: avg_line_length 35.27907, max_line_length 87, alphanum_fraction 0.712591, count_classes 0, score_classes 0, count_generators 0, score_generators 0, count_decorators 0, score_decorators 0, count_async_functions 0, score_async_functions 0, count_documentation 384, score_documentation 0.253131
----------------------------------------------------------------------
hexsha: f2d5d419d88204df9613b1050b9f75f4f36ef80c | size: 20,923 | ext: py | lang: Python
max_stars:  naspi/naspi.py @ fgiroult321/simple-nas-pi, head 6d1a13523f1f20ebe26f780c758a3ff15be899ff, licenses ["MIT"], count null, events null .. null
max_issues: naspi/naspi.py @ fgiroult321/simple-nas-pi, head 6d1a13523f1f20ebe26f780c758a3ff15be899ff, licenses ["MIT"], count null, events null .. null
max_forks:  naspi/naspi.py @ fgiroult321/simple-nas-pi, head 6d1a13523f1f20ebe26f780c758a3ff15be899ff, licenses ["MIT"], count null, events null .. null
content:
import os import boto3 # import subprocess from subprocess import Popen, PIPE from time import sleep import json import ast from datetime import datetime, time, timedelta, date import logging import logging.handlers import sys, getopt import glob import shutil logger = logging.getLogger() logger.setLevel(logging.INFO) def main(): ### Order of tasks # # 0 check disks are here, catch output # # 1 sync to replica disk, catch output # # 2 sync to aws, catch output # # 3 compare disks files vs replica, catch oputput # # 4 compare disks files vs s3, catch out # # # Run option # -l, --system : only analyze_disks & get_server_metrics , every 5m # -a, --analyze : analyze_s3_files & analyze_local_files, every 1 or 3 hours # -s, --sync : run_s3_syncs & run_local_syncs, every night # -d, --syncdelete : run_s3_syncs & run_local_syncs with delete no cron #### exception handling in logger: sys.excepthook = handle_exception valid_modes = ["system","analyze","sync","syncdelete","synclocal","syncs3","backup","osbackup","init_config"] mode = '' config = '' usage_message = 'naspi -c /path/to/config.json -m <system|analyze|sync|syncdelete|synclocal|syncs3|backup|osbackup|init_config>' try: opts, args = getopt.getopt(sys.argv[1:],"hm:c:",["mode=","config="]) # except getopt.GetoptError: except Exception as e: print(usage_message) sys.exit(2) for opt, arg in opts: if opt == '-h': print(usage_message) sys.exit() elif opt in ("-m", "--mode"): mode = arg elif opt in ("-c", "--config"): config = arg # # # checking values passed if not mode: print("Error, mode is mandatory !!") print(usage_message) sys.exit(2) elif not config: print("Error, config file is mandatory !!") print(usage_message) sys.exit(2) elif mode not in valid_modes: print("Wrong mode selected, correct modes are : {}".format(valid_modes)) print(usage_message) sys.exit(2) # logger.info("Context info : ") # logger.info(os.getcwd()) # logger.info(__file__) if mode == "init_config": output = init_config_file(config) sys.exit(0) else: #### Configuration loading disks_list,folder_to_sync_locally,folders_to_sync_s3,configuration = load_configuration(config) global NUMBER_DAYS_RETENTION global MIN_DELAY_BETWEEN_SYNCS_SECONDS global working_dir NUMBER_DAYS_RETENTION = configuration.get('NUMBER_DAYS_RETENTION') MIN_DELAY_BETWEEN_SYNCS_SECONDS = configuration.get('MIN_DELAY_BETWEEN_SYNCS_SECONDS') working_dir = configuration.get('working_dir') home_dir = os.environ['HOME'] global export_path_cmd export_path_cmd = 'export PATH={}/.local/bin:$PATH'.format(home_dir) ### Logging setup # Change root logger level from WARNING (default) to NOTSET in order for all messages to be delegated. logging.getLogger('').setLevel(logging.NOTSET) # Add file rotatin handler, with level DEBUG rotatingHandler = logging.handlers.RotatingFileHandler(filename='{}/nas_monitor.log'.format(working_dir), maxBytes=1000000, backupCount=5) rotatingHandler.setLevel(logging.INFO) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') rotatingHandler.setFormatter(formatter) logging.getLogger('').addHandler(rotatingHandler) global logger logger = logging.getLogger("naspi." 
+ __name__) logger.info("") logger.info("") logger.info("----------------------------------------------------------------------------------------") logger.info("----------------------------------------------------------------------------------------") logger.info("### Starting Nas Monitor") logger.info('Mode is {} and config file is {}'.format(mode,config)) output = open_or_init_output_file(working_dir) if mode == "backup": output = backup_naspi(configuration['backup'],output) if mode == "osbackup": output = os_backup(configuration['backup'],output) if mode == "system": output = analyze_disks(disks_list,output) output = get_server_metrics(output) if mode == "synclocal": output = analyze_local_files(folder_to_sync_locally, output) output = run_local_syncs(folder_to_sync_locally,configuration,output) output = analyze_local_files(folder_to_sync_locally, output) # File stored to s3 once per hour like local sync (TODO can be improved with a dedicated mode and cron) res_s3 = write_and_cleanup_output_file_to_s3(output,'archive-fgi') if mode == "syncs3": output = analyze_s3_files(folders_to_sync_s3, output) output = run_s3_syncs(folders_to_sync_s3,configuration,output) output = analyze_s3_files(folders_to_sync_s3, output) if mode == "sync": output = run_s3_syncs(folders_to_sync_s3,configuration,output) output = run_local_syncs(folder_to_sync_locally,configuration,output) if mode == "analyze" or mode == "sync": output = analyze_s3_files(folders_to_sync_s3, output) output = analyze_local_files(folder_to_sync_locally, output) result = write_and_cleanup_output_file(output,configuration) # res_s3 = write_and_cleanup_output_file_to_s3(output,'archive-fgi') logger.info(json.dumps(output)) #### #### function defs #### def handle_exception(exc_type, exc_value, exc_traceback): if issubclass(exc_type, KeyboardInterrupt): sys.__excepthook__(exc_type, exc_value, exc_traceback) return logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)) def load_configuration(conf_file): try: f = open(conf_file, "r") dict_conf = json.loads(f.read()) f.close() return( dict_conf['disks_list'], dict_conf['folder_to_sync_locally'], dict_conf['folders_to_sync_s3'], dict_conf['naspi_configuration'] ) except FileNotFoundError as e: print("Conf file not found, provide a file named {}".format(conf_file)) raise(e) # sys.exit(2) def today_time(): today = datetime.today() d1 = today.strftime("%Y-%m-%d %H:%M:%S") return(d1) def today_date(): today = datetime.today() d1 = today.strftime("%Y-%m-%d") return(d1) def date_diff_in_seconds(dt2, dt1): timediff = dt2 - dt1 return timediff.days * 24 * 3600 + timediff.seconds def run_shell_command(command): message = "" logger.info("### Running {}".format(command)) df_out = Popen(command, shell=True, stdout=PIPE, stderr=PIPE ) sleep(.2) retcode = df_out.poll() while retcode is None: # Process running # logger.info("### Process not finished, waiting...") sleep(10) retcode = df_out.poll() # Here, `proc` has finished with return code `retcode` if retcode != 0: """Error handling.""" logger.info("### Error !") message = df_out.stderr.read().decode("utf-8") logger.info(retcode) logger.info(message) return(retcode,message) message = df_out.stdout.read().decode("utf-8") logger.info(retcode) logger.info(message) return(retcode,message) def open_or_init_output_file(working_dir): today = today_date() try: f = open("{}/naspi_status_{}.json".format(working_dir,today), "r") dict_output = json.loads(f.read()) f.close() except FileNotFoundError: logger.info("File for today does not exist, 
initializing it") dict_output = {} dict_output['disks'] = {} dict_output['disks']['disk-list'] = [] dict_output['local_sync'] = {} dict_output['local_sync']['success'] = True dict_output['s3_sync'] = {} dict_output['s3_sync']['success'] = True dict_output['server'] = {} return(dict_output) def init_config_file(file_name): print("initializing config file {}".format(file_name)) if os.path.exists(file_name): print("Error, config file {} already exists !!".format(file_name)) sys.exit(2) else: dict_conf = {} dict_conf['disks_list'] = [] dict_conf['folder_to_sync_locally'] = [] dict_conf['folders_to_sync_s3'] = [] dict_conf['naspi_configuration'] = {} dict_conf['naspi_configuration']['working_dir'] = "" dict_conf['naspi_configuration']['NUMBER_DAYS_RETENTION'] = 7 dict_conf['naspi_configuration']['MIN_DELAY_BETWEEN_SYNCS_SECONDS'] = 14400 dict_conf['naspi_configuration']['backup'] = {} dict_conf['naspi_configuration']['backup']['files_to_backup'] = [] dict_conf['naspi_configuration']['backup']['backup_location'] = "" dict_conf['naspi_configuration']['backup']['os_backup_location'] = "" f = open("{}".format(file_name), "w") f.write(json.dumps(dict_conf,indent=4)) f.close() return("ok") def write_and_cleanup_output_file_to_s3(output,bucket): s3_client = boto3.client('s3',region_name='eu-west-1') today = today_date() response = s3_client.put_object( Body=json.dumps(output), Bucket=bucket, Key="status/naspi_status_{}.json".format(today) ) return(response) def write_and_cleanup_output_file(output,configuration): NUMBER_DAYS_RETENTION = configuration.get('NUMBER_DAYS_RETENTION') working_dir = configuration.get('working_dir') today = today_date() f = open("{}/naspi_status_{}.json".format(working_dir,today), "w") f.write(json.dumps(output,indent=4)) f.close() existing_output_files = glob.glob('{}/naspi_status_*.json'.format(working_dir)) existing_output_files.sort() for out_file in existing_output_files: if out_file not in existing_output_files[-NUMBER_DAYS_RETENTION:]: logger.info("Deleting {}".format(out_file)) os.remove(out_file) return("done") def analyze_disks(disks_list,output): output['disks']['all_disks_ok'] = True output['disks']['disk-list'] = [] retcode,message = run_shell_command('df -kh | tail -n +2') #logger.info(message) all_disks_present = True for disk in disks_list: disk_output = {} if disk in message: logger.info("### disk {} is here".format(disk)) usage = message.split(disk)[0][-4:] logger.info("### usage : {}".format(usage)) disk_output['name'] = disk disk_output['occupied_%'] = usage disk_output['present'] = True output['disks']['disk-list'].append(disk_output) else: logger.info("### disk {} not here".format(disk)) all_disks_present = False disk_output['name'] = disk disk_output['occupied_%'] = "NA" disk_output['present'] = False output['disks']['disk-list'].append(disk_output) if not all_disks_present: logger.info("### some disks are missing") output['disks']['all_disks_ok'] = False output['disks']['last_run'] = today_time() return(output) def acquire_sync_lock(output,local_or_s3,configuration): # Make sure only one sync process runs at a time can_run = True MIN_DELAY_BETWEEN_SYNCS_SECONDS = configuration.get('MIN_DELAY_BETWEEN_SYNCS_SECONDS') if 'last_started' in output[local_or_s3]: started_time = datetime.strptime(output[local_or_s3]['last_started'], '%Y-%m-%d %H:%M:%S') else: started_time = datetime.strptime('2020-12-25 12:00:00', '%Y-%m-%d %H:%M:%S') now_time = datetime.now() logger.info(" %d seconds from previous run" %(date_diff_in_seconds(now_time, started_time))) if 
'locked' in output[local_or_s3] and output[local_or_s3]['locked'] == True and date_diff_in_seconds(now_time, started_time) < MIN_DELAY_BETWEEN_SYNCS_SECONDS:
        logger.info("Can't run sync as another process might be running")
        can_run = False
    else:
        logger.info("Acquiring lock for {}".format(local_or_s3))
        output[local_or_s3]['locked'] = True
        output[local_or_s3]['last_started'] = today_time()
        logger.info(output)
        # Acquire lock and write it to disk:
        result = write_and_cleanup_output_file(output, configuration)
    return(can_run, output)

def run_s3_syncs(folders_to_sync_s3, configuration, output):
    can_run, output = acquire_sync_lock(output, 's3_sync', configuration)
    if can_run:
        success = True
        for folder in folders_to_sync_s3:
            exclusions_flags = ''
            if 'exclude' in folder:
                for exclusion in folder['exclude']:
                    exclusions_flags = exclusions_flags + ' --exclude "{}/*" '.format(exclusion)
            # command = 'aws s3 sync {} {} {} --storage-class DEEP_ARCHIVE --dryrun'.format(folder['source_folder'], folder['dest_folder'], exclusions_flags)
            command = 'aws s3 sync {} {} {} --storage-class DEEP_ARCHIVE --only-show-errors'.format(folder['source_folder'], folder['dest_folder'], exclusions_flags)
            ret, msg = run_shell_command('{}; {}'.format(export_path_cmd, command))
            if ret != 0:
                success = False
        output['s3_sync']['success'] = success
        output['s3_sync']['last_run'] = today_time()
        output['s3_sync']['locked'] = False
    else:
        logger.info("/!\\ Can't run the sync, there is a sync process ongoing")
    return(output)

def count_files_in_dir(folder, exclude_list):
    exclude_directories = set(exclude_list)  # directory names to exclude
    total_file = 0
    for dname, dirs, files in os.walk(folder):  # this loops through directories recursively
        dirs[:] = [d for d in dirs if d not in exclude_directories]  # skip directory if in exclude list
        total_file += len(files)
    logger.info("Files in {} : {}".format(folder, total_file))
    return(total_file)

def analyze_s3_files(folders_to_sync_s3, output):
    output['s3_sync']['files_source'] = 0
    output['s3_sync']['files_dest'] = 0
    output['s3_sync']['folders'] = []
    for folder in folders_to_sync_s3:
        one_folder = {}
        one_folder['source_folder'] = folder['source_folder']
        # Get local files count
        if 'exclude' in folder:
            exclude_directories = set(folder['exclude'])  # directory names to exclude
        else:
            exclude_directories = []
        total_file = 0
        for dname, dirs, files in os.walk(folder['source_folder']):  # this loops through directories recursively
            dirs[:] = [d for d in dirs if d not in exclude_directories]  # skip directory if in exclude list
            # print(len(files))
            total_file += len(files)
        logger.info("Files in {} : {}".format(folder['source_folder'], total_file))
        one_folder['source_count'] = total_file
        output['s3_sync']['files_source'] += total_file
        # Get s3 files count
        ret, msg = run_shell_command('{}; aws s3 ls {} --recursive --summarize | grep "Total Objects"'.format(export_path_cmd, folder['dest_folder']))
        output['s3_sync']['files_dest'] += int(msg.split(': ')[1])
        one_folder['dest_folder'] = folder['dest_folder']
        one_folder['dest_count'] = int(msg.split(': ')[1])
        output['s3_sync']['folders'].append(one_folder)
    output['s3_sync']['files_delta'] = output['s3_sync']['files_source'] - output['s3_sync']['files_dest']
    logger.info("Analyze s3 file output : {}".format(json.dumps(output)))
    return(output)

def run_local_syncs(folder_to_sync_locally, configuration, output):
    # rsync -anv dir1 dir2
    # n = dryrun, v = verbose
    # will create dir2/dir1
    can_run, output = acquire_sync_lock(output, 'local_sync', configuration)
    if can_run:
        success = True
        for folder in folder_to_sync_locally:
            delete = ""
            if folder['delete']:
                delete = "--delete"
            ret, msg = run_shell_command('mkdir -p {}'.format(folder['dest_folder']))
            ret, msg = run_shell_command('rsync -aq {} {} {}'.format(folder['source_folder'], folder['dest_folder'], delete))
            if ret != 0:
                success = False
        output['local_sync']['success'] = success
        output['local_sync']['last_run'] = today_time()
        output['local_sync']['locked'] = False
    else:
        logger.info("/!\\ Can't run the sync, there is a sync process ongoing")
    return(output)

def analyze_local_files(folder_to_sync_locally, output):
    output['local_sync']['files_source'] = 0
    output['local_sync']['files_dest'] = 0
    output['local_sync']['folders'] = []
    for folder in folder_to_sync_locally:
        one_folder = {}
        one_folder['source_folder'] = folder['source_folder']
        src_count = count_files_in_dir(folder['source_folder'], [''])
        output['local_sync']['files_source'] += src_count
        one_folder['source_count'] = src_count
        dest_folder = "{}/{}".format(folder['dest_folder'], folder['source_folder'].split("/")[-1])
        one_folder['dest_folder'] = dest_folder
        dest_count = count_files_in_dir(dest_folder, [''])
        output['local_sync']['files_dest'] += dest_count
        one_folder['dest_count'] = dest_count
        output['local_sync']['folders'].append(one_folder)
    output['local_sync']['files_delta'] = output['local_sync']['files_source'] - output['local_sync']['files_dest']
    logger.info("Analyze local file output : {}".format(json.dumps(output)))
    return(output)

def get_server_metrics(output):
    # get cpu usage
    ret, msg = run_shell_command('top -bn 1 | grep Cpu | head -c 14 | tail -c 5')
    output['server']['cpu_%'] = msg
    ret, msg = run_shell_command('free -m | grep Mem | head -c 32 | tail -c 5')
    output['server']['ram_Mo'] = msg
    ret, msg = run_shell_command('vcgencmd measure_temp | head -c 11 | tail -c 6')
    output['server']['temp_c'] = msg
    output['server']['last_run'] = today_time()
    return(output)

def backup_naspi(backup, output):
    backup_location = backup.get('backup_location')
    backup_dir = "{}{}".format(backup_location, today_date())
    ret, msg = run_shell_command('mkdir -p {}'.format(backup_dir))
    files_to_backup = backup.get("files_to_backup")
    for entry in files_to_backup:
        if os.path.isdir(entry):
            ret, msg = run_shell_command('rsync -aqR {} {}'.format(entry, backup_dir))
        else:
            subdir = entry.rsplit('/', 1)[0]
            ret, msg = run_shell_command('mkdir -p {}{}'.format(backup_dir, subdir))
            ret, msg = run_shell_command('rsync -aq {} {}{}'.format(entry, backup_dir, entry))
    # old bkp cleanup
    existing_backup_dir = glob.glob('{}/*'.format(backup_location))
    existing_backup_dir.sort()
    for out_file in existing_backup_dir:
        if out_file not in existing_backup_dir[-10:]:
            print("Deleting {}".format(out_file))
            shutil.rmtree(out_file, ignore_errors=True)
    return(output)

def os_backup(backup, output):
    os_backup_location = backup.get('os_backup_location')
    backup_name = "osbkp-{}.img".format(today_date())
    # sudo dd if=/dev/mmcblk0 of=/disks/Elements/os_bkp/osbkp18082021.img bs=1M
    # sudo ./pishrink.sh -z osbkp18082021.img
    ret, msg = run_shell_command('sudo dd if=/dev/mmcblk0 of={}/{} bs=1M'.format(os_backup_location, backup_name))
    if not os.path.exists("{}/pishrink.sh".format(working_dir)):
        ret, msg = run_shell_command('wget https://raw.githubusercontent.com/Drewsif/PiShrink/master/pishrink.sh -P {}'.format(working_dir))  # wget https://raw.githubusercontent.com/Drewsif/PiShrink/master/pishrink.sh
        ret, msg = run_shell_command('sudo chmod +x {}/pishrink.sh'.format(working_dir))  # sudo chmod +x pishrink.sh
    ret, msg = run_shell_command('sudo bash {}/pishrink.sh -z {}/{}'.format(working_dir, os_backup_location, backup_name))
    ret, msg = run_shell_command('sudo chown pi:pi *.img.gz')
    # old bkp cleanup
    existing_backup_dir = glob.glob('{}/*'.format(os_backup_location))
    existing_backup_dir.sort()
    for out_file in existing_backup_dir:
        if out_file not in existing_backup_dir[-4:]:
            print("Deleting {}".format(out_file))
            shutil.rmtree(out_file, ignore_errors=True)
    return(output)

if __name__ == '__main__':
    main()
    # main(sys.argv[1:])
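# Illustration (editor's sketch): the lock in acquire_sync_lock() is just a boolean
# persisted in the JSON state file. The same idea, self-contained; the lock_file
# path and min_delay value are hypothetical stand-ins for the script's configuration.
def _example_try_acquire(kind, lock_file="/tmp/sync_state.json", min_delay=300):
    import json, os, time
    state = {}
    if os.path.exists(lock_file):
        with open(lock_file) as fh:
            state = json.load(fh)
    entry = state.setdefault(kind, {})
    if entry.get("locked") and time.time() - entry.get("last_started", 0) < min_delay:
        return False  # another sync is probably still running
    entry["locked"] = True
    entry["last_started"] = time.time()
    with open(lock_file, "w") as fh:
        json.dump(state, fh)
    return True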
38.461397
164
0.633322
0
0
0
0
0
0
0
0
6617
0.316255
f2d7e6c6a86e1314f1b2716ac6227b1dc354be91
14328
py
Python
fawkes/differentiator_lowkey.py
biergaiqiao/Oriole-Thwarting-Privacy-against-Trustworthy-Deep-Learning-Models
ffadb82b666e8c1561a036a10d9922db8a3266cc
[ "MIT" ]
1
2021-05-18T01:14:44.000Z
2021-05-18T01:14:44.000Z
fawkes/differentiator_lowkey.py
biergaiqiao/Oriole-Thwarting-Privacy-against-Trustworthy-Deep-Learning-Models
ffadb82b666e8c1561a036a10d9922db8a3266cc
[ "MIT" ]
null
null
null
fawkes/differentiator_lowkey.py
biergaiqiao/Oriole-Thwarting-Privacy-against-Trustworthy-Deep-Learning-Models
ffadb82b666e8c1561a036a10d9922db8a3266cc
[ "MIT" ]
1
2021-05-18T01:14:47.000Z
2021-05-18T01:14:47.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date    : 2020-10-21
# @Author  : Emily Wenger (ewenger@uchicago.edu)

import time

import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
from keras.utils import Progbar


class FawkesMaskGeneration:
    # if the attack is trying to mimic a target image or a neuron vector
    MIMIC_IMG = True
    # number of iterations to perform gradient descent
    MAX_ITERATIONS = 10000
    # larger values converge faster to less accurate results
    LEARNING_RATE = 1e-2
    # the initial constant c to pick as a first guess
    INITIAL_CONST = 1
    # pixel intensity range
    INTENSITY_RANGE = 'imagenet'
    # threshold for distance
    L_THRESHOLD = 0.03
    # whether keep the final result or the best result
    KEEP_FINAL = False
    # max_val of image
    MAX_VAL = 255
    MAXIMIZE = False
    IMAGE_SHAPE = (224, 224, 3)
    RATIO = 1.0
    LIMIT_DIST = False
    LOSS_TYPE = 'features'  # use features (original Fawkes) or gradients (Witches Brew) to run Fawkes?

    def __init__(self, bottleneck_model_ls, mimic_img=MIMIC_IMG, batch_size=1, learning_rate=LEARNING_RATE,
                 max_iterations=MAX_ITERATIONS, initial_const=INITIAL_CONST, intensity_range=INTENSITY_RANGE,
                 l_threshold=L_THRESHOLD, max_val=MAX_VAL, keep_final=KEEP_FINAL, maximize=MAXIMIZE,
                 image_shape=IMAGE_SHAPE, verbose=1, ratio=RATIO, limit_dist=LIMIT_DIST, loss_method=LOSS_TYPE):
        assert intensity_range in {'raw', 'imagenet', 'inception', 'mnist'}

        # constant used for tanh transformation to avoid corner cases
        self.it = 0
        self.tanh_constant = 2 - 1e-6
        self.MIMIC_IMG = mimic_img
        self.LEARNING_RATE = learning_rate
        self.MAX_ITERATIONS = max_iterations
        self.initial_const = initial_const
        self.batch_size = batch_size
        self.intensity_range = intensity_range
        self.l_threshold = l_threshold
        self.max_val = max_val
        self.keep_final = keep_final
        self.verbose = verbose
        self.maximize = maximize
        self.learning_rate = learning_rate
        self.ratio = ratio
        self.limit_dist = limit_dist
        self.single_shape = list(image_shape)
        self.bottleneck_models = bottleneck_model_ls
        self.loss_method = loss_method

        self.input_shape = tuple([self.batch_size] + self.single_shape)
        self.bottleneck_shape = tuple([self.batch_size] + self.single_shape)

        # the variable we're going to optimize over
        self.modifier = tf.Variable(np.ones(self.input_shape, dtype=np.float32) * 1e-6)
        self.const = tf.Variable(np.ones(batch_size) * self.initial_const, dtype=np.float32)
        self.mask = tf.Variable(np.ones(batch_size), dtype=np.bool)

    @staticmethod
    def resize_tensor(input_tensor, model_input_shape):
        if input_tensor.shape[1:] == model_input_shape or model_input_shape[1] is None:
            return input_tensor
        resized_tensor = tf.image.resize(input_tensor, model_input_shape[:2])
        return resized_tensor

    def input_space_process(self, img):
        if self.intensity_range == 'imagenet':
            mean = np.repeat([[[[103.939, 116.779, 123.68]]]], self.batch_size, axis=0)
            raw_img = (img - mean)
        else:
            raw_img = img
        return raw_img

    def reverse_input_space_process(self, img):
        if self.intensity_range == 'imagenet':
            mean = np.repeat([[[[103.939, 116.779, 123.68]]]], self.batch_size, axis=0)
            raw_img = (img + mean)
        else:
            raw_img = img
        return raw_img

    def clipping(self, imgs):
        imgs = self.reverse_input_space_process(imgs)
        imgs = np.clip(imgs, 0, self.max_val)
        imgs = self.input_space_process(imgs)
        return imgs

    def calc_dissim(self, source_raw, source_mod_raw):
        return 0.0, 0.0, 0.0
        # msssim_split = tf.image.ssim(source_raw, source_mod_raw, max_val=255.0)
        # dist_raw = (1.0 - tf.stack(msssim_split)) / 2.0
        # dist = tf.maximum(dist_raw - self.l_threshold, 0.0)
        # # dist_raw_sum = tf.reduce_sum(tf.where(self.mask, dist_raw, tf.zeros_like(dist_raw)))
        # dist_raw_sum = tf.reduce_sum(dist_raw)
        # # dist_sum = tf.reduce_sum(tf.where(self.mask, dist, tf.zeros_like(dist)))
        # dist_sum = tf.reduce_sum(dist)
        # return dist, dist_sum, dist_raw_sum

    def calc_bottlesim(self, tape, source_raw, target_raw, source_filtered, original_raw):
        """ original Fawkes loss function. """
        bottlesim = 0.0
        bottlesim_sum = 0.0
        # make sure everything is the right size.
        model_input_shape = self.single_shape
        cur_aimg_input = self.resize_tensor(source_raw, model_input_shape)
        cur_source_filtered = self.resize_tensor(source_filtered, model_input_shape)
        # cur_timg_input = self.resize_tensor(target_raw, model_input_shape)
        for bottleneck_model in self.bottleneck_models:
            if tape is not None:
                try:
                    tape.watch(bottleneck_model.variables)
                except AttributeError:
                    tape.watch(bottleneck_model.model.variables)
            # get the respective feature space reprs.
            bottleneck_a = bottleneck_model(cur_aimg_input)
            bottleneck_filter = bottleneck_model(cur_source_filtered)
            bottleneck_s = bottleneck_model(original_raw)
            # compute the differences.
            bottleneck_diff = bottleneck_a - bottleneck_s
            bottleneck_diff_filter = bottleneck_filter - bottleneck_s
            # get scale factor.
            scale_factor = tf.sqrt(tf.reduce_sum(tf.square(bottleneck_s), axis=1))
            scale_factor_filter = tf.sqrt(tf.reduce_sum(tf.square(bottleneck_diff_filter), axis=1))
            # compute the loss
            cur_bottlesim = tf.reduce_sum(tf.square(bottleneck_diff), axis=1)
            cur_bottlesim_filter = tf.reduce_sum(tf.square(bottleneck_diff_filter), axis=1)
            cur_bottlesim = cur_bottlesim / scale_factor
            cur_bottlesim_filter = cur_bottlesim_filter / scale_factor_filter
            bottlesim += cur_bottlesim + cur_bottlesim_filter
            bottlesim_sum += tf.reduce_sum(cur_bottlesim) + tf.reduce_sum(cur_bottlesim_filter)
        return bottlesim, bottlesim_sum

    def compute_feature_loss(self, tape, aimg_raw, simg_raw, aimg_input, timg_input, simg_input, aimg_filtered):
        """ Compute input space + feature space loss. """
        input_space_loss, input_space_loss_sum, input_space_loss_raw_sum = self.calc_dissim(aimg_raw, simg_raw)
        feature_space_loss, feature_space_loss_sum = self.calc_bottlesim(tape, aimg_input, timg_input,
                                                                         aimg_filtered, simg_input)
        if self.maximize:
            loss = self.const * input_space_loss - feature_space_loss
        else:
            if self.it < self.MAX_ITERATIONS:
                loss = self.const * input_space_loss + 1000 * feature_space_loss  # - feature_space_loss_orig
            else:
                loss = self.const * 100 * input_space_loss + feature_space_loss
        # loss_sum = tf.reduce_sum(tf.where(self.mask, loss, tf.zeros_like(loss)))
        loss_sum = tf.reduce_sum(loss)
        # return loss_sum, input_space_loss, feature_space_loss, input_space_loss_sum, input_space_loss_raw_sum, feature_space_loss_sum
        return loss_sum, 0, feature_space_loss, 0, 0, feature_space_loss_sum

    def attack(self, source_imgs, target_imgs, weights=None):
        """ Main function that runs cloak generation. """
        if weights is None:
            weights = np.ones([source_imgs.shape[0]] + list(self.bottleneck_shape[1:]))

        assert weights.shape[1:] == self.bottleneck_shape[1:]
        assert source_imgs.shape[1:] == self.input_shape[1:]
        assert source_imgs.shape[0] == weights.shape[0]
        if self.MIMIC_IMG:
            assert target_imgs.shape[1:] == self.input_shape[1:]
            assert source_imgs.shape[0] == target_imgs.shape[0]
        else:
            assert target_imgs.shape[1:] == self.bottleneck_shape[1:]
            assert source_imgs.shape[0] == target_imgs.shape[0]

        start_time = time.time()

        adv_imgs = []
        print('%d batches in total' % int(np.ceil(len(source_imgs) / self.batch_size)))

        for idx in range(0, len(source_imgs), self.batch_size):
            # print('processing image %d at %s' % (idx + 1, datetime.datetime.now()))
            adv_img = self.attack_batch(source_imgs[idx:idx + self.batch_size],
                                        target_imgs[idx:idx + self.batch_size])
            adv_imgs.extend(adv_img)

        elapsed_time = time.time() - start_time
        print('protection cost %f s' % elapsed_time)
        return np.array(adv_imgs)

    def attack_batch(self, source_imgs, target_imgs):
        """ TF2 method to generate the cloak. """
        # preprocess images.
        global progressbar
        nb_imgs = source_imgs.shape[0]
        mask = [True] * nb_imgs + [False] * (self.batch_size - nb_imgs)
        self.mask = np.array(mask, dtype=np.bool)
        LR = self.learning_rate

        # make sure source/target images are an array
        source_imgs = np.array(source_imgs, dtype=np.float32)
        target_imgs = np.array(target_imgs, dtype=np.float32)

        # metrics to test
        best_bottlesim = [0] * nb_imgs if self.maximize else [np.inf] * nb_imgs
        best_adv = np.zeros(source_imgs.shape)
        total_distance = [0] * nb_imgs
        finished_idx = set()

        # make the optimizer
        optimizer = tf.keras.optimizers.Adam(self.learning_rate)
        # optimizer = tf.keras.optimizers.Adadelta(self.learning_rate)

        # get the modifier
        self.modifier = tf.Variable(np.ones(self.input_shape, dtype=np.float32) * 1e-4)
        # self.modifier = tf.Variable(np.random.uniform(-8.0, 8.0, self.input_shape), dtype=tf.float32)

        if self.verbose == 0:
            progressbar = Progbar(self.MAX_ITERATIONS, width=30, verbose=1)

        # watch relevant variables.
        simg_tanh = tf.Variable(source_imgs, dtype=np.float32)
        timg_tanh = tf.Variable(target_imgs, dtype=np.float32)
        # simg_tanh = self.reverse_input_space_process(simg_tanh)
        # timg_tanh = self.reverse_input_space_process(timg_tanh)

        # run the attack
        self.it = 0
        below_thresh = False
        while self.it < self.MAX_ITERATIONS:
            self.it += 1
            with tf.GradientTape(persistent=True) as tape:
                tape.watch(self.modifier)
                tape.watch(simg_tanh)
                tape.watch(timg_tanh)

                aimg_raw = simg_tanh + self.modifier
                aimg_filtered_raw = simg_tanh + tfa.image.gaussian_filter2d(self.modifier, [7, 7], 3.0)
                final_filtered_raw = simg_tanh + tfa.image.gaussian_filter2d(self.modifier, [1, 1], 2.0)
                simg_raw = simg_tanh
                timg_raw = timg_tanh

                # Convert further preprocess for bottleneck
                aimg_input = self.input_space_process(aimg_raw)
                aimg_filtered = self.input_space_process(aimg_filtered_raw)
                timg_input = self.input_space_process(timg_raw)
                simg_input = self.input_space_process(simg_raw)
                # aimg_input = aimg_raw
                # timg_input = timg_raw
                # simg_input = simg_raw

                # get the feature space loss.
                loss, input_dist, internal_dist, input_dist_sum, input_dist_raw_sum, internal_dist_sum = self.compute_feature_loss(
                    tape, aimg_raw, simg_raw, aimg_input, timg_input, simg_input, aimg_filtered)

            # compute gradients
            grad = tape.gradient(loss, [self.modifier])
            # grad[0] = grad[0] * 1e11
            grad[0] = tf.sign(grad[0]) * 0.6375
            # optimizer.apply_gradients(zip(grad, [self.modifier]))
            self.modifier = self.modifier - grad[0]
            self.modifier = tf.clip_by_value(self.modifier, -12.0, 12.0)

            for e, (feature_d, mod_img) in enumerate(zip(internal_dist, final_filtered_raw)):
                if e >= nb_imgs:
                    break
                if (feature_d < best_bottlesim[e] and (not self.maximize)) or (
                        feature_d > best_bottlesim[e] and self.maximize):
                    # print('found improvement')
                    best_bottlesim[e] = feature_d
                    best_adv[e] = mod_img

            # compute whether or not your perturbation is too big.
            # thresh_over = input_dist_sum / self.batch_size / self.l_threshold * 100
            # if self.it != 0 and (self.it % (self.MAX_ITERATIONS // 3) == 0):
            #     LR = LR * 0.8  # np.array([LR * 0.8])
            #     optimizer.learning_rate = LR
            #     print("LR: {}".format(LR))

            # print iteration result
            # if self.it % 10 == 0:
            if self.verbose == 1:
                thresh_over = input_dist_sum / self.batch_size / self.l_threshold * 100
                # import pdb
                # pdb.set_trace()
                print(
                    "ITER {:0.0f}  Total Loss: {:.4f} perturb: {:0.4f} ({:0.4f} over, {:0.4f} raw); sim: {:.4f}".format(
                        self.it, loss, input_dist_sum, thresh_over, input_dist_raw_sum, internal_dist_sum / nb_imgs))
            if self.verbose == 0:
                progressbar.update(self.it)

        # DONE: print results
        if self.verbose == 1:
            thresh_over = input_dist_sum / self.batch_size / self.l_threshold * 100
            print(
                "END after {} iterations: Total Loss: {} perturb: {:0.4f} ({:0.4f} over, {:0.4f} raw); sim: {}".format(
                    self.it, loss, input_dist_sum, thresh_over, input_dist_raw_sum, internal_dist_sum / nb_imgs))
        print("\n")
        best_adv = self.clipping(best_adv[:nb_imgs])
        return best_adv
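# Illustration (editor's sketch): the update inside attack_batch() is a fixed-size
# signed-gradient step followed by an L-infinity clip, not a plain Adam step.
# The same pattern on a toy quadratic loss; the loss below is hypothetical, not
# the Fawkes feature-space objective.
def _example_signed_gradient_steps(steps=100):
    x = tf.Variable(tf.zeros([4]))
    target = tf.constant([1.0, -2.0, 0.5, 3.0])
    for _ in range(steps):
        with tf.GradientTape() as tape:
            loss = tf.reduce_sum(tf.square(x - target))
        g = tape.gradient(loss, x)
        # step in the direction of -sign(grad), then project back into [-12, 12]
        x.assign(tf.clip_by_value(x - tf.sign(g) * 0.6375, -12.0, 12.0))
    return x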
44.775
135
0.620254
14085
0.98304
0
0
297
0.020729
0
0
3188
0.222501
f2d93f0a50f1963382d3895bbaf47dcf3e2de6e0
1124
py
Python
routes/class_incoming.py
fingerecho/proms-4.0
6c3a1fd62c9394761664e100fc1dde50fd79dc11
[ "CC-BY-4.0" ]
2
2019-11-23T03:56:28.000Z
2019-12-03T15:48:34.000Z
routes/class_incoming.py
fingerecho/proms-4.0
6c3a1fd62c9394761664e100fc1dde50fd79dc11
[ "CC-BY-4.0" ]
null
null
null
routes/class_incoming.py
fingerecho/proms-4.0
6c3a1fd62c9394761664e100fc1dde50fd79dc11
[ "CC-BY-4.0" ]
3
2019-04-12T18:09:35.000Z
2020-03-14T14:38:45.000Z
from abc import ABCMeta, abstractmethod

import database

from . import w_l


class IncomingClass(metaclass=ABCMeta):
    @abstractmethod
    def __init__(self, request):
        self.request = request
        self.graph = None
        self.uri = None
        self.named_graph_uri = None
        self.error_messages = None

    @abstractmethod
    def valid(self):
        pass

    @abstractmethod
    def determine_uri(self):
        pass

    def stored(self):
        """ Add an item to PROMS"""
        if self.graph is None or self.named_graph_uri is None:
            msg = 'The graph and the named_graph_uri properties of this class instance must not be None when trying ' \
                  'to store this instance in the provenance DB.'
            self.error_messages = msg
            return False
        try:
            w_l(str(self.graph))
            w_l(str(self.named_graph_uri))
            database.insert(self.graph, self.named_graph_uri)
            return True
        except Exception as e:
            self.error_messages = ['Could not connect to the provenance database']
            return False
29.578947
120
0.615658
1047
0.931495
0
0
306
0.272242
0
0
219
0.19484
f2da20f8cd9ede45ff2e1e9791b316945d38036c
418
py
Python
openwater/utils/decorator.py
jeradM/openwater
740b7e76622a1ee909b970d9e5c612a840466cec
[ "MIT" ]
null
null
null
openwater/utils/decorator.py
jeradM/openwater
740b7e76622a1ee909b970d9e5c612a840466cec
[ "MIT" ]
null
null
null
openwater/utils/decorator.py
jeradM/openwater
740b7e76622a1ee909b970d9e5c612a840466cec
[ "MIT" ]
null
null
null
from typing import Callable


def blocking(func: Callable):
    setattr(func, "_ow_blocking", True)
    return func


def is_blocking(func: Callable):
    return getattr(func, "_ow_blocking", False) is True


def nonblocking(func: Callable) -> Callable:
    setattr(func, "_ow_nonblocking", True)
    return func


def is_nonblocking(func: Callable) -> bool:
    return getattr(func, "_ow_nonblocking", False) is True
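# Illustration (editor's sketch): the decorators only tag the callable with an
# attribute; a scheduler can inspect the tag later to decide how to run it.
# The save_config function below is hypothetical, not part of this module.
@blocking
def save_config(data):
    """Pretend this does blocking disk I/O."""
    return data

assert is_blocking(save_config)
assert not is_nonblocking(save_config)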
20.9
58
0.717703
0
0
0
0
0
0
0
0
62
0.148325
f2db2b20dcde6fe54280e2d0105ffc23c0015da0
404
py
Python
setup.py
TDGerve/ramCOH
328f27891906e7207344fb3c5a685648a0924dd2
[ "MIT" ]
2
2022-03-08T12:30:55.000Z
2022-03-29T19:46:59.000Z
setup.py
TDGerve/ramCOH
328f27891906e7207344fb3c5a685648a0924dd2
[ "MIT" ]
null
null
null
setup.py
TDGerve/ramCOH
328f27891906e7207344fb3c5a685648a0924dd2
[ "MIT" ]
null
null
null
import setuptools

setuptools.setup(
    name='ramCOH',
    version='0.1',
    description='...',
    author='Thomas van Gerve',

    packages=setuptools.find_packages(exclude=['examples']),
    # package_dir={'': 'petroPy'},
    package_data={'ramCOH': ['static/*']},

    install_requires=[
        'pandas',
        'matplotlib',
        'numpy',
        'scipy',
        'csaps'
    ]
)
17.565217
44
0.534653
0
0
0
0
0
0
0
0
137
0.339109
f2dd43c40f9fe338eecf074d6dac1c0de992c516
798
py
Python
chess.py
jrj92280/python-eve-backend
c0566cdef5e5c75e2b75e59bde804e0d4ce407e3
[ "MIT" ]
null
null
null
chess.py
jrj92280/python-eve-backend
c0566cdef5e5c75e2b75e59bde804e0d4ce407e3
[ "MIT" ]
null
null
null
chess.py
jrj92280/python-eve-backend
c0566cdef5e5c75e2b75e59bde804e0d4ce407e3
[ "MIT" ]
null
null
null
from chess_game._board import make_board
from chess_game.chess_game import ChessGame
from chess_game.play_game import get_user_input, game_event_loop

if __name__ == "__main__":
    game_board = make_board()
    # pawn = Pawn('x', 'y', None, None, None)
    # pawn.move()
    print('Chess')
    print(' : Rules')
    print(" : input - piece's position x,y, second x,y = destination")
    print(" : x = row number 1 through 8")
    print(" : y = column number 1 through 8")
    player1_name = get_user_input(' : Enter player one name', is_move=False)
    player2_name = get_user_input(' : Enter player two name', is_move=False)
    print('------------------------------------------------')
    chess_game = ChessGame(game_board, player1_name, player2_name)
    game_event_loop(chess_game)
33.25
76
0.639098
0
0
0
0
0
0
0
0
312
0.390977
f2dda34548b86bf17367a72a0ef32f5325649770
576
py
Python
python/binary_tree/104.maximum-depth-of-binary-tree.py
Nobodylesszb/LeetCode
0e902f6bff4834a93ce64cf9c57fd64297e63523
[ "MIT" ]
null
null
null
python/binary_tree/104.maximum-depth-of-binary-tree.py
Nobodylesszb/LeetCode
0e902f6bff4834a93ce64cf9c57fd64297e63523
[ "MIT" ]
null
null
null
python/binary_tree/104.maximum-depth-of-binary-tree.py
Nobodylesszb/LeetCode
0e902f6bff4834a93ce64cf9c57fd64297e63523
[ "MIT" ]
null
null
null
""" Given a binary tree, find its maximum depth. The maximum depth is the number of nodes along the longest path from the root node down to the farthest leaf node. Note: A leaf is a node with no children. Example: Given binary tree [3,9,20,null,null,15,7], 3 / \ 9 20 / \ 15 7 return its depth = 3. """ import Math class Solution: def findDeep(self,root): if not root: return 0 if not root.left or root.right: return 1 return 1+ Math.max(self.findDeep(root.left),self.findDeep(root.right))
20.571429
114
0.625
226
0.392361
0
0
0
0
0
0
327
0.567708
f2de6356f341ba86e79ed1873bc9d766068dfedf
1,589
py
Python
strstr/3-2.py
stonemary/lintcode_solutions
f41fd0e56fb88ab54d0ab624977bff1623a6d33a
[ "Apache-2.0" ]
null
null
null
strstr/3-2.py
stonemary/lintcode_solutions
f41fd0e56fb88ab54d0ab624977bff1623a6d33a
[ "Apache-2.0" ]
null
null
null
strstr/3-2.py
stonemary/lintcode_solutions
f41fd0e56fb88ab54d0ab624977bff1623a6d33a
[ "Apache-2.0" ]
null
null
null
# time 15 mins
# used time 15 mins

# time 15 mins
# used time 15 mins
# this is actually a correct solution
# the code i submitted a day ago, which passed lintcode, is actually wrong after i looked KMP up
# the previous version does not take care of the situations where the target contains repetitive elements


class Solution:
    def strStr(self, source, target):
        ## try O(n) with no bug
        if source is None or target is None:
            return -1

        source_pointer = 0
        target_pointer = 0
        last_target_begining_match = None
        while source_pointer < len(source):
            if target_pointer == len(target):
                return source_pointer - len(target)
            if source[source_pointer] == target[target_pointer]:
                if target_pointer != 0 and target[target_pointer] == target[0] and last_target_begining_match is None:
                    last_target_begining_match = target_pointer
                target_pointer += 1
            else:
                if last_target_begining_match is not None:
                    target_pointer = last_target_begining_match + 1
                    last_target_begining_match = None
                elif source[source_pointer] == target[0]:
                    target_pointer = 1
                else:
                    target_pointer = 0
            source_pointer += 1
        else:
            if target_pointer == len(target):
                return source_pointer - len(target)
        return -1
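# Illustration (editor's sketch): a few spot-checks, including a target that
# starts with a repeated character, which is the case the comments above worry
# about. These inputs were traced by hand against the code as written.
_s = Solution()
assert _s.strStr("aaab", "aab") == 1
assert _s.strStr("hello", "ll") == 2
assert _s.strStr("abc", "d") == -1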
34.543478
118
0.570799
1273
0.801133
0
0
0
0
0
0
328
0.206419
f2e1fc7cc5cf4031b844d0facd03421c1cb64cd2
15633
py
Python
ProyectoFinal.py
T0N1R/Recommendation-System-python-neo4J
09dd1bbefa7e436a1aeedf9ccc9160719ec3a353
[ "MIT" ]
null
null
null
ProyectoFinal.py
T0N1R/Recommendation-System-python-neo4J
09dd1bbefa7e436a1aeedf9ccc9160719ec3a353
[ "MIT" ]
null
null
null
ProyectoFinal.py
T0N1R/Recommendation-System-python-neo4J
09dd1bbefa7e436a1aeedf9ccc9160719ec3a353
[ "MIT" ]
null
null
null
# -*- coding: cp1252 -*-
# -*- coding: utf-8 -*-

"""
Algorithms and Data Structures
Final Project

Antonio Reyes #17273
Esteban Cabrera #17781
Miguel #17102
"""

import random
import xlrd

file_location = "C:/Users/Antonio/Desktop/Recommendation-System-python-neo4J-master/Database.xlsx"
workbook = xlrd.open_workbook(file_location)
sheet = workbook.sheet_by_index(0)

from neo4jrestclient.client import GraphDatabase

db = GraphDatabase("http://localhost:7474", username="neo4j", password="1111")
dataB = db.labels.create("Database")
gen = db.labels.create("Genero")

# a dictionary is created (like the hashmaps we saw in class)
database = {}

# stores the genres of the series that have already been watched
historial = []


# inside the for loop, sheet.nrows can be used to print everything
def add_Excel():
    lista_gen = []
    for x in range(sheet.nrows):
        name = sheet.cell_value(x, 0)
        gen1 = sheet.cell_value(x, 1)
        gen2 = sheet.cell_value(x, 2)
        gen3 = sheet.cell_value(x, 3)

        lista_gen = []
        lista_gen.append(gen1)
        lista_gen.append(gen2)
        lista_gen.append(gen3)
        lista_gen.sort()

        gen1 = lista_gen[0]
        gen2 = lista_gen[1]
        gen3 = lista_gen[2]

        generos = []
        generos.append(gen1)
        generos.append(gen2)
        generos.append(gen3)
        database[name] = generos

        unidad = db.nodes.create(nombre=name, genero1=gen1, genero2=gen2, genero3=gen3)
        dataB.add(unidad)

        try:
            unidad.relationships.create("contains", gen.get(genero=gen1)[0])
            gen.get(genero=gen1)[0].relationships.create("contains", unidad)
        except Exception:
            genNode = db.nodes.create(genero=gen1)
            gen.add(genNode)
            unidad.relationships.create("contains", gen.get(genero=gen1)[0])
            gen.get(genero=gen1)[0].relationships.create("contains", unidad)

        try:
            unidad.relationships.create("contains", gen.get(genero=gen2)[0])
            gen.get(genero=gen2)[0].relationships.create("contains", unidad)
        except Exception:
            genNode = db.nodes.create(genero=gen2)
            gen.add(genNode)
            unidad.relationships.create("contains", gen.get(genero=gen2)[0])
            gen.get(genero=gen2)[0].relationships.create("contains", unidad)

        try:
            unidad.relationships.create("contains", gen.get(genero=gen3)[0])
            gen.get(genero=gen3)[0].relationships.create("contains", unidad)
        except Exception:
            genNode = db.nodes.create(genero=gen3)
            gen.add(genNode)
            unidad.relationships.create("contains", gen.get(genero=gen3)[0])
            gen.get(genero=gen3)[0].relationships.create("contains", unidad)


def add_database():
    listaOrden = []
    name = raw_input("Insert name: ")
    gen1 = raw_input("Insert genre1: ")
    gen2 = raw_input("Insert genre2: ")
    gen3 = raw_input("Insert genre3: ")

    listaOrden.append(gen1)
    listaOrden.append(gen2)
    listaOrden.append(gen3)
    listaOrden.sort()

    gen1 = listaOrden[0]
    gen2 = listaOrden[1]
    gen3 = listaOrden[2]

    unidad = db.nodes.create(nombre=name, genero1=gen1, genero2=gen2, genero3=gen3)
    dataB.add(unidad)

    try:
        unidad.relationships.create("contains", gen.get(genero=gen1)[0])
        gen.get(genero=gen1)[0].relationships.create("contains", unidad)
    except Exception:
        genNode = db.nodes.create(genero=gen1)
        gen.add(genNode)
        unidad.relationships.create("contains", gen.get(genero=gen1)[0])
        gen.get(genero=gen1)[0].relationships.create("contains", unidad)

    try:
        unidad.relationships.create("contains", gen.get(genero=gen2)[0])
        gen.get(genero=gen2)[0].relationships.create("contains", unidad)
    except Exception:
        genNode = db.nodes.create(genero=gen2)
        gen.add(genNode)
        unidad.relationships.create("contains", gen.get(genero=gen2)[0])
        gen.get(genero=gen2)[0].relationships.create("contains", unidad)

    try:
        unidad.relationships.create("contains", gen.get(genero=gen3)[0])
        gen.get(genero=gen3)[0].relationships.create("contains", unidad)
    except Exception:
        genNode = db.nodes.create(genero=gen3)
        gen.add(genNode)
        unidad.relationships.create("contains", gen.get(genero=gen3)[0])
        gen.get(genero=gen3)[0].relationships.create("contains", unidad)

    database[name] = [gen1, gen2, gen3]


def watch():
    name = raw_input("Insert name: ")
    try:
        query = "MATCH (n:Database) WHERE n.nombre='" + name + "' RETURN n.genero1, n.genero2, n.genero3"
        results = db.query(query, data_contents=True)
        a = results.rows
        for x in a:
            historial.append(x[0])
            historial.append(x[1])
            historial.append(x[2])
    except Exception:
        print("The movie or TV show you were looking for is not in the database, you can add it by going to option 1")
    popular_topics(name)


# the code shown at this link is used to display the genres repeated the most times
# https://stackoverflow.com/questions/3594514/how-to-find-most-common-elements-of-a-list
def popular_topics(name):
    nombre = name
    # will hold the 5 most watched genres
    top_5 = []
    # for each genre in the list....
    word_counter = {}
    for word in historial:
        if word in word_counter:
            word_counter[word] += 1
        else:
            word_counter[word] = 1
    popular_words = sorted(word_counter, key=word_counter.get, reverse=True)
    top_5 = popular_words[:5]

    # the genres are listed in order
    lista = []
    print "Most watched genres: "
    for x in top_5:
        lista.append(x)
        print x
    print "We recommend: "
    print "-----------------"
    print "-----------------"

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero1:'" + top_5[0] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero2:'" + top_5[0] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero3:'" + top_5[0] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero1:'" + top_5[1] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero2:'" + top_5[1] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero3:'" + top_5[1] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero1:'" + top_5[2] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero2:'" + top_5[2] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    try:
        query = "match (n:Database{nombre:'" + nombre + "'})-[:contains*1..3]->(a:Database{genero3:'" + top_5[2] + "'}) return collect(distinct a.nombre)"
        # query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        # print results
        a = results.rows
        # print len(a[0][0])
        b = []
        print a[0][0][0]
        for x in a[0][0]:
            if x not in b:
                b.append(x)
        valor = random.sample(range(0, len(b) + 1), 3)
        print b[valor[0]]
        print b[valor[1]]
        print b[valor[2]]
    except Exception:
        pass

    # YourList.OrderBy(x => rnd.Next()).Take(5)
    # recomendation(name, top_5[0], top_5[1], top_5[2], top_5[3])


# method to show all the series and movies of a genre
def show_genre():
    genre = raw_input("Insert genre: ")
    try:
        query = "MATCH (n:Database {genero1:'" + genre + "'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        a = results.rows
        b = []
        for x in a:
            if x not in b:
                b.append(x)
                print x
    except Exception:
        pass
    try:
        query = "MATCH (n:Database {genero2:'" + genre + "'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        a = results.rows
        b = []
        for x in a:
            if x not in b:
                b.append(x)
                print x
    except Exception:
        pass
    try:
        query = "MATCH (n:Database {genero3:'" + genre + "'}) RETURN n.nombre"
        results = db.query(query, data_contents=True)
        a = results.rows
        b = []
        for x in a:
            if x not in b:
                b.append(x)
                print x
    except Exception:
        pass


# ******************************************************************************************************
# *******************************************************************************************************
def menu():
    print("0. Add movies and TV shows from Excel to Database")
    print("1. Add movie or TV show to Database")
    print("2. Watch movie or TV Show")
    print("3. List of movies and TV shows by genre")
    print("9. Exit")


menu()
opcion = input("Option: ")
print ("**********************************")
print ("**********************************")

while(opcion != 9):
    if(opcion == 0):
        add_Excel()
        print ("**********************************")
        print ("**********************************")
        print ("Values added to Database")
        menu()
        opcion = input("Option: ")
    elif(opcion == 1):
        add_database()
        print ("**********************************")
        print ("**********************************")
        menu()
        opcion = input("Option: ")
    elif(opcion == 2):
        watch()
        print ("**********************************")
        print ("**********************************")
        menu()
        opcion = input("Option: ")
    elif(opcion == 3):
        show_genre()
        print ("**********************************")
        print ("**********************************")
        menu()
        opcion = input("Option: ")
    else:
        print("This option is not valid")
        print ("**********************************")
        print ("**********************************")
        menu()
        opcion = input("Option: ")

print ("Thanks for using the program")
30.414397
147
0.515832
0
0
0
0
0
0
0
0
5070
0.324107
f2e37e6fb52ee6d2e740ecb159b5517384b2a2c4
324
py
Python
www/async_flask/__init__.py
StarAhri/flask
facd476065c945f3467d4bfd7bc4ca910cc27d74
[ "BSD-3-Clause" ]
null
null
null
www/async_flask/__init__.py
StarAhri/flask
facd476065c945f3467d4bfd7bc4ca910cc27d74
[ "BSD-3-Clause" ]
null
null
null
www/async_flask/__init__.py
StarAhri/flask
facd476065c945f3467d4bfd7bc4ca910cc27d74
[ "BSD-3-Clause" ]
null
null
null
from flask import Flask
import time
from _thread import get_ident

app = Flask(__name__)


@app.route("/")
def hello_world():
    time.sleep(20)
    return "hello world!" + str(get_ident())


@app.route("/index")
def hello():
    time.sleep(1)
    return "Hello" + str(get_ident())


if __name__ == "__main__":
    app.run(port=6003)
17.052632
42
0.675926
0
0
0
0
183
0.564815
0
0
42
0.12963
f2e440f4b6da4c3dc8c1545aee15d9066fc4d3f5
724
py
Python
codility-python/util/test_strings.py
mforoni/codility
be5005e96612dd7bb33b88bb76a590d28084b032
[ "MIT" ]
null
null
null
codility-python/util/test_strings.py
mforoni/codility
be5005e96612dd7bb33b88bb76a590d28084b032
[ "MIT" ]
null
null
null
codility-python/util/test_strings.py
mforoni/codility
be5005e96612dd7bb33b88bb76a590d28084b032
[ "MIT" ]
null
null
null
import unittest

import util.strings as strings


class TestStrings(unittest.TestCase):
    def test_first_index_of(self):
        self.assertEqual(1, strings.first_index_of('1', "0103003004"))
        self.assertEqual(20, strings.first_index_of('f', "post this text on a form"))

    def test_last_index_of(self):
        self.assertEqual(9, strings.last_index_of('1', "01030030014"))
        self.assertEqual(20, strings.last_index_of('f', "post this text on a form"))

    def test_indexes_of(self):
        self.assertEqual([1, 9], strings.indexes_of('1', "01030030014"))
        self.assertEqual([20, 30], strings.indexes_of('f', "post this text on a fantastic form"))


if __name__ == '__main__':
    unittest.main()
31.478261
97
0.686464
624
0.861878
0
0
0
0
0
0
154
0.212707
f2e49a7f41a62f84a3de746b66ce03eb20e0b955
1,395
py
Python
ipython/data/parseSource/input.py
cainja/RMG-Py
f9ad0f4244e476a28768c8a4a37410ad55bcd556
[ "MIT" ]
1
2020-01-14T09:12:22.000Z
2020-01-14T09:12:22.000Z
ipython/data/parseSource/input.py
speth/RMG-Py
1d2c2b684580396e984459d9347628a5ceb80e2e
[ "MIT" ]
72
2016-06-06T18:18:49.000Z
2019-11-17T03:21:10.000Z
ipython/data/parseSource/input.py
speth/RMG-Py
1d2c2b684580396e984459d9347628a5ceb80e2e
[ "MIT" ]
3
2017-09-22T15:47:37.000Z
2021-12-30T23:51:47.000Z
# Data sources
database(
    thermoLibraries = ['primaryThermoLibrary'],
    reactionLibraries = [('C3', False)],
    seedMechanisms = ['GRI-Mech3.0'],
    kineticsDepositories = ['training'],
    kineticsFamilies = 'default',
    kineticsEstimator = 'rate rules',
)

# List of species
species(
    label='ethane',
    reactive=True,
    structure=SMILES("CC"),
)
species(
    label='N2',
    reactive=False,
    structure=adjacencyList("""
    1 N u0 p1 c0 {2,T}
    2 N u0 p1 c0 {1,T}
    """),
)

# Reaction systems
simpleReactor(
    temperature=(1350, 'K'),
    pressure=(1.0, 'bar'),
    initialMoleFractions={
        "ethane": 0.1,
        "N2": 0.9,
    },
    terminationConversion={
        'ethane': 0.9,
    },
    terminationTime=(1e6, 's'),
)

simulator(
    atol=1e-16,
    rtol=1e-8,
)

model(
    toleranceKeepInEdge=0.0,
    toleranceMoveToCore=0.1,
    toleranceInterruptSimulation=0.1,
    maximumEdgeSpecies=100000,
)

options(
    units='si',
    saveRestartPeriod=None,
    generateOutputHTML=True,
    generatePlots=False,
    saveEdgeSpecies=True,
    saveSimulationProfiles=True,
    verboseComments=True,
)

pressureDependence(
    method='modified strong collision',
    maximumGrainSize=(0.5, 'kcal/mol'),
    minimumNumberOfGrains=250,
    temperatures=(300, 2200, 'K', 2),
    pressures=(0.01, 100, 'bar', 3),
    interpolation=('Chebyshev', 6, 4),
    maximumAtoms=15,
)
19.375
47
0.632258
0
0
0
0
0
0
0
0
283
0.202867
f2e593a65e27e8bb4c6dbcd20c5d00538ad0aa1c
438
py
Python
simbench/__init__.py
BaraaUniKassel/simbench
eca679bbef2b7c61d4a42dd9d9716ad969ff6f77
[ "BSD-3-Clause" ]
null
null
null
simbench/__init__.py
BaraaUniKassel/simbench
eca679bbef2b7c61d4a42dd9d9716ad969ff6f77
[ "BSD-3-Clause" ]
null
null
null
simbench/__init__.py
BaraaUniKassel/simbench
eca679bbef2b7c61d4a42dd9d9716ad969ff6f77
[ "BSD-3-Clause" ]
null
null
null
# Copyright (c) 2019-2021 by University of Kassel, Tu Dortmund, RWTH Aachen University and Fraunhofer
# Institute for Energy Economics and Energy System Technology (IEE) Kassel and individual
# contributors (see AUTHORS file for details). All rights reserved.

__version__ = "1.3.0"
__author__ = "smeinecke"

import os

sb_dir = os.path.dirname(os.path.realpath(__file__))

from simbench.converter import *
from simbench.networks import *
33.692308
101
0.783105
0
0
0
0
0
0
0
0
275
0.627854
f2e72fd64f8c76f1c9fc74fe2d074f594b42d146
215
py
Python
src/output_module.py
abhishekpandeyIT/Virtual_Intelligent_Personal_Agent
786261fbcf1468bcbaee9f6d17aea3f3cc06f81e
[ "Apache-2.0" ]
null
null
null
src/output_module.py
abhishekpandeyIT/Virtual_Intelligent_Personal_Agent
786261fbcf1468bcbaee9f6d17aea3f3cc06f81e
[ "Apache-2.0" ]
null
null
null
src/output_module.py
abhishekpandeyIT/Virtual_Intelligent_Personal_Agent
786261fbcf1468bcbaee9f6d17aea3f3cc06f81e
[ "Apache-2.0" ]
null
null
null
import assistantResume
from speak_module import speak
from database import speak_is_on


def output(o):
    # For command line input
    if speak_is_on():
        speak(o)
    print(assistantResume.name + ": " + o + "\n")
23.888889
44
0.702326
0
0
0
0
0
0
0
0
32
0.148837
f2ed016efef1c89871a2e33d8718c95390697abc
3,545
py
Python
vk_bot/needrework/relation.py
triangle1984/vk-bot
39dea7bf8043e791ef079ea1ac6616f95d5b5312
[ "BSD-3-Clause" ]
3
2019-11-05T12:32:04.000Z
2019-11-15T14:29:46.000Z
vk_bot/needrework/relation.py
anar66/vk-bot
39dea7bf8043e791ef079ea1ac6616f95d5b5312
[ "BSD-3-Clause" ]
1
2019-12-11T20:26:31.000Z
2019-12-11T20:26:31.000Z
vk_bot/needrework/relation.py
triangle1984/vk-bot
39dea7bf8043e791ef079ea1ac6616f95d5b5312
[ "BSD-3-Clause" ]
5
2019-11-20T14:20:30.000Z
2022-02-05T10:37:01.000Z
import vk_api
from vk_api.utils import get_random_id

from vk_bot.core.sql.vksql import *


def relationmeet(text, vk, event):
    check = checkrelation('waitmeet', event.object.from_id)
    if check == None:
        check = checkrelation('relation', event.object.from_id)
        if check == None:
            userid = "".join(text[2][3:])
            userid = userid.split('|')[0]
            check = checkrelation('relation', userid)
            if check == None:
                check = checkrelation('waitmeet', userid)
                if check == None:
                    tableadd("waitmeet", "id, id2", (f"{event.object.from_id}, {userid}"))
                    vk.messages.send(user_id=int(userid), random_id=get_random_id(),
                                     message=f"*id{event.object.from_id}(Пользователь) предложил тебе встречаться!\nНапиши: '/отношения принять' или '/отношения отклонить'")
                else:
                    return "Этому пользователю уже кто-то предложил встречатся!"
            else:
                return "Этот пользователь уже встречается с кем-то!"
        else:
            return "Ай-яй-яй! Изменять нехорошо"
    else:
        return "Ты уже отправил приглашение!"


def reject(event, vk):
    check = checktable('waitmeet', 'id2', event.object.from_id)
    if check == None:
        return 'У тебя нет предложений встречаться!'
    else:
        userid = checktable('waitmeet', 'id2', event.object.from_id)
        vk.messages.send(user_id=int(userid['id']), random_id=get_random_id(),
                         message=f"*id{event.object.from_id}(Пользователь) отклонил твое предложение :()")
        tablerm('waitmeet', "id2", event.object.from_id)
        return "Вы отклонили предложение"


def accept(event, vk):
    check = checktable('waitmeet', 'id2', event.object.from_id)
    if check == None:
        return 'У тебя нет предложений встречаться!'
    else:
        relationaccept(event.object.from_id)
        tablerm('waitmeet', "id2", event.object.from_id)
        userid = checktable('relation', 'id2', event.object.from_id)
        vk.messages.send(user_id=int(userid['id']), random_id=get_random_id(),
                         message=f"*id{event.object.from_id}(Пользователь) принял твое предложение! Поздравляем!")
        return "Вы приняли предложение! Поздравляем!"


def test(event, vk, message, case):
    check = checkrelation('relation', event.object.from_id)
    if check == None:
        return {'message': 'Ты ни с кем не встречаешься :('}
    else:
        userid = checktable('relation', 'id', event.object.from_id)
        if userid == None:
            userid = checktable('relation', 'id2', event.object.from_id)
        if userid['id2'] == event.object.from_id:
            userid = f"*id{userid['id']}({vk.users.get(user_ids=userid['id'], name_case=case)[0]['first_name']})"
            return {'message': f"{message} {userid}"}
        elif userid['id'] == event.object.from_id:
            userid = f"*id{userid['id2']}({vk.users.get(user_ids=userid['id2'], name_case=case)[0]['first_name']})"
            return {'message': f"{message} {userid}"}


def relation(event, vk, text):
    try:
        if text[1] == "принять":
            return {"message": accept(event, vk)}
        elif text[1] == "отклонить":
            return {"message": reject(event, vk)}
        elif text[:2] == ['/отношения', 'встречаться']:
            return {"message": relationmeet(text, vk, event)}
    except IndexError:
        return test(event, vk, "Ты встречаешься с", "ins")
48.561644
172
0.598025
0
0
0
0
0
0
0
0
1672
0.416438
f2ed7a6bb514c982bc41d3c33e724e9e6365650e
1,746
py
Python
wallpaperdownloader/main.py
k-vinogradov/wallpaper-downloader
568c6a1e3a2307f710bf6fe313b39da2d620213a
[ "MIT" ]
null
null
null
wallpaperdownloader/main.py
k-vinogradov/wallpaper-downloader
568c6a1e3a2307f710bf6fe313b39da2d620213a
[ "MIT" ]
null
null
null
wallpaperdownloader/main.py
k-vinogradov/wallpaper-downloader
568c6a1e3a2307f710bf6fe313b39da2d620213a
[ "MIT" ]
null
null
null
"""Wallpaper Downloader Main Module.""" import argparse import asyncio import logging import sys from datetime import datetime from wallpaperdownloader.downloader import download, LOGGER_NAME def abort(*args): """Print message to the stderr and exit the program.""" print(*args, file=sys.stderr) sys.exit(1) def check_args(args): """Check if arguments are valid.""" month, year = (args.month, args.year) if month < 1 or month > 12: abort("Invalid month number %d", month) date_string = f"{year:04}{month:02}" if date_string < "201205": abort("There are no wallpapers older than May 2012") if date_string > datetime.now().strftime("%Y%M"): abort("Too early... come a bit later") def configure_logger(level): """Configure console log output.""" logger = logging.getLogger(LOGGER_NAME) handler = logging.StreamHandler() logger.setLevel(level) handler.setLevel(level) logger.addHandler(handler) def main(): """Run WD main routine.""" parser = argparse.ArgumentParser( description="Download wallpapers from www.smashingmagazine.com" ) parser.add_argument("month", type=int, help="Month number") parser.add_argument("year", type=int, help="Year") parser.add_argument("resolution", type=str, help="Image resolution") parser.add_argument( "-v", "--verbose", action="store_true", help="Enable verbose output" ) args = parser.parse_args() check_args(args) configure_logger(logging.DEBUG if args.verbose else logging.INFO) year, month, res = (args.year, args.month, args.resolution) asyncio.get_event_loop().run_until_complete(download(year, month, res)) if __name__ == "__main__": main()
29.59322
76
0.683849
0
0
0
0
0
0
0
0
501
0.286942
f2ee02add396584dc919e32b6bdd9a63f34df039
4,512
py
Python
Lib/site-packages/hackedit/app/common.py
fochoao/cpython
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
[ "bzip2-1.0.6", "0BSD" ]
null
null
null
Lib/site-packages/hackedit/app/common.py
fochoao/cpython
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
[ "bzip2-1.0.6", "0BSD" ]
20
2021-05-03T18:02:23.000Z
2022-03-12T12:01:04.000Z
Lib/site-packages/hackedit/app/common.py
fochoao/cpython
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
[ "bzip2-1.0.6", "0BSD" ]
null
null
null
""" Functions shared across the main window, the welcome window and the system tray. """ import os import qcrash.api as qcrash from PyQt5 import QtWidgets from hackedit.app import templates, settings from hackedit.app.dialogs.dlg_about import DlgAbout from hackedit.app.dialogs.dlg_template_answers import DlgTemplateVars from hackedit.app.dialogs.preferences import DlgPreferences from hackedit.app.wizards.new import WizardNew def show_about(window): """ Shows the about dialog on the parent window :param window: parent window. """ DlgAbout.show_about(window) def check_for_update(*args, **kwargs): """ Checks for update. :param window: parent window :param show_up_to_date_msg: True to show a message box when the app is up to date. """ # todo: improve this: make an update wizard that update both hackedit # and its packages (to ensure compatiblity) # if pip_tools.check_for_update('hackedit', __version__): # answer = QtWidgets.QMessageBox.question( # window, 'Check for update', # 'A new version of HackEdit is available...\n' # 'Would you like to install it now?') # if answer == QtWidgets.QMessageBox.Yes: # try: # status = pip_tools.graphical_install_package( # 'hackedit', autoclose_dlg=True) # except RuntimeError as e: # QtWidgets.qApp.processEvents() # QtWidgets.QMessageBox.warning( # window, 'Update failed', # 'Failed to update hackedit: %r' % e) # else: # QtWidgets.qApp.processEvents() # if status: # QtWidgets.QMessageBox.information( # window, 'Check for update', # 'Update completed with sucess, the application ' # 'will now restart...') # window.app.restart() # else: # QtWidgets.QMessageBox.warning( # window, 'Update failed', # 'Failed to update hackedit') # else: # _logger().debug('HackEdit up to date') # if show_up_to_date_msg: # QtWidgets.QMessageBox.information( # window, 'Check for update', 'HackEdit is up to date.') pass def open_folder(window, app): path = QtWidgets.QFileDialog.getExistingDirectory( window, _('Open directory'), settings.last_open_dir()) if path: settings.set_last_open_dir(os.path.dirname(path)) app.open_path(path, sender=window) def report_bug(window, title='', traceback=None, issue_description=''): qcrash.show_report_dialog( issue_title=title, traceback=traceback, parent=window, include_log=traceback is not None, include_sys_info=traceback is not None, issue_description=issue_description) return True def edit_preferences(window, app): DlgPreferences.edit_preferences(window, app) def not_implemented_action(window): QtWidgets.QMessageBox.information( window, _('Not implementeded'), _('This action has not been implemented yet...')) def create_new(app, window, current_project=None): source, template, dest_dir, single_file = WizardNew.get_parameters( window, current_project) if source is not None: create_new_from_template(source, template, dest_dir, single_file, window, app) def create_new_from_template(source, template, dest_dir, single_file, window, app): from .main_window import MainWindow try: variables = template['variables'] except KeyError: answers = {} else: answers = DlgTemplateVars.get_answers(variables, parent=window) if answers is None: # canceled by user return None files = templates.create(template, dest_dir, answers) if not files: # should not happen unless the template is empty return None if single_file: path = files[0] else: path = dest_dir from hackedit.app.welcome_window import WelcomeWindow if isinstance(window, WelcomeWindow): sender = None else: sender = window if single_file and isinstance(window, MainWindow): window.open_file(path) else: 
app.open_path(path, sender=sender) return path
32
77
0.623005
0
0
0
0
0
0
0
0
1937
0.4293
f2ee858e562eab312d062843fa52105cd18f06ef
4,778
py
Python
pygame_menu/locals.py
apuly/pygame-menu
77bf8f2c8913de5a24674ee0d0d2c7c9b816a58b
[ "MIT" ]
419
2017-05-01T20:00:08.000Z
2022-03-29T13:49:16.000Z
pygame_menu/locals.py
apuly/pygame-menu
77bf8f2c8913de5a24674ee0d0d2c7c9b816a58b
[ "MIT" ]
363
2017-11-05T17:42:48.000Z
2022-03-27T21:13:33.000Z
pygame_menu/locals.py
apuly/pygame-menu
77bf8f2c8913de5a24674ee0d0d2c7c9b816a58b
[ "MIT" ]
167
2017-05-02T20:42:24.000Z
2022-03-24T16:17:38.000Z
""" pygame-menu https://github.com/ppizarror/pygame-menu LOCALS Local constants. License: ------------------------------------------------------------------------------- The MIT License (MIT) Copyright 2017-2021 Pablo Pizarro R. @ppizarror Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ------------------------------------------------------------------------------- """ __all__ = [ # Alignment 'ALIGN_CENTER', 'ALIGN_LEFT', 'ALIGN_RIGHT', # Data types 'INPUT_FLOAT', 'INPUT_INT', 'INPUT_TEXT', # Positioning 'POSITION_CENTER', 'POSITION_EAST', 'POSITION_NORTH', 'POSITION_NORTHEAST', 'POSITION_SOUTHWEST', 'POSITION_SOUTH', 'POSITION_SOUTHEAST', 'POSITION_NORTHWEST', 'POSITION_WEST', # Orientation 'ORIENTATION_HORIZONTAL', 'ORIENTATION_VERTICAL', # Scrollarea 'SCROLLAREA_POSITION_BOTH_HORIZONTAL', 'SCROLLAREA_POSITION_BOTH_VERTICAL', 'SCROLLAREA_POSITION_FULL', # Cursors 'CURSOR_ARROW', 'CURSOR_CROSSHAIR', 'CURSOR_HAND', 'CURSOR_IBEAM', 'CURSOR_NO', 'CURSOR_SIZEALL', 'CURSOR_SIZENESW', 'CURSOR_SIZENS', 'CURSOR_SIZENWSE', 'CURSOR_SIZEWE', 'CURSOR_WAIT', 'CURSOR_WAITARROW', # Event compatibility 'FINGERDOWN', 'FINGERMOTION', 'FINGERUP' ] import pygame as __pygame # Alignment ALIGN_CENTER = 'align-center' ALIGN_LEFT = 'align-left' ALIGN_RIGHT = 'align-right' # Input data type INPUT_FLOAT = 'input-float' INPUT_INT = 'input-int' INPUT_TEXT = 'input-text' # Position POSITION_CENTER = 'position-center' POSITION_EAST = 'position-east' POSITION_NORTH = 'position-north' POSITION_NORTHEAST = 'position-northeast' POSITION_NORTHWEST = 'position-northwest' POSITION_SOUTH = 'position-south' POSITION_SOUTHEAST = 'position-southeast' POSITION_SOUTHWEST = 'position-southwest' POSITION_WEST = 'position-west' # Menu ScrollArea position SCROLLAREA_POSITION_BOTH_HORIZONTAL = 'scrollarea-position-both-horizontal' SCROLLAREA_POSITION_BOTH_VERTICAL = 'scrollarea_position-both-vertical' SCROLLAREA_POSITION_FULL = 'scrollarea-position-full' # Orientation ORIENTATION_HORIZONTAL = 'orientation-horizontal' ORIENTATION_VERTICAL = 'orientation-vertical' # Cursors CURSOR_ARROW = None if not hasattr(__pygame, 'SYSTEM_CURSOR_ARROW') else __pygame.SYSTEM_CURSOR_ARROW CURSOR_CROSSHAIR = None if not hasattr(__pygame, 'SYSTEM_CURSOR_CROSSHAIR') else __pygame.SYSTEM_CURSOR_CROSSHAIR CURSOR_HAND = None if not hasattr(__pygame, 'SYSTEM_CURSOR_HAND') else __pygame.SYSTEM_CURSOR_HAND CURSOR_IBEAM = None if not hasattr(__pygame, 'SYSTEM_CURSOR_IBEAM') else __pygame.SYSTEM_CURSOR_IBEAM CURSOR_NO = None if not hasattr(__pygame, 'SYSTEM_CURSOR_NO') else __pygame.SYSTEM_CURSOR_NO 
CURSOR_SIZEALL = None if not hasattr(__pygame, 'SYSTEM_CURSOR_SIZEALL') else __pygame.SYSTEM_CURSOR_SIZEALL CURSOR_SIZENESW = None if not hasattr(__pygame, 'SYSTEM_CURSOR_SIZENESW') else __pygame.SYSTEM_CURSOR_SIZENESW CURSOR_SIZENS = None if not hasattr(__pygame, 'SYSTEM_CURSOR_SIZENS') else __pygame.SYSTEM_CURSOR_SIZENS CURSOR_SIZENWSE = None if not hasattr(__pygame, 'SYSTEM_CURSOR_SIZENWSE') else __pygame.SYSTEM_CURSOR_SIZENWSE CURSOR_SIZEWE = None if not hasattr(__pygame, 'SYSTEM_CURSOR_SIZEWE') else __pygame.SYSTEM_CURSOR_SIZEWE CURSOR_WAIT = None if not hasattr(__pygame, 'SYSTEM_CURSOR_WAIT') else __pygame.SYSTEM_CURSOR_WAIT CURSOR_WAITARROW = None if not hasattr(__pygame, 'SYSTEM_CURSOR_WAITARROW') else __pygame.SYSTEM_CURSOR_WAITARROW # Events compatibility with lower pygame versions FINGERDOWN = -1 if not hasattr(__pygame, 'FINGERDOWN') else __pygame.FINGERDOWN FINGERMOTION = -1 if not hasattr(__pygame, 'FINGERMOTION') else __pygame.FINGERMOTION FINGERUP = -1 if not hasattr(__pygame, 'FINGERUP') else __pygame.FINGERUP
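# Illustration (editor's sketch): the hasattr guards above keep this module
# importable on older pygame builds that predate SYSTEM_CURSOR_* and the touch
# events. The same guard can be written with getattr; _FINGERDOWN is a
# hypothetical local alias, not part of this module.
_FINGERDOWN = getattr(__pygame, 'FINGERDOWN', -1)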
35.392593
113
0.75429
0
0
0
0
0
0
0
0
2849
0.596275
f2efb530b1ef641d5c0b78f798aa8a3ec91dbadc
3,184
py
Python
functions/constants.py
Katolus/functions
c4aff37231432ce6ef4ed6b37c8b5baaede5975a
[ "MIT" ]
4
2022-03-08T08:46:44.000Z
2022-03-19T07:52:11.000Z
functions/constants.py
Katolus/functions
c4aff37231432ce6ef4ed6b37c8b5baaede5975a
[ "MIT" ]
114
2021-10-30T05:48:54.000Z
2022-03-06T10:57:00.000Z
functions/constants.py
Katolus/functions
c4aff37231432ce6ef4ed6b37c8b5baaede5975a
[ "MIT" ]
null
null
null
import os
import sys
from enum import Enum
from enum import unique
from typing import List

# Set system constants based on the current platform
if sys.platform.startswith("win32"):
    DEFAULT_SYSTEM_CONFIG_PATH = os.path.join(os.environ["APPDATA"], "config")
elif sys.platform.startswith("linux"):
    DEFAULT_SYSTEM_CONFIG_PATH = os.path.join(os.environ["HOME"], ".config")
elif sys.platform.startswith("darwin"):
    DEFAULT_SYSTEM_CONFIG_PATH = os.path.join(
        os.environ["HOME"], "Library", "Application Support"
    )
else:
    DEFAULT_SYSTEM_CONFIG_PATH = os.path.join(os.environ["HOME"], "config")

# System configuration
PACKAGE_BASE_CONFIG_FOLDER = "ventress-functions"
PACKAGE_CONFIG_DIR_PATH = os.path.join(
    DEFAULT_SYSTEM_CONFIG_PATH, PACKAGE_BASE_CONFIG_FOLDER
)
DEFAULT_LOG_FILENAME = "functions.log"
DEFAULT_LOG_FILEPATH = os.path.join(PACKAGE_CONFIG_DIR_PATH, DEFAULT_LOG_FILENAME)

# Project constants
PROJECT_VENDOR = "ventress"
PROJECT_MARK = "ventress-functions"


class ConfigName(str, Enum):
    """Represents the various available names for a config file"""

    BASE = "config.json"


class RequiredFile(str, Enum):
    """Enum for required file names in a function's directory"""

    CONFIG = "config.json"
    DOCKERFILE = "Dockerfile"
    DOCKERIGNORE = ".dockerignore"
    ENTRY_POINT = "main.py"
    REQUIREMENTS = "requirements.txt"


class LoggingLevel(str, Enum):
    DEBUG = "debug"
    ERROR = "error"
    INFO = "info"
    WARNING = "warning"


class FunctionType(str, Enum):
    """Represents the various types of functions that can be run"""

    HTTP = "http"
    PUBSUB = "pubsub"

    @classmethod
    def options(cls) -> List[str]:
        """Returns a list of all the function types"""
        return [enum.value for enum in cls]


class LocalStatus(str, Enum):
    """Represents the status of a function locally"""

    ADDED = "added"
    BUILT = "new build"
    INVALID = "invalid"
    NEW = "new"
    REMOVED = "removed"
    RUNNING = "running"
    STOPPED = "stopped"
    UNKNOWN = "unknown"

    @classmethod
    def build_statuses(cls) -> List[str]:
        """Returns a list of statuses which mean that the image is built"""
        return [
            cls.BUILT,
            cls.RUNNING,
            cls.STOPPED,
        ]


class CloudStatus(str, Enum):
    """Represents the status of a function on the cloud"""

    DELETED = "deleted"
    DEPLOYED = "deployed"
    UNKNOWN = "unknown"

    @property
    def is_deployed(self) -> bool:
        return self == CloudStatus.DEPLOYED


@unique
class CloudProvider(str, Enum):
    """Represents the various cloud providers supported by the functions package"""

    # AWS = "aws"
    # AZURE = "azure"
    GCP = "gcp"
    # LOCAL = "local"
    # OPENFASS = "openfass"
    # OPENSTACK = "openstack"

    @classmethod
    def all(cls) -> List[str]:
        """Returns all the available service types"""
        return [enum.value for enum in cls]


@unique
class CloudServiceType(str, Enum):
    CLOUD_FUNCTION = "cloud_function"

    @classmethod
    def all(cls) -> List[str]:
        """Returns all the available service types"""
        return [enum.value for enum in cls]
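# Illustration (editor's sketch): because these enums subclass str, members
# compare equal to their plain string values, which is handy for config parsing.
assert FunctionType.HTTP == "http"
assert FunctionType.options() == ["http", "pubsub"]
assert CloudStatus.DEPLOYED.is_deployed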
25.269841
83
0.666143
2149
0.674937
0
0
1098
0.344849
0
0
1163
0.365264
f2f174769c76e5752b21c530463b089bffb53275
1,076
py
Python
mkmk/extend.py
tundra/mkmk
4ca7a3e337dcc3345fb01ea205ae05c397f396b0
[ "Apache-2.0" ]
null
null
null
mkmk/extend.py
tundra/mkmk
4ca7a3e337dcc3345fb01ea205ae05c397f396b0
[ "Apache-2.0" ]
null
null
null
mkmk/extend.py
tundra/mkmk
4ca7a3e337dcc3345fb01ea205ae05c397f396b0
[ "Apache-2.0" ]
null
null
null
#- Copyright 2014 GOTO 10.
#- Licensed under the Apache License, Version 2.0 (see LICENSE).

## Utilities used for creating build extensions.

from abc import ABCMeta, abstractmethod


# Abstract superclass of the tool sets loaded implicitly into each context.
# There can be many of these, one for each context.
class ToolSet(object):
    __metaclass__ = ABCMeta

    def __init__(self, context):
        self.context = context

    # Returns the context this tool set belongs to.
    def get_context(self):
        return self.context


# Controller for this kind of extension. There is only one of these for each
# kind of extension.
class ToolController(object):
    __metaclass__ = ABCMeta

    def __init__(self, env):
        self.env = env

    # Returns the build environment.
    def get_environment(self):
        return self.env

    # Gives this controller an opportunity to add some extra custom flags. By
    # default does nothing.
    def add_custom_flags(self, parser):
        pass

    # Returns a toolset instance, given a concrete context.
    @abstractmethod
    def get_tools(self, context):
        pass
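# Illustration (editor's sketch): a minimal concrete extension following the
# one-controller-per-extension, one-tool-set-per-context split described in the
# comments above. The Example* names are hypothetical, not part of this module.
class ExampleToolSet(ToolSet):
    # A tool the build scripts in this context could call.
    def describe(self):
        return "tools for %s" % self.get_context()


class ExampleToolController(ToolController):
    # Produce the per-context tool set for this kind of extension.
    def get_tools(self, context):
        return ExampleToolSet(context)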
25.619048
76
0.737918
662
0.615242
0
0
56
0.052045
0
0
590
0.548327
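A sketch of how the two abstract classes in the record above are meant to pair up: the controller is a singleton per extension kind and hands out one tool set per context. The Echo* names are hypothetical, not from the repo; the sketch assumes the record's classes are in scope.

    # Hypothetical extension built on the ToolSet/ToolController pair above.
    class EchoTools(ToolSet):
        def greet(self):
            return "tools for %s" % self.get_context()

    class EchoController(ToolController):
        def get_tools(self, context):
            # One ToolSet instance per concrete context.
            return EchoTools(context)

    controller = EchoController(env="dev")
    print(controller.get_tools("ctx").greet())  # -> tools for ctx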
f2f29f0872d8843eb8b228cb03ec5eb0946af9b8
32,864
py
Python
tracklib/model/model.py
xueyuelei/tracklib
d33912baf1bebd1605d5e9c8dfc31484c96628cc
[ "MIT" ]
5
2020-03-04T11:36:19.000Z
2020-06-21T16:49:45.000Z
tracklib/model/model.py
xueyuelei/tracklib
d33912baf1bebd1605d5e9c8dfc31484c96628cc
[ "MIT" ]
null
null
null
tracklib/model/model.py
xueyuelei/tracklib
d33912baf1bebd1605d5e9c8dfc31484c96628cc
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- ''' REFERENCES: [1] Y. Bar-Shalom, X. R. Li, and T. Kirubarajan, "Estimation with Applications to Tracking and Navigation," New York: John Wiley and Sons, Inc, 2001. [2] R. A. Singer, "Estimating Optimal Tracking Filter Performance for Manned Maneuvering Targets," in IEEE Transactions on Aerospace and Electronic Systems, vol. AES-6, no. 4, pp. 473-483, July 1970. [3] X. Rong Li and V. P. Jilkov, "Survey of maneuvering target tracking. Part I. Dynamic models," in IEEE Transactions on Aerospace and Electronic Systems, vol. 39, no. 4, pp. 1333-1364, Oct. 2003. [4] W. Koch, "Tracking and Sensor Data Fusion: Methodological Framework and Selected Applications," Heidelberg, Germany: Springer, 2014. [5] Mo Longbin, Song Xiaoquan, Zhou Yiyu, Sun Zhong Kang and Y. Bar-Shalom, "Unbiased converted measurements for tracking," in IEEE Transactions on Aerospace and Electronic Systems, vol. 34, no. 3, pp. 1023-1027, July 1998 ''' from __future__ import division, absolute_import, print_function __all__ = [ 'F_poly', 'F_singer', 'F_van_keuk', 'Q_poly_dc', 'Q_poly_dd', 'Q_singer', 'Q_van_keuk', 'H_pos_only', 'R_pos_only', 'F_cv', 'f_cv', 'f_cv_jac', 'Q_cv_dc', 'Q_cv_dd', 'H_cv', 'h_cv', 'h_cv_jac', 'R_cv', 'F_ca', 'f_ca', 'f_ca_jac', 'Q_ca_dc', 'Q_ca_dd', 'H_ca', 'h_ca', 'h_ca_jac', 'R_ca', 'F_ct', 'f_ct', 'f_ct_jac', 'Q_ct', 'h_ct', 'h_ct_jac', 'R_ct', 'convert_meas', 'model_switch', 'trajectory_cv', 'trajectory_ca', 'trajectory_ct', 'trajectory_generator', 'trajectory_with_pd', 'trajectory_to_meas' ] import numbers import numpy as np import scipy.linalg as lg import scipy.stats as st import scipy.special as sl from tracklib.utils import sph2cart, pol2cart def F_poly(order, axis, T): ''' This polynomial transition matrix is used with discretized continuous-time models as well as direct discrete-time models. see section 6.2 and 6.3 in [1]. Parameters ---------- order : int The order of the filter. If order=2, then it is constant velocity, 3 means constant acceleration, 4 means constant jerk, etc. axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. T : float The time-duration of the propagation interval. Returns ------- F : ndarray The state transition matrix under a linear dynamic model of the given order and axis. ''' assert (order >= 1) assert (axis >= 1) F_base = np.zeros((order, order)) tmp = np.arange(order) F_base[0, :] = T**tmp / sl.factorial(tmp) for row in range(1, order): F_base[row, row:] = F_base[0, :order - row] F = np.kron(np.eye(axis), F_base) return F def F_singer(axis, T, tau=20): ''' Get the singer model transition matrix, see section 8.2 in [1]. Parameters ---------- axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. T : float The time-duration of the propagation interval. tau : float The time constant of the target acceleration autocorrelation, that is, the decorrelation time is approximately 2*tau. A reasonable range of tau for Singer's model is between 5 and 20 seconds. Typical values of tau for aircraft are 20s for slow turn and 5s for an evasive maneuver. If this parameter is omitted, the default value of 20 is used.The time constant is assumed the same for all dimensions of motion, so this parameter is scalar. Returns ------- F : ndarray The state transition matrix under a Gauss-Markov dynamic model of the given axis. 
''' assert (axis >= 1) alpha = 1 / tau F_base = np.zeros((3, 3)) aT = alpha * T eaT = np.exp(-aT) F_base[0, 0] = 1 F_base[0, 1] = T F_base[0, 2] = (aT - 1 + eaT) * tau**2 F_base[1, 1] = 1 F_base[1, 2] = (1 - eaT) * tau F_base[2, 2] = eaT F = np.kron(np.eye(axis), F_base) return F def F_van_keuk(axis, T, tau=20): ''' Get the state transition matrix for the van Keuk dynamic model. This is a direct discrete-time model such that the acceleration advances in each dimension over time as a[k+1]=exp(-T/tau)a[k]+std*sqrt(1-exp(-2*T/tau))*v[k], see section 2.2.1 in [4] Parameters ---------- axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. T : float The time-duration of the propagation interval. tau : float The time constant of the target acceleration autocorrelation, that is, the decorrelation time is approximately 2*tau. A reasonable range of tau for Singer's model is between 5 and 20 seconds. Typical values of tau for aircraft are 20s for slow turn and 5s for an evasive maneuver. If this parameter is omitted, the default value of 20 is used.The time constant is assumed the same for all dimensions of motion, so this parameter is scalar. Returns ------- F : ndarray The state transition matrix under a Gauss-Markov dynamic model of the given axis. ''' assert (axis >= 1) F_base = F_poly(3, 1, T) F_base[-1, -1] = np.exp(-T / tau) F = np.kron(np.eye(axis), F_base) return F def Q_poly_dc(order, axis, T, std): ''' Process noise covariance matrix used with discretized continuous-time models. see section 6.2 in [1]. Parameters ---------- order : int The order of the filter. If order=2, then it is constant velocity, 3 means constant acceleration, 4 means constant jerk, etc. axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. T : float The time-duration of the propagation interval. std : number, list The standard deviation (square root of intensity) of continuous-time porcess noise Returns ------- Q : ndarray Process noise convariance ''' assert (order >= 1) assert (axis >= 1) if isinstance(std, numbers.Number): std = [std] * axis sel = np.arange(order - 1, -1, -1) col, row = np.meshgrid(sel, sel) Q_base = T**(col + row + 1) / (sl.factorial(col) * sl.factorial(row) * (col + row + 1)) Q = np.kron(np.diag(std)**2, Q_base) return Q def Q_poly_dd(order, axis, T, std, ht=0): ''' Process noise covariance matrix used with direct discrete-time models. see section 6.3 in [1]. Parameters ---------- order : int The order of the filter. If order=2, then it is constant velocity, 3 means constant acceleration, 4 means constant jerk, etc. axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. T : float The time-duration of the propagation interval. std : number, list The standard deviation of discrete-time porcess noise ht : int ht means that the order of the noise is `ht` greater than the highest order of the state, e.g., if the highest order of state is acceleration, then ht=0 means that the noise is of the same order as the highest order of state, that is, the noise is acceleration and the model is DWPA, see section 6.3.3 in [1]. If the highest order is velocity, the ht=1 means the noise is acceleration and the model is DWNA, see section 6.3.2 in [1]. Returns ------- Q : ndarray Process noise convariance Notes ----- For the model to which the alpha filter applies, we have order=0, ht=2. 
Likewise, for the alpha-beta filter, order=1, ht=1 and for the alpha- beta-gamma filter, order=2, ht=0 ''' assert (order >= 1) assert (axis >= 1) if isinstance(std, numbers.Number): std = [std] * axis sel = np.arange(ht + order - 1, ht - 1, -1) L = T**sel / sl.factorial(sel) Q_base = np.outer(L, L) Q = np.kron(np.diag(std)**2, Q_base) return Q def Q_singer(axis, T, std, tau=20): ''' Process noise covariance matrix used with Singer models. see section 8.2 in [1] Parameters ---------- axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. T : float The time-duration of the propagation interval. std : number, list std is the instantaneous standard deviation of the acceleration knowm as Ornstein-Uhlenbeck process, which can be obtained by assuming it to be 1. Equal to a maxmum acceleration a_M with probability p_M and -a_M with the same probability 2. Equal to zero with probability p_0 3. Uniformly distributed in [-a_M, a_M] with the remaining probability mass All parameters mentioned above are chosen by the designer. So the expected std^2 is (a_M^2 / 3)*(1 + 4*p_M - p_0) tau : float The time constant of the target acceleration autocorrelation, that is, the decorrelation time is approximately 2*tau. A reasonable range of tau for Singer's model is between 5 and 20 seconds. Typical values of tau for aircraft are 20s for slow turn and 5s for an evasive maneuver. If this parameter is omitted, the default value of 20 is used.The time constant is assumed the same for all dimensions of motion, so this parameter is scalar. Returns ------- Q : ndarray Process noise convariance ''' assert (axis >= 1) if isinstance(std, numbers.Number): std = [std] * axis alpha = 1 / tau aT = alpha * T eaT = np.exp(-aT) e2aT = np.exp(-2 * aT) q11 = tau**4 * (1 - e2aT + 2 * aT + 2 * aT**3 / 3 - 2 * aT**2 - 4 * aT * eaT) q12 = tau**3 * (e2aT + 1 - 2 * eaT + 2 * aT * eaT - 2 * aT + aT**2) q13 = tau**2 * (1 - e2aT - 2 * aT * eaT) q22 = tau**2 * (4 * eaT - 3 - e2aT + 2 * aT) q23 = tau * (e2aT + 1 - 2 * eaT) q33 = 1 - e2aT Q_base = np.array([[q11, q12, q13], [q12, q22, q23], [q13, q23, q33]], dtype=float) Q = np.kron(np.diag(std)**2, Q_base) return Q def Q_van_keuk(axis, T, std, tau=20): ''' Process noise covariance matrix for a Van Keuk dynamic model, see section 2.2.1 in [4] Parameters ---------- axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. T : float The time-duration of the propagation interval. std : number, list std is the instantaneous standard deviation of the acceleration knowm as Ornstein-Uhlenbeck process, which can be obtained by assuming it to be 1. Equal to a maxmum acceleration a_M with probability p_M and -a_M with the same probability 2. Equal to zero with probability p_0 3. Uniformly distributed in [-a_M, a_M] with the remaining probability mass All parameters mentioned above are chosen by the designer. So the expected std^2 is (a_M^2 / 3)*(1 + 4*p_M - p_0) tau : float The time constant of the target acceleration autocorrelation, that is, the decorrelation time is approximately 2*tau. A reasonable range of tau for Singer's model is between 5 and 20 seconds. Typical values of tau for aircraft are 20s for slow turn and 5s for an evasive maneuver. If this parameter is omitted, the default value of 20 is used. The time constant is assumed the same for all dimensions of motion, so this parameter is scalar. 
Returns ------- Q : ndarray Process noise convariance ''' assert (axis >= 1) if isinstance(std, numbers.Number): std = [std] * axis Q_base = np.diag([0., 0., 1.]) Q_base = (1 - np.exp(-2 * T / tau)) * Q_base Q = np.kron(np.diag(std)**2, Q_base) return Q def H_pos_only(order, axis): ''' Position-only measurement matrix is used with discretized continuous-time models as well as direct discrete-time models. see section 6.5 in [1]. Parameters ---------- order : int The order of the filter. If order=2, then it is constant velocity, 3 means constant acceleration, 4 means constant jerk, etc. axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. Returns ------- H : ndarray the measurement or obervation matrix ''' assert (order >= 1) assert (axis >= 1) H = np.eye(order * axis) H = H[::order] return H def R_pos_only(axis, std): ''' Position-only measurement noise covariance matrix and the noise of each axis is assumed to be uncorrelated. Parameters ---------- axis : int Motion directions in Cartesian coordinate. If axis=1, it means x-axis, 2 means x-axis and y-axis, etc. Returns ------- R : ndarray the measurement noise covariance matrix ''' assert (axis >= 1) if isinstance(std, numbers.Number): std = [std] * axis R = np.diag(std)**2 return R def F_cv(axis, T): return F_poly(2, axis, T) def f_cv(axis, T): F = F_cv(axis, T) def f(x, u=None): return np.dot(F, x) return f def f_cv_jac(axis, T): F = F_cv(axis, T) def fjac(x, u=None): return F return fjac def Q_cv_dc(axis, T, std): return Q_poly_dc(2, axis, T, std) def Q_cv_dd(axis, T, std): return Q_poly_dd(2, axis, T, std, ht=1) def H_cv(axis): return H_pos_only(2, axis) def h_cv(axis): H = H_cv(axis) def h(x): return np.dot(H, x) return h def h_cv_jac(axis): H = H_cv(axis) def hjac(x): return H return hjac def R_cv(axis, std): return R_pos_only(axis, std) def F_ca(axis, T): return F_poly(3, axis, T) def f_ca(axis, T): F = F_ca(axis, T) def f(x, u=None): return np.dot(F, x) return f def f_ca_jac(axis, T): F = F_ca(axis, T) def fjac(x, u=None): return F return fjac def Q_ca_dc(axis, T, std): return Q_poly_dc(3, axis, T, std) def Q_ca_dd(axis, T, std): return Q_poly_dd(3, axis, T, std, ht=0) def H_ca(axis): return H_pos_only(3, axis) def h_ca(axis): H = H_ca(axis) def h(x): return np.dot(H, x) return h def h_ca_jac(axis): H = H_ca(axis) def hjac(x): return H return hjac def R_ca(axis, std): return R_pos_only(axis, std) def F_ct(axis, turnrate, T): assert (axis >= 2) omega = np.deg2rad(turnrate) if np.fabs(omega) >= np.sqrt(np.finfo(omega).eps): wt = omega * T sin_wt = np.sin(wt) cos_wt = np.cos(wt) sin_div = sin_wt / omega cos_div = (cos_wt - 1) / omega else: sin_wt = 0 cos_wt = 1 sin_div = T cos_div = 0 F = np.array([[1, sin_div, 0, cos_div], [0, cos_wt, 0, -sin_wt], [0, -cos_div, 1, sin_div], [0, sin_wt, 0, cos_wt]], dtype=float) if axis == 3: zblock = F_cv(1, T) F = lg.block_diag(F, zblock) return F def f_ct(axis, T): assert (axis >= 2) def f(x, u=None): omega = np.deg2rad(x[4]) if np.fabs(omega) >= np.sqrt(np.finfo(omega).eps): wt = omega * T sin_wt = np.sin(wt) cos_wt = np.cos(wt) sin_div = sin_wt / omega cos_div = (cos_wt - 1) / omega else: sin_wt = 0 cos_wt = 1 sin_div = T cos_div = 0 F = np.array([[1, sin_div, 0, cos_div], [0, cos_wt, 0, -sin_wt], [0, -cos_div, 1, sin_div], [0, sin_wt, 0, cos_wt]], dtype=float) F = lg.block_diag(F, 1) if axis == 3: zblock = F_cv(1, T) F = lg.block_diag(F, zblock) return np.dot(F, x) return f def f_ct_jac(axis, T): assert (axis >= 2) def fjac(x, u=None): 
omega = np.deg2rad(x[4]) if np.fabs(omega) >= np.sqrt(np.finfo(omega).eps): wt = omega * T sin_wt = np.sin(wt) cos_wt = np.cos(wt) sin_div = sin_wt / omega cos_div = (cos_wt - 1) / omega f0 = np.deg2rad(((wt * cos_wt - sin_wt) * x[1] + (1 - cos_wt - wt * sin_wt) * x[3]) / omega**2) f1 = np.deg2rad((-x[1] * sin_wt - x[3] * cos_wt) * T) f2 = np.deg2rad((wt * (x[1] * sin_wt + x[3] * cos_wt) - (x[1] * (1 - cos_wt) + x[3] * sin_wt)) / omega**2) f3 = np.deg2rad((x[1]*cos_wt - x[3]*sin_wt) * T) else: sin_wt = 0 cos_wt = 1 sin_div = T cos_div = 0 f0 = np.deg2rad(-x[3] * T**2 / 2) f1 = np.deg2rad(-x[3] * T) f2 = np.deg2rad(x[1] * T**2 / 2) f3 = np.deg2rad(x[1] * T) F = np.array([[1, sin_div, 0, cos_div], [0, cos_wt, 0, -sin_wt], [0, -cos_div, 1, sin_div], [0, sin_wt, 0, cos_wt]], dtype=float) F = lg.block_diag(F, 1) F[0, -1] = f0 F[1, -1] = f1 F[2, -1] = f2 F[3, -1] = f3 if axis == 3: zblock = F_cv(1, T) F = lg.block_diag(F, zblock) return F return fjac def Q_ct(axis, T, std): assert (axis >= 2) if isinstance(std, numbers.Number): std = [std] * (axis + 1) # omega block = np.array([T**2 / 2, T], dtype=float).reshape(-1, 1) L = lg.block_diag(block, block, T) Q = np.diag(std)**2 if axis == 3: L = lg.block_diag(L, block) return L @ Q @ L.T def h_ct(axis): assert (axis >= 2) if axis == 3: H = H_pos_only(2, 3) else: H = H_pos_only(2, 2) H = np.insert(H, 4, 0, axis=1) def h(x): return np.dot(H, x) return h def h_ct_jac(axis): assert (axis >= 2) if axis == 3: H = H_pos_only(2, 3) else: H = H_pos_only(2, 2) H = np.insert(H, 4, 0, axis=1) def hjac(x): return H return hjac def R_ct(axis, std): assert (axis >= 2) return R_pos_only(axis, std) def convert_meas(z, R, elev=False): if elev: # coverted measurement r, az, el = z[0], z[1], z[2] var_r, var_az, var_el = R[0, 0], R[1, 1], R[2, 2] lamb_az = np.exp(-var_az / 2) lamb_el = np.exp(-var_el / 2) z_cart = np.array(sph2cart(r, az, el), dtype=float) z_cart[0] = z_cart[0] / lamb_az / lamb_el z_cart[1] = z_cart[1] / lamb_az / lamb_el z_cart[2] = z_cart[2] / lamb_el # coverted covariance r11 = (1 / (lamb_az * lamb_el)**2 - 2) * (r * np.cos(az) * np.cos(el))**2 + (r**2 + var_r) * (1 + lamb_az**4 * np.cos(2 * az)) * (1 + lamb_el**4 * np.cos(2 * el)) / 4 r22 = (1 / (lamb_az * lamb_el)**2 - 2) * (r * np.sin(az) * np.cos(el))**2 + (r**2 + var_r) * (1 - lamb_az**4 * np.cos(2 * az)) * (1 + lamb_el**4 * np.cos(2 * el)) / 4 r33 = (1 / lamb_el**2 - 2) * (r * np.sin(el))**2 + (r**2 + var_r) * (1 - lamb_el**4 * np.cos(2 * el)) / 2 r12 = (1 / (lamb_az * lamb_el)**2 - 2) * r**2 * np.sin(az) * np.cos(az) * np.cos(el)**2 + (r**2 + var_r) * lamb_az**4 * np.sin(2 * az) * (1 + lamb_el**4 * np.cos(2 * el)) / 4 r13 = (1 / (lamb_az * lamb_el**2) - 1 / lamb_az - lamb_az) * r**2 * np.cos(az) * np.sin(el) * np.cos(el) + (r**2 + var_r) * lamb_az * lamb_el**4 * np.cos(az) * np.sin(2 * el) / 2 r23 = (1 / (lamb_az * lamb_el**2) - 1 / lamb_az - lamb_az) * r**2 * np.sin(az) * np.sin(el) * np.cos(el) + (r**2 + var_r) * lamb_az * lamb_el**4 * np.sin(az) * np.sin(2 * el) / 2 R_cart = np.array([[r11, r12, r13], [r12, r22, r23], [r13, r23, r33]], dtype=float) else: # coverted measurement r, az = z[0], z[1] var_r, var_az = R[0, 0], R[1, 1] lamb_az = np.exp(-var_az / 2) z_cart = np.array(pol2cart(r, az), dtype=float) / lamb_az # coverted covariance r11 = (r**2 + var_r) / 2 * (1 + lamb_az**4 * np.cos(2 * az)) + (1 / lamb_az**2 - 2) * (r * np.cos(az))**2 r22 = (r**2 + var_r) / 2 * (1 - lamb_az**4 * np.cos(2 * az)) + (1 / lamb_az**2 - 2) * (r * np.sin(az))**2 r12 = (r**2 + var_r) / 2 * lamb_az**4 * np.sin(2 
* az) + (1 / lamb_az**2 - 2) * r**2 * np.sin(az) * np.cos(az) R_cart = np.array([[r11, r12], [r12, r22]], dtype=float) return z_cart, R_cart def state_switch(state, type_in, type_out): dim = len(state) state = state.copy() if type_in == 'cv': axis = dim // 2 if type_out == 'cv': return state elif type_out == 'ca': ca_dim = 3 * axis sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3)) slct = np.eye(ca_dim)[:, sel] stmp = np.dot(slct, state) return stmp elif type_out == 'ct': slct = np.eye(5, 4) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) stmp = np.dot(slct, state) return stmp else: raise ValueError('unknown output type: %s' % type_out) elif type_in == 'ca': axis = dim // 3 if type_out == 'cv': ca_dim = 3 * axis sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3)) slct = np.eye(ca_dim)[sel] stmp = np.dot(slct, state) return stmp elif type_out == 'ca': return state elif type_out == 'ct': # ca to cv ca_dim = 3 * axis sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3)) slct = np.eye(ca_dim)[sel] stmp = np.dot(slct, state) # cv to ct slct = np.eye(5, 4) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) stmp = np.dot(slct, stmp) return stmp else: raise ValueError('unknown output type: %s' % type_out) elif type_in == 'ct': axis = dim // 2 if type_out == 'cv': slct = np.eye(4, 5) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) stmp = np.dot(slct, state) return stmp elif type_out == 'ca': # ct to cv slct = np.eye(4, 5) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) stmp = np.dot(slct, state) # cv to ca ca_dim = 3 * axis sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3)) slct = np.eye(ca_dim)[:, sel] stmp = np.dot(slct, stmp) return stmp elif type_out == 'ct': return state else: raise ValueError('unknown output type: %s' % type_out) else: raise ValueError('unknown input type: %s' % type_in) def cov_switch(cov, type_in, type_out): dim = len(cov) cov = cov.copy() uncertainty = 100 if type_in == 'cv': axis = dim // 2 if type_out == 'cv': return cov elif type_out == 'ca': ca_dim = 3 * axis sel_diff = range(2, ca_dim, 3) sel = np.setdiff1d(range(ca_dim), sel_diff) slct = np.eye(ca_dim)[:, sel] ctmp = slct @ cov @ slct.T ctmp[sel_diff, sel_diff] = uncertainty return ctmp elif type_out == 'ct': slct = np.eye(5, 4) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) ctmp = slct @ cov @ slct.T ctmp[4, 4] = uncertainty return ctmp else: raise ValueError('unknown output type: %s' % type_out) elif type_in == 'ca': axis = dim // 3 if type_out == 'cv': ca_dim = 3 * axis sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3)) slct = np.eye(ca_dim)[sel] ctmp = slct @ cov @ slct.T return ctmp elif type_out == 'ca': return cov elif type_out == 'ct': # ca to cv ca_dim = 3 * axis sel = np.setdiff1d(range(ca_dim), range(2, ca_dim, 3)) slct = np.eye(ca_dim)[sel] ctmp = slct @ cov @ slct.T # cv to ct slct = np.eye(5, 4) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) ctmp = slct @ ctmp @ slct.T ctmp[4, 4] = uncertainty return ctmp else: raise ValueError('unknown output type: %s' % type_out) elif type_in == 'ct': axis = dim // 2 if type_out == 'cv': slct = np.eye(4, 5) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) ctmp = slct @ cov @ slct.T return ctmp elif type_out == 'ca': # ct to cv slct = np.eye(4, 5) if axis == 3: slct = lg.block_diag(slct, np.eye(2)) ctmp = slct @ cov @ slct.T # cv to ca ca_dim = 3 * axis sel_diff = range(2, ca_dim, 3) sel = np.setdiff1d(range(ca_dim), sel_diff) slct = np.eye(ca_dim)[:, sel] ctmp = slct @ ctmp @ slct.T ctmp[sel_diff, sel_diff] = uncertainty 
return ctmp elif type_out == 'ct': return cov else: raise ValueError('unknown output type: %s' % type_out) else: raise ValueError('unknown input type: %s' % type_in) def model_switch(x, type_in, type_out): dim = len(x) if isinstance(x, np.ndarray): if len(x.shape) == 1: state = state_switch(x, type_in, type_out) return state elif len(x.shape) == 2: cov = cov_switch(x, type_in, type_out) return cov else: raise ValueError("shape of 'x' must be 1 or 2") elif hasattr(x, '__getitem__'): state = state_switch(x[0], type_in, type_out) cov = cov_switch(x[1], type_in, type_out) return state, cov else: raise TypeError("error 'x' type: '%s'" % x.__class__.__name__) def trajectory_cv(state, interval, length, velocity): head = state.copy() dim = head.size order = 2 axis = dim // order traj_cv = np.zeros((length, dim)) vel = velocity cur_vel = head[1:dim:order] if isinstance(vel, numbers.Number): vel *= (cur_vel / lg.norm(cur_vel)) else: vel = [cur_vel[i] if vel[i] is None else vel[i] for i in range(axis)] cur_vel[:] = vel # it will also change the head head_cv = head F = F_cv(axis, interval) for i in range(length): head = np.dot(F, head) traj_cv[i] = head return traj_cv, head_cv def trajectory_ca(state, interval, length, acceleration): head = state.copy() dim = state.size order = 3 axis = dim // order traj_ca = np.zeros((length, dim)) acc = acceleration cur_vel = head[1:dim:order] cur_acc = head[2:dim:order] if isinstance(acc, numbers.Number): acc *= (cur_vel / lg.norm(cur_vel)) else: acc = [cur_acc[i] if acc[i] is None else acc[i] for i in range(axis)] cur_acc[:] = acc # it will also change the head head_ca = head F = F_ca(axis, interval) for i in range(length): head = np.dot(F, head) traj_ca[i] = head return traj_ca, head_ca def trajectory_ct(state, interval, length, turnrate, velocity=None): head = state.copy() dim = state.size order = 2 axis = dim // order traj_ct = np.zeros((length, dim)) if velocity is not None: vel = velocity cur_vel = head[1:dim:order] if isinstance(vel, numbers.Number): vel *= (cur_vel / lg.norm(cur_vel)) else: vel = [cur_vel[i] if vel[i] is None else vel[i] for i in range(axis)] cur_vel[:] = vel head_ct = head F = F_ct(axis, turnrate, interval) for i in range(length): head = np.dot(F, head) traj_ct[i] = head return traj_ct, head_ct def trajectory_generator(record): ''' record = { 'interval': [1, 1], 'start': [ [0, 0, 0], [0, 5, 0] ], 'pattern': [ [ {'model': 'cv', 'length': 100, 'velocity': [250, 250, 0]}, {'model': 'ct', 'length': 25, 'turnrate': 30} ], [ {'model': 'cv', 'length': 100, 'velocity': [250, 250, 0]}, {'model': 'ct', 'length': 30, 'turnrate': 30, 'velocity': 30} ] ], 'noise': [ 10 * np.eye(3), 10 * np.eye(3) ], 'pd': [ 0.9, 0.9 ], 'entries': 2 } ''' dim, order, axis = 9, 3, 3 ca_sel = range(dim) acc_sel = range(2, dim, order) cv_sel = np.setdiff1d(ca_sel, acc_sel) ct_sel = np.setdiff1d(ca_sel, acc_sel) insert_sel = [2, 4, 6] interval = record['interval'] start = record['start'] pattern = record['pattern'] noise = record['noise'] entries = record['entries'] trajs_state = [] for i in range(entries): head = np.kron(start[i], [1., 0., 0.]) state = np.kron(start[i], [1., 0., 0.]).reshape(1, -1) for pat in pattern[i]: if pat['model'] == 'cv': ret, head_cv = trajectory_cv(head[cv_sel], interval[i], pat['length'], pat['velocity']) ret = np.insert(ret, insert_sel, 0, axis=1) head = ret[-1, ca_sel] state[-1, acc_sel] = 0 # set the acceleration of previous state to zero state[-1, cv_sel] = head_cv # change the velocity of previous state state = np.vstack((state, ret)) elif 
pat['model'] == 'ca': ret, head_ca = trajectory_ca(head, interval[i], pat['length'], pat['acceleration']) head = ret[-1, ca_sel] state[-1, ca_sel] = head_ca # change the acceleartion of previous state state = np.vstack((state, ret)) elif pat['model'] == 'ct': if 'velocity' in pat: ret, head_ct = trajectory_ct(head[ct_sel], interval[i], pat['length'], pat['turnrate'], pat['velocity']) else: ret, head_ct = trajectory_ct(head[ct_sel], interval[i], pat['length'], pat['turnrate']) ret = np.insert(ret, insert_sel, 0, axis=1) head = ret[-1, ca_sel] state[-1, acc_sel] = 0 state[-1, ct_sel] = head_ct state = np.vstack((state, ret)) else: raise ValueError('invalid model') trajs_state.append(state) # add noise trajs_meas = [] for i in range(entries): H = H_ca(axis) traj_len = trajs_state[i].shape[0] noi = st.multivariate_normal.rvs(cov=noise[i], size=traj_len) trajs_meas.append(np.dot(trajs_state[i], H.T) + noi) return trajs_state, trajs_meas def trajectory_with_pd(trajs_meas, pd=0.8): for traj in trajs_meas: traj_len = traj.shape[0] remove_idx = st.uniform.rvs(size=traj_len) >= pd traj[remove_idx] = np.nan return trajs_meas def trajectory_to_meas(trajs_meas, lamb=0): trajs_num = len(trajs_meas) min_x, max_x = np.inf, -np.inf min_y, max_y = np.inf, -np.inf min_z, max_z = np.inf, -np.inf max_traj_len = 0 for traj in trajs_meas: min_x, max_x = min(min_x, traj[:, 0].min()), max(max_x, traj[:, 0].max()) min_y, max_y = min(min_y, traj[:, 1].min()), max(max_y, traj[:, 1].max()) min_z, max_z = min(min_z, traj[:, 2].min()), max(max_z, traj[:, 2].max()) max_traj_len = max(max_traj_len, len(traj)) trajs = [] for i in range(max_traj_len): tmp = [] for j in range(trajs_num): if i >= len(trajs_meas[j]) or np.any(np.isnan(trajs_meas[j][i])): continue tmp.append(trajs_meas[j][i]) clutter_num = st.poisson.rvs(lamb) for j in range(clutter_num): x = np.random.uniform(min_x, max_x) y = np.random.uniform(min_y, max_y) z = np.random.uniform(min_z, max_z) tmp.append(np.array([x, y, z], dtype=float)) tmp = np.array(tmp, dtype=float).reshape(-1, 3) trajs.append(tmp) return trajs
32.538614
222
0.549781
0
0
0
0
0
0
0
0
11,754
0.357656
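The docstrings in the record above describe discretized polynomial motion models; a minimal sketch of one constant-velocity prediction step built from its helpers follows. The import path is an assumption based on the record's file path, and tracklib must be installed.

    import numpy as np
    from tracklib.model import F_cv, Q_cv_dd, H_cv  # import path assumed from the record

    T, axis, std = 1.0, 2, 0.1           # 1 s step, planar (x/y) motion
    F = F_cv(axis, T)                    # 4x4 transition over [x, vx, y, vy]
    Q = Q_cv_dd(axis, T, std)            # direct discrete-time process noise
    H = H_cv(axis)                       # position-only measurement matrix

    x = np.array([0., 10., 0., 5.])      # at the origin with velocity (10, 5)
    x_pred = F @ x                       # one constant-velocity prediction step
    print(x_pred)                        # -> [10. 10.  5.  5.]
    print(H @ x_pred)                    # -> [10.  5.] (predicted position)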
f2f2ed4004131258cff5093c7d766ecf35ed6781
848
py
Python
orders/migrations/0005_auto_20210619_0848.py
garrett-rh/Slice-of-a-Pizza
0e30e3a27b0e65e77cd52db1ef7db0470dea7a3f
[ "MIT" ]
2
2020-05-15T10:20:13.000Z
2021-04-03T12:38:37.000Z
orders/migrations/0005_auto_20210619_0848.py
garrett-rh/Slice-of-a-Pizza
0e30e3a27b0e65e77cd52db1ef7db0470dea7a3f
[ "MIT" ]
2
2020-05-15T10:39:42.000Z
2021-11-26T03:01:19.000Z
orders/migrations/0005_auto_20210619_0848.py
garrett-rh/Slice-of-a-Pizza
0e30e3a27b0e65e77cd52db1ef7db0470dea7a3f
[ "MIT" ]
1
2021-11-12T12:10:57.000Z
2021-11-12T12:10:57.000Z
# Generated by Django 3.2.4 on 2021-06-19 08:48

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('orders', '0004_auto_20210619_0847'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dinner_platters',
            name='Tops',
            field=models.ManyToManyField(related_name='DPAddons', to='orders.Topping'),
        ),
        migrations.AlterField(
            model_name='regular_pizza',
            name='toppings',
            field=models.ManyToManyField(blank=True, related_name='rpAddons', to='orders.Topping'),
        ),
        migrations.AlterField(
            model_name='sicilian_pizza',
            name='Tops',
            field=models.ManyToManyField(blank=True, related_name='spAddons', to='orders.Topping'),
        ),
    ]
29.241379
99
0.604953
755
0.89033
0
0
0
0
0
0
228
0.268868
f2f3e2812670f2833f39a5b2980f1ac2b7819f19
1,229
py
Python
benchbuild/engine.py
sturmianseq/benchbuild
e3cc1a24e877261e90baf781aa67a9d6f6528dac
[ "MIT" ]
11
2017-10-05T08:59:35.000Z
2021-05-29T01:43:07.000Z
benchbuild/engine.py
sturmianseq/benchbuild
e3cc1a24e877261e90baf781aa67a9d6f6528dac
[ "MIT" ]
326
2016-07-12T08:11:43.000Z
2022-03-28T07:10:11.000Z
benchbuild/engine.py
sturmianseq/benchbuild
e3cc1a24e877261e90baf781aa67a9d6f6528dac
[ "MIT" ]
13
2016-06-17T12:13:35.000Z
2022-01-04T16:09:12.000Z
""" Orchestrate experiment execution. """ import typing as tp import attr from benchbuild.experiment import Experiment from benchbuild.project import Project from benchbuild.utils import actions, tasks ExperimentCls = tp.Type[Experiment] Experiments = tp.List[ExperimentCls] ProjectCls = tp.Type[Project] Projects = tp.List[ProjectCls] ExperimentProject = tp.Tuple[ExperimentCls, ProjectCls] Actions = tp.Sequence[actions.Step] StepResults = tp.List[actions.StepResult] @attr.s class Experimentator: experiments: Experiments = attr.ib() projects: Projects = attr.ib() _plan: tp.Sequence[actions.Step] = attr.ib(init=False, default=None) def plan(self) -> Actions: if not self._plan: self._plan = tasks.generate_plan(self.experiments, self.projects) return self._plan @property def num_actions(self) -> int: p = self.plan() return sum([len(child) for child in p]) def start(self) -> StepResults: p = self.plan() # Prepare project environment. return tasks.execute_plan(p) def print_plan(self) -> None: p = self.plan() print("Number of actions to execute: {}".format(self.num_actions)) print(*p)
25.604167
77
0.68511
745
0.606184
0
0
753
0.612693
0
0
105
0.085435
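The Experimentator above generates its plan lazily and caches it so plan(), num_actions, and start() all reuse one plan. A self-contained sketch of that same pattern, with the benchbuild types replaced by plain lists purely for illustration (requires the attrs package):

    import attr

    @attr.s
    class PlanCache:
        items = attr.ib()
        _plan = attr.ib(init=False, default=None)

        def plan(self):
            if not self._plan:
                # Stand-in for tasks.generate_plan(...)
                self._plan = [("run", i) for i in self.items]
            return self._plan

    p = PlanCache(items=["a", "b"])
    assert p.plan() is p.plan()  # generated once, then cached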
f2f4e04a8614d8edbaff0777a5f1c47f01d09f5f
6,751
py
Python
misc_code/fcn_loss_layer.py
kbardool/mrcnn3
f4cbb1e34de97ab08558b56fb7362647436edbd7
[ "MIT" ]
7
2018-08-07T13:56:32.000Z
2021-04-06T11:07:20.000Z
misc_code/fcn_loss_layer.py
kbardool/Contextual-Inference-V2
f4cbb1e34de97ab08558b56fb7362647436edbd7
[ "MIT" ]
null
null
null
misc_code/fcn_loss_layer.py
kbardool/Contextual-Inference-V2
f4cbb1e34de97ab08558b56fb7362647436edbd7
[ "MIT" ]
1
2019-02-01T06:52:18.000Z
2019-02-01T06:52:18.000Z
""" Mask R-CNN Dataset functions and classes. Copyright (c) 2017 Matterport, Inc. Licensed under the MIT License (see LICENSE for details) Written by Waleed Abdulla """ import numpy as np import tensorflow as tf import keras.backend as KB import keras.layers as KL import keras.initializers as KI import keras.engine as KE import mrcnn.utils as utils from mrcnn.loss import smooth_l1_loss import pprint pp = pprint.PrettyPrinter(indent=2, width=100) ##----------------------------------------------------------------------- ## FCN loss ##----------------------------------------------------------------------- def fcn_loss_graph(target_masks, pred_masks): # def fcn_loss_graph(input): # target_masks, pred_masks = input """Mask binary cross-entropy loss for the masks head. target_masks: [batch, height, width, num_classes]. pred_masks: [batch, height, width, num_classes] float32 tensor """ # Reshape for simplicity. Merge first two dimensions into one. print('\n fcn_loss_graph ' ) print(' target_masks shape :', target_masks.get_shape()) print(' pred_masks shape :', pred_masks.get_shape()) mask_shape = tf.shape(target_masks) print(' mask_shape shape :', mask_shape.shape) target_masks = KB.reshape(target_masks, (-1, mask_shape[1], mask_shape[2])) print(' target_masks shape :', target_masks.shape) pred_shape = tf.shape(pred_masks) print(' pred_shape shape :', pred_shape.shape) pred_masks = KB.reshape(pred_masks, (-1, pred_shape[1], pred_shape[2])) print(' pred_masks shape :', pred_masks.get_shape()) # Compute binary cross entropy. If no positive ROIs, then return 0. # shape: [batch, roi, num_classes] # Smooth-L1 Loss loss = KB.switch(tf.size(target_masks) > 0, smooth_l1_loss(y_true=target_masks, y_pred=pred_masks), tf.constant(0.0)) loss = KB.mean(loss) loss = KB.reshape(loss, [1, 1]) print(' loss type is :', type(loss)) return loss ##----------------------------------------------------------------------- ## FCN loss for L2 Normalized graph ##----------------------------------------------------------------------- def fcn_norm_loss_graph(target_masks, pred_masks): ''' Mask binary cross-entropy loss for the masks head. target_masks: [batch, height, width, num_classes]. pred_masks: [batch, height, width, num_classes] float32 tensor ''' print(type(target_masks)) pp.pprint(dir(target_masks)) # Reshape for simplicity. Merge first two dimensions into one. 
print('\n fcn_norm_loss_graph ' ) print(' target_masks shape :', target_masks.shape) print(' pred_masks shape :', pred_masks.shape) print('\n L2 normalization ------------------------------------------------------') output_shape=KB.int_shape(pred_masks) print(' output shape is :' , output_shape, ' ', pred_masks.get_shape(), pred_masks.shape, tf.shape(pred_masks)) output_flatten = KB.reshape(pred_masks, (pred_masks.shape[0], -1, pred_masks.shape[-1]) ) output_norm1 = KB.l2_normalize(output_flatten, axis = 1) output_norm = KB.reshape(output_norm1, KB.shape(pred_masks) ) print(' output_flatten : ', KB.int_shape(output_flatten) , ' Keras tensor ', KB.is_keras_tensor(output_flatten) ) print(' output_norm1 : ', KB.int_shape(output_norm1) , ' Keras tensor ', KB.is_keras_tensor(output_norm1) ) print(' output_norm final : ', KB.int_shape(output_norm) , ' Keras tensor ', KB.is_keras_tensor(output_norm) ) pred_masks1 = output_norm print('\n L2 normalization ------------------------------------------------------') gauss_flatten = KB.reshape(target_masks, (target_masks.shape[0], -1, target_masks.shape[-1]) ) gauss_norm1 = KB.l2_normalize(gauss_flatten, axis = 1) gauss_norm = KB.reshape(gauss_norm1, KB.shape(target_masks)) print(' guass_flatten : ', KB.int_shape(gauss_flatten), 'Keras tensor ', KB.is_keras_tensor(gauss_flatten) ) print(' gauss_norm shape : ', KB.int_shape(gauss_norm1) , 'Keras tensor ', KB.is_keras_tensor(gauss_norm1) ) print(' gauss_norm final shape: ', KB.int_shape(gauss_norm) , 'Keras tensor ', KB.is_keras_tensor(gauss_norm) ) print(' complete') target_masks1 = gauss_norm mask_shape = tf.shape(target_masks1) print(' mask_shape shape :', mask_shape.shape) target_masks1 = KB.reshape(target_masks1, (-1, mask_shape[1], mask_shape[2])) print(' target_masks shape :', target_masks1.shape) pred_shape = tf.shape(pred_masks1) print(' pred_shape shape :', pred_shape.shape) pred_masks1 = KB.reshape(pred_masks1, (-1, pred_shape[1], pred_shape[2])) print(' pred_masks shape :', pred_masks1.get_shape()) # Compute binary cross entropy. If no positive ROIs, then return 0. # shape: [batch, roi, num_classes] # Smooth-L1 Loss loss = KB.switch(tf.size(target_masks1) > 0, smooth_l1_loss(y_true=target_masks1, y_pred=pred_masks1), tf.constant(0.0)) loss = KB.mean(loss) loss = KB.reshape(loss, [1, 1]) print(' loss type is :', type(loss)) return loss class FCNLossLayer(KE.Layer): """ Returns: ------- """ def __init__(self, config=None, **kwargs): super().__init__(**kwargs) print('>>> FCN Loss Layer : initialization') self.config = config def call(self, inputs): print('\n FCN Loss Layer : call') print(' target_masks .shape/type :', inputs[0].shape) # , type(inputs[0])) print(' pred_masks shape/type :', inputs[1].shape) # , type(inputs[1])) target_masks = inputs[0] pred_masks = inputs[1] loss = KB.placeholder(shape=(1), dtype = 'float32', name = 'fcn_loss') norm_loss = KB.placeholder(shape=(1), dtype = 'float32', name = 'fcn_norm_loss') loss = fcn_loss_graph(target_masks, pred_masks) norm_loss = fcn_norm_loss_graph(target_masks, pred_masks) return [loss, norm_loss] def compute_output_shape(self, input_shape): # may need to change dimensions of first return from IMAGE_SHAPE to MAX_DIM return [(1), (1)]
40.915152
123
0.578877
1,105
0.163679
0
0
0
0
0
0
2,581
0.382314
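The record above applies a smooth-L1 loss despite the "binary cross-entropy" docstring. A self-contained NumPy sketch of the standard smooth-L1 definition follows; it is an illustration only, since mrcnn.loss.smooth_l1_loss itself is not shown in this record.

    import numpy as np

    def smooth_l1(y_true, y_pred):
        # Standard smooth-L1: quadratic below an absolute diff of 1, linear above.
        diff = np.abs(y_true - y_pred)
        return np.where(diff < 1.0, 0.5 * diff ** 2, diff - 0.5)

    print(smooth_l1(np.array([0.0, 0.0]), np.array([0.5, 3.0])))  # -> [0.125 2.5]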
f2f6b4c27e7561e29dbb147f768e0c58a7d09bb7
2,150
py
Python
mysticbit/plots.py
Connossor/mystic-bit
f57f471d3d154560d23bc9eff17fd5b8f284684c
[ "MIT" ]
6
2018-11-23T20:13:53.000Z
2019-02-25T15:54:55.000Z
mysticbit/plots.py
Connossor/mystic-bit
f57f471d3d154560d23bc9eff17fd5b8f284684c
[ "MIT" ]
null
null
null
mysticbit/plots.py
Connossor/mystic-bit
f57f471d3d154560d23bc9eff17fd5b8f284684c
[ "MIT" ]
11
2018-11-23T20:55:44.000Z
2021-12-20T17:25:24.000Z
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns


def plot_well_map(df_logs, fig_size=(10, 10)):
    """ Simple map of locations of nearby wells """

    f, ax = plt.subplots(figsize=fig_size)
    df = df_logs.drop_duplicates(subset=['HACKANAME', 'X', 'Y'])
    plt.scatter(df['X'], df['Y'])
    plt.axis('scaled')

    for label, x, y in zip(df['HACKANAME'], df['X'], df['Y']):
        plt.annotate(label, xy=(x, y), xytext=(-10, 10), textcoords='offset points')

    return f, ax


def make_log_plot(df_logs, well_name, cols=['GR', 'DT', 'CALI'], ztop=None, zbot=None, fig_size=(8, 12)):
    """ Single well log plot, both GR and Resistivity """

    logs = df_logs[df_logs['HACKANAME'] == well_name]
    logs = logs.sort_values(by='TVDSS')

    if not ztop:
        ztop = logs.TVDSS.min()
    if not zbot:
        zbot = logs.TVDSS.max()

    f, ax = plt.subplots(nrows=1, ncols=len(cols), figsize=fig_size)

    for i in range(len(ax)):
        log_name = cols[i]
        ax[i].scatter(logs[log_name], logs['TVDSS'], marker='+')
        ax[i].set_xlabel(log_name)
        ax[i].set_ylim(ztop, zbot)
        ax[i].invert_yaxis()
        ax[i].grid()
        ax[i].locator_params(axis='x', nbins=3)
        if i > 0:
            ax[i].set_yticklabels([])

    # ax[0].set_xlabel("GR")
    # ax[0].set_xlim(0, 150)
    # ax[1].set_xlabel("RESD")
    # ax[1].set_xscale('log')
    # ax[1].set_xlim(0.2, 2000)
    # ax[1].set_yticklabels([])

    f.suptitle('Well: {}'.format(well_name), fontsize=14, y=0.94)

    return f, ax


def add_predictions(ax, predictions):
    """ Add predicted bands onto plt axes"""

    # Scatter plot
    ax.scatter(predictions['value'], predictions['TVDSS'], marker='+')

    # Shaded bands
    tvds = predictions[predictions.model_name == 'high']['TVDSS']
    x_hi = predictions[predictions.model_name == 'high']['value']
    x_lo = predictions[predictions.model_name == 'low']['value']
    ax.fill(np.concatenate([x_lo, x_hi[::-1]]),
            np.concatenate([tvds, tvds[::-1]]), alpha=0.5)
28.289474
105
0.58093
0
0
0
0
0
0
0
0
494
0.229767
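A usage sketch for the plotting record above, with a tiny synthetic two-well table carrying the columns the code expects (HACKANAME, X, Y, TVDSS plus the log curves). The data values are invented for illustration, and the functions are assumed to be in scope (or importable from mysticbit.plots).

    import numpy as np
    import pandas as pd

    # Hypothetical two-well table with the columns the plotting code expects.
    df = pd.DataFrame({
        'HACKANAME': ['W1'] * 5 + ['W2'] * 5,
        'X': [0] * 5 + [100] * 5,
        'Y': [0] * 5 + [50] * 5,
        'TVDSS': list(range(1000, 1005)) * 2,
        'GR': np.random.uniform(20, 120, 10),
        'DT': np.random.uniform(60, 140, 10),
        'CALI': np.random.uniform(8, 12, 10),
    })

    f, ax = plot_well_map(df)
    f, ax = make_log_plot(df, 'W1')  # one track per entry in cols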
f2f8aa778931cc06d7071bcf8a708498a3154677
5,244
py
Python
cc_plugin_eustace/eustace_global_attrs.py
eustace-data/cc-plugin-eustace
4b44d287433b632ea6f859cd72d5dd4b8c361cee
[ "BSD-2-Clause" ]
null
null
null
cc_plugin_eustace/eustace_global_attrs.py
eustace-data/cc-plugin-eustace
4b44d287433b632ea6f859cd72d5dd4b8c361cee
[ "BSD-2-Clause" ]
null
null
null
cc_plugin_eustace/eustace_global_attrs.py
eustace-data/cc-plugin-eustace
4b44d287433b632ea6f859cd72d5dd4b8c361cee
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/env python
"""
cc_plugin_eustace.eustace_global_attrs

Compliance Test Suite: Check core global attributes in EUSTACE files
"""

import os

from netCDF4 import Dataset

# Import base objects from compliance checker
from compliance_checker.base import Result, BaseNCCheck, GenericFile

# Restrict which vocabs will load (for efficiency)
os.environ["ESSV_VOCABS_ACTIVE"] = "eustace-team"

# Import checklib
import checklib.register.nc_file_checks_register as check_package


class EUSTACEGlobalAttrsCheck(BaseNCCheck):

    register_checker = True
    name = 'eustace-global-attrs'
    _cc_spec = 'eustace-global-attrs'
    _cc_spec_version = '0.2'
    supported_ds = [GenericFile, Dataset]

    _cc_display_headers = {
        3: 'Required',
        2: 'Recommended',
        1: 'Suggested'
    }

    def setup(self, ds):
        pass

    def check_cr01(self, ds):
        return check_package.ValidGlobalAttrsMatchFileNameCheck(
            kwargs={'delimiter': '_', 'order': 'institution_id,realm,frequency', 'extension': '.nc'},
            level="HIGH",
            vocabulary_ref="eustace-team:eustace")(ds)

    def check_cr02(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': 'CF-1\\.6', 'attribute': 'Conventions'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr03(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{4,}', 'attribute': 'source'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr04(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': 'EUSTACE', 'attribute': 'project_id'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr05(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{4,}', 'attribute': 'contact'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr06(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{4,}', 'attribute': 'history'},
            level="MEDIUM",
            vocabulary_ref="")(ds)

    def check_cr07(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{4,}', 'attribute': 'references'},
            level="MEDIUM",
            vocabulary_ref="")(ds)

    def check_cr08(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{1,}', 'attribute': 'product_version'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr09(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{4,}', 'attribute': 'title'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr10(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{20,}', 'attribute': 'summary'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr11(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.{4,}', 'attribute': 'creator_name'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr12(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '.+@.+\\..+', 'attribute': 'creator_email'},
            level="HIGH",
            vocabulary_ref="")(ds)

    def check_cr13(self, ds):
        return check_package.GlobalAttrVocabCheck(
            kwargs={'attribute': 'frequency', 'vocab_lookup': 'canonical_name'},
            level="LOW",
            vocabulary_ref="eustace-team:eustace")(ds)

    def check_cr14(self, ds):
        return check_package.GlobalAttrVocabCheck(
            kwargs={'attribute': 'institution_id', 'vocab_lookup': 'canonical_name'},
            level="HIGH",
            vocabulary_ref="eustace-team:eustace")(ds)

    def check_cr15(self, ds):
        return check_package.GlobalAttrRegexCheck(
            kwargs={'regex': '\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}.*', 'attribute': 'creation_date'},
            level="MEDIUM",
            vocabulary_ref="")(ds)
46.821429
153
0.474256
4,755
0.906751
0
0
0
0
0
0
1,218
0.232265
f2fc68a089e1541439b963f873f1136d0c533af5
705
py
Python
final_project/machinetranslation/tests/tests.py
NicoFRizzo/xzceb-flask_eng_fr
71c8a4c970e7a179f496ff0960d5fae2bba0ffc1
[ "Apache-2.0" ]
null
null
null
final_project/machinetranslation/tests/tests.py
NicoFRizzo/xzceb-flask_eng_fr
71c8a4c970e7a179f496ff0960d5fae2bba0ffc1
[ "Apache-2.0" ]
null
null
null
final_project/machinetranslation/tests/tests.py
NicoFRizzo/xzceb-flask_eng_fr
71c8a4c970e7a179f496ff0960d5fae2bba0ffc1
[ "Apache-2.0" ]
null
null
null
import unittest
import translator


class TestTranslator(unittest.TestCase):
    def test_one_e2f(self):
        response = translator.english_to_french('IBM Translator')
        self.assertEqual(response, 'Traducteur IBM')

    def test_un_f2e(self):
        response = translator.french_to_english('Traducteur IBM')
        self.assertEqual(response, 'IBM Translator')

    def test_hello_2f(self):
        response = translator.english_to_french('Hello')
        self.assertEqual(response, 'Bonjour')

    def test_bonjour_2e(self):
        response = translator.french_to_english('Bonjour')
        self.assertEqual(response, 'Hello')


if __name__ == "__main__":
    unittest.main()
32.045455
66
0.695035
618
0.876596
0
0
0
0
0
0
106
0.150355
f2fc8f6f95ceeb8cf32d3eeed59de008b87d73f7
556
py
Python
src/appi/oop/classes/class_attributes.py
Kaju-Bubanja/APPI
011afc872a0055ff56001547be6da73017042ad5
[ "MIT" ]
null
null
null
src/appi/oop/classes/class_attributes.py
Kaju-Bubanja/APPI
011afc872a0055ff56001547be6da73017042ad5
[ "MIT" ]
null
null
null
src/appi/oop/classes/class_attributes.py
Kaju-Bubanja/APPI
011afc872a0055ff56001547be6da73017042ad5
[ "MIT" ]
null
null
null
class Student:
    # class variables
    school_name = 'ABC School'

    # constructor
    def __init__(self, name, age):
        # instance variables
        self.name = name
        self.age = age


s1 = Student("Harry", 12)
# access instance variables
print('Student:', s1.name, s1.age)

# access class variable
print('School name:', Student.school_name)

# Modify instance variables
s1.name = 'Jessa'
s1.age = 14
print('Student:', s1.name, s1.age)

# Modify class variables
Student.school_name = 'XYZ School'
print('School name:', Student.school_name)
20.592593
42
0.676259
198
0.356115
0
0
0
0
0
0
237
0.426259
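One pitfall worth noting next to the record above: assigning through an instance does not modify the class variable, it creates an instance attribute that shadows it. A self-contained sketch:

    class Student:
        school_name = 'ABC School'

    s1, s2 = Student(), Student()
    s1.school_name = 'XYZ School'   # creates an instance attribute on s1 only
    print(s1.school_name)           # -> XYZ School (instance attribute wins)
    print(s2.school_name)           # -> ABC School (still the class attribute)
    print(Student.school_name)      # -> ABC School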
f2fea27459e59001e49be2e7ed0478672dee266a
264
py
Python
clamor/rest/endpoints/__init__.py
TomSputz/Clamor
13222b90532938e6ebdbe8aea0430512e7d22817
[ "MIT" ]
15
2019-07-05T20:26:18.000Z
2020-09-18T12:44:16.000Z
clamor/rest/endpoints/__init__.py
TomSputz/Clamor
13222b90532938e6ebdbe8aea0430512e7d22817
[ "MIT" ]
7
2019-07-07T19:55:07.000Z
2019-08-20T22:07:31.000Z
clamor/rest/endpoints/__init__.py
TomSputz/Clamor
13222b90532938e6ebdbe8aea0430512e7d22817
[ "MIT" ]
6
2019-07-07T20:39:29.000Z
2020-11-06T10:12:20.000Z
# -*- coding: utf-8 -*-

from . import base
from .audit_log import *
from .channel import *
from .emoji import *
from .gateway import *
from .guild import *
from .invite import *
from .oauth import *
from .user import *
from .voice import *
from .webhook import *
18.857143
24
0.69697
0
0
0
0
0
0
0
0
23
0.087121
f2feb8df0aea648f82fd8f4f86ab95ad219d052f
1,878
py
Python
hamster2pdf.py
vleg1991/hamster2pdf
1dda22a39b65a0f24b76d278f3d708ac51d3c262
[ "MIT" ]
null
null
null
hamster2pdf.py
vleg1991/hamster2pdf
1dda22a39b65a0f24b76d278f3d708ac51d3c262
[ "MIT" ]
null
null
null
hamster2pdf.py
vleg1991/hamster2pdf
1dda22a39b65a0f24b76d278f3d708ac51d3c262
[ "MIT" ]
null
null
null
#!/usr/bin/python
# -*- coding: utf-8 -*-

import os
import datetime
import hamster.client
import reports
import argparse
import pdfkit

import gettext
gettext.install('brainz', '../datas/translations/')

# custom settings:
reportTitle = "My Activities Report"
activityFilter = "unfiled"


def valid_date(s):
    try:
        return datetime.datetime.strptime(s, "%Y-%m-%d").date()
    except ValueError:
        msg = "Not a valid date: '{0}'.".format(s)
        raise argparse.ArgumentTypeError(msg)


# find dates:
today = datetime.date.today()
first = today.replace(day=1)
previousLast = first - datetime.timedelta(days=1)
previousFirst = previousLast.replace(day=1)

# assign arguments:
parser = argparse.ArgumentParser(description="export the hamster database to pdf")
parser.add_argument("--thismonth", action="store_true", help="export this month's records")
parser.add_argument("--lastmonth", action="store_true", help="export last month's records")
parser.add_argument("-s", dest="startDate", default=today, help="start date (default: today)", type=valid_date)
parser.add_argument("-e", dest="endDate", default=today, help="end date (default: today)", type=valid_date)
parser.add_argument("-o", dest="reportFile", default="report.pdf", help="output file (default: report.pdf)")

# parse arguments:
args = parser.parse_args()
if args.thismonth:
    args.startDate = first
    args.endDate = today
if args.lastmonth:
    args.startDate = previousFirst
    args.endDate = previousLast

# prepare filenames:
htmlFilename = os.path.splitext(args.reportFile)[0] + ".html"
pdfFilename = os.path.splitext(args.reportFile)[0] + ".pdf"

storage = hamster.client.Storage()
facts = storage.get_facts(args.startDate, args.endDate)

# generate report
reports.simple(facts, args.startDate, args.endDate, htmlFilename)

# convert .html to .pdf file:
pdfkit.from_file(htmlFilename, pdfFilename)
27.617647
111
0.736954
0
0
0
0
0
0
0
0
577
0.307242
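The record above validates dates through argparse's type= hook; a self-contained sketch of how that hook behaves in isolation (same logic as the record's valid_date):

    import argparse
    import datetime

    def valid_date(s):
        try:
            return datetime.datetime.strptime(s, "%Y-%m-%d").date()
        except ValueError:
            raise argparse.ArgumentTypeError("Not a valid date: '{0}'.".format(s))

    p = argparse.ArgumentParser()
    p.add_argument("-s", dest="startDate", type=valid_date)
    print(p.parse_args(["-s", "2024-01-31"]).startDate)  # -> 2024-01-31 (a date object)
    # An invalid value such as "2024-31-01" exits with "Not a valid date: ...".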
f2ff24739f7d32b20b931df9776f794aac82539a
589
py
Python
SingleTon.py
SuperLeis/meituan
71d521826bc50cb8e7bee5617f84e2c26dce1394
[ "MIT" ]
1
2020-05-02T14:30:18.000Z
2020-05-02T14:30:18.000Z
SingleTon.py
SuperLeis/meituan
71d521826bc50cb8e7bee5617f84e2c26dce1394
[ "MIT" ]
null
null
null
SingleTon.py
SuperLeis/meituan
71d521826bc50cb8e7bee5617f84e2c26dce1394
[ "MIT" ]
null
null
null
from functools import wraps

# created by PL
# git hello world


def single_ton(cls):
    _instance = {}

    @wraps(cls)
    def single(*args, **kwargs):
        if cls not in _instance:
            _instance[cls] = cls(*args, **kwargs)
        return _instance[cls]

    return single


@single_ton
class SingleTon(object):
    val = 123

    def __init__(self, a):
        self.a = a


if __name__ == '__main__':
    s = SingleTon(1)
    t = SingleTon(2)
    print(s is t)
    print(s.a, t.a)
    print(s.val, t.val)
    print('test')
    print("git test")
19.633333
50
0.550085
89
0.151104
0
0
263
0.44652
0
0
60
0.101868
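One consequence of the decorator in the record above worth spelling out: after decoration, the name SingleTon is bound to the wrapper function, not the class, so the second constructor call returns the first instance and its argument is ignored. A sketch, assuming the record's code is in scope and running on Python 3 (where functools.wraps sets __wrapped__):

    s = SingleTon(1)
    t = SingleTon(2)
    print(s is t)                                # -> True; t's argument 2 was ignored
    print(callable(SingleTon))                   # -> True (it is now a function)
    print(isinstance(s, SingleTon.__wrapped__))  # -> True; the real class survives here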
840025939ea1c2adbcc0cc3524f18c7230eb6fad
374
py
Python
exercicios-python/ex031.py
anavesilva/python-introduction
d85fb9381e348262584fd2942e4818ee822adbe5
[ "MIT" ]
null
null
null
exercicios-python/ex031.py
anavesilva/python-introduction
d85fb9381e348262584fd2942e4818ee822adbe5
[ "MIT" ]
null
null
null
exercicios-python/ex031.py
anavesilva/python-introduction
d85fb9381e348262584fd2942e4818ee822adbe5
[ "MIT" ]
null
null
null
# Custo da viagem (trip cost)
distancia = float(input('Qual a distância da sua viagem? '))  # "What is the distance of your trip?"
valor1 = distancia * 0.5
valor2 = distancia * 0.45
print('Você está prestes a começar uma viagem de {}Km.'.format(distancia))  # "You are about to start a trip of {} km."
if distancia <= 200:
    print('O preço de sua passagem será de R${:.2f}.'.format(valor1))  # "Your ticket will cost R$..."
else:
    print('O preço de sua passagem será de R${:.2f}.'.format(valor2))
37.4
76
0.687166
0
0
0
0
0
0
0
0
196
0.513089
8401761cbdcacb5f4d5eb5531d513247beb5261b
10,254
py
Python
datatest/differences.py
ajhynes7/datatest
78742e98de992807286655f5685a2dc33a7b452e
[ "Apache-2.0" ]
277
2016-05-12T13:22:49.000Z
2022-03-11T00:18:32.000Z
datatest/differences.py
ajhynes7/datatest
78742e98de992807286655f5685a2dc33a7b452e
[ "Apache-2.0" ]
57
2016-05-18T01:03:32.000Z
2022-02-17T13:48:43.000Z
datatest/differences.py
ajhynes7/datatest
78742e98de992807286655f5685a2dc33a7b452e
[ "Apache-2.0" ]
16
2016-05-22T11:35:19.000Z
2021-12-01T19:41:42.000Z
"""Difference classes.""" __all__ = [ 'BaseDifference', 'Missing', 'Extra', 'Invalid', 'Deviation', ] from cmath import isnan from datetime import timedelta from ._compatibility.builtins import * from ._compatibility import abc from ._compatibility.contextlib import suppress from ._utils import _make_token from ._utils import pretty_timedelta_repr NOVALUE = _make_token( 'NoValueType', '<no value>', 'Token to mark when a value does not exist.', truthy=False, ) NANTOKEN = _make_token( 'NanTokenType', '<nan token>', 'Token for comparing differences that contain not-a-number values.', ) def _nan_to_token(value): """Return NANTOKEN if *value* is NaN else return value unchanged.""" def func(x): with suppress(TypeError): if isnan(x): return NANTOKEN return x if isinstance(value, tuple): return tuple(func(x) for x in value) return func(value) def _safe_isnan(x): """Wrapper for isnan() so it won't fail on non-numeric values.""" try: return isnan(x) except TypeError: return False class BaseDifference(abc.ABC): """The base class for "difference" objects---all other difference classes are derived from this base. """ __slots__ = () @property @abc.abstractmethod def args(self): """The tuple of arguments given to the difference constructor. Some difference (like :class:`Deviation`) expect a certain number of arguments and assign a special meaning to the elements of this tuple, while others are called with only a single value. """ # Concrete method should return tuple of args used in __init__(). raise NotImplementedError def __eq__(self, other): if self.__class__ != other.__class__: return False self_args = tuple(_nan_to_token(x) for x in self.args) other_args = tuple(_nan_to_token(x) for x in other.args) return self_args == other_args def __ne__(self, other): # <- For Python 2.x support. There is return not self.__eq__(other) # no implicit relationship between # __eq__() and __ne__() in Python 2. def __hash__(self): try: return hash((self.__class__, self.args)) except TypeError as err: msg = '{0} in args tuple {1!r}'.format(str(err), self.args) hashfail = TypeError(msg) hashfail.__cause__ = getattr(err, '__cause__', None) # getattr for 2.x support raise hashfail def __repr__(self): cls_name = self.__class__.__name__ args_repr = ', '.join( getattr(x, '__name__', repr(x)) for x in self.args) return '{0}({1})'.format(cls_name, args_repr) class Missing(BaseDifference): """Created when *value* is missing from the data under test. In the following example, the required value ``'A'`` is missing from the data under test:: data = ['B', 'C'] requirement = {'A', 'B', 'C'} datatest.validate(data, requirement) Running this example raises the following error: .. code-block:: none :emphasize-lines: 2 ValidationError: does not satisfy set membership (1 difference): [ Missing('A'), ] """ __slots__ = ('_args',) def __init__(self, value): self._args = (value,) @property def args(self): return self._args class Extra(BaseDifference): """Created when *value* is unexpectedly found in the data under test. In the following example, the value ``'C'`` is found in the data under test but it's not part of the required values:: data = ['A', 'B', 'C'] requirement = {'A', 'B'} datatest.validate(data, requirement) Running this example raises the following error: .. 
code-block:: none :emphasize-lines: 2 ValidationError: does not satisfy set membership (1 difference): [ Extra('C'), ] """ __slots__ = ('_args',) def __init__(self, value): self._args = (value,) @property def args(self): return self._args class Invalid(BaseDifference): """Created when a value does not satisfy a function, equality, or regular expression requirement. In the following example, the value ``9`` does not satisfy the required function:: data = [2, 4, 6, 9] def is_even(x): return x % 2 == 0 datatest.validate(data, is_even) Running this example raises the following error: .. code-block:: none :emphasize-lines: 2 ValidationError: does not satisfy is_even() (1 difference): [ Invalid(9), ] """ __slots__ = ('_invalid', '_expected') def __init__(self, invalid, expected=NOVALUE): try: is_equal = invalid == expected except TypeError: is_equal = False if is_equal: msg = 'expects unequal values, got {0!r} and {1!r}' raise ValueError(msg.format(invalid, expected)) self._invalid = invalid self._expected = expected @property def args(self): if self._expected is NOVALUE: return (self._invalid,) return (self._invalid, self._expected) @property def invalid(self): """The invalid value under test.""" return self._invalid @property def expected(self): """The expected value (optional).""" return self._expected def __repr__(self): cls_name = self.__class__.__name__ invalid_repr = getattr(self._invalid, '__name__', repr(self._invalid)) if self._expected is not NOVALUE: expected_repr = ', expected={0}'.format( getattr(self._expected, '__name__', repr(self._expected))) else: expected_repr = '' return '{0}({1}{2})'.format(cls_name, invalid_repr, expected_repr) def _slice_datetime_repr_prefix(obj_repr): """Takes a default "datetime", "date", or "timedelta" repr and returns it with the module prefix sliced-off:: >>> _slice_datetime_repr_prefix('datetime.date(2020, 12, 25)') 'date(2020, 12, 25)' """ # The following implementation (using "startswith" and "[9:]") # may look clumsy but it can run up to 10 times faster than a # more concise "re.compile()" and "regex.sub()" approach. In # some situations, this function can get called many, many # times. DON'T GET CLEVER--KEEP THIS FUNCTION FAST. if obj_repr.startswith('datetime.datetime(') \ or obj_repr.startswith('datetime.date(') \ or obj_repr.startswith('datetime.timedelta('): return obj_repr[9:] return obj_repr class Deviation(BaseDifference): """Created when a quantative value deviates from its expected value. In the following example, the dictionary item ``'C': 33`` does not satisfy the required item ``'C': 30``:: data = {'A': 10, 'B': 20, 'C': 33} requirement = {'A': 10, 'B': 20, 'C': 30} datatest.validate(data, requirement) Running this example raises the following error: .. 
code-block:: none :emphasize-lines: 2 ValidationError: does not satisfy mapping requirement (1 difference): { 'C': Deviation(+3, 30), } """ __slots__ = ('_deviation', '_expected') def __init__(self, deviation, expected): try: if deviation + expected == expected: msg = 'deviation quantity must not be empty, got {0!r}' exc = ValueError(msg.format(deviation)) raise exc except TypeError: msg = ('Deviation arguments must be quantitative, ' 'got deviation={0!r}, expected={1!r}') exc = TypeError(msg.format(deviation, expected)) exc.__cause__ = None raise exc self._deviation = deviation self._expected = expected @property def args(self): return (self._deviation, self._expected) @property def deviation(self): """Quantative deviation from expected value.""" return self._deviation @property def expected(self): """The expected value.""" return self._expected def __repr__(self): cls_name = self.__class__.__name__ deviation = self._deviation if _safe_isnan(deviation): deviation_repr = "float('nan')" elif isinstance(deviation, timedelta): deviation_repr = pretty_timedelta_repr(deviation) else: try: deviation_repr = '{0:+}'.format(deviation) # Apply +/- sign except (TypeError, ValueError): deviation_repr = repr(deviation) expected = self._expected if _safe_isnan(expected): expected_repr = "float('nan')" else: expected_repr = repr(expected) if expected_repr.startswith('datetime.'): expected_repr = _slice_datetime_repr_prefix(expected_repr) return '{0}({1}, {2})'.format(cls_name, deviation_repr, expected_repr) def _make_difference(actual, expected, show_expected=True): """Returns an appropriate difference for *actual* and *expected* values that are known to be unequal. Setting *show_expected* to False, signals that the *expected* argument should be omitted when creating an Invalid difference (this is useful for reducing duplication when validating data against a single function or object). """ if actual is NOVALUE: return Missing(expected) if expected is NOVALUE: return Extra(actual) if isinstance(expected, bool) or isinstance(actual, bool): if show_expected: return Invalid(actual, expected) return Invalid(actual) try: deviation = actual - expected return Deviation(deviation, expected) except (TypeError, ValueError): if show_expected: return Invalid(actual, expected) return Invalid(actual)
29.048159
91
0.610396
7,359
0.717671
0
0
1,234
0.120343
0
0
4,621
0.450653
8401c1577e1e3475bf83b16d801193d6422761d2
2,735
py
Python
dashboard/urls.py
playfulMIT/kimchi
66802cc333770932a8c8b1a44ea5d235d916a8f1
[ "MIT" ]
null
null
null
dashboard/urls.py
playfulMIT/kimchi
66802cc333770932a8c8b1a44ea5d235d916a8f1
[ "MIT" ]
16
2019-12-10T19:40:27.000Z
2022-02-10T11:51:06.000Z
dashboard/urls.py
playfulMIT/kimchi
66802cc333770932a8c8b1a44ea5d235d916a8f1
[ "MIT" ]
null
null
null
from django.urls import re_path

from . import views

urlpatterns = [
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/versiontime", views.get_last_processed_time),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/players", views.get_player_list),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/sessions", views.get_player_to_session_map),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/puzzles", views.get_puzzles),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/puzzlekeys", views.get_puzzle_keys),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/snapshotsperpuzzle", views.get_snapshot_metrics),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/attempted", views.get_attempted_puzzles),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/completed", views.get_completed_puzzles),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/timeperpuzzle", views.get_time_per_puzzle),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/funnelperpuzzle", views.get_funnel_per_puzzle),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/shapesperpuzzle", views.get_shapes_per_puzzle),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/modesperpuzzle", views.get_modes_per_puzzle),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/levelsofactivity", views.get_levels_of_activity),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/sequencebetweenpuzzles", views.get_sequence_between_puzzles),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/mloutliers", views.get_machine_learning_outliers),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/persistence", views.get_persistence_by_attempt_data),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/puzzlepersistence", views.get_persistence_by_puzzle_data),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/insights", views.get_insights),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/difficulty", views.get_puzzle_difficulty_mapping),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/misconceptions", views.get_misconceptions_data),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/competency", views.get_competency_data),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/report/(?P<start>[0-9]+)/(?P<end>[0-9]+)", views.get_report_summary),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/report", views.get_report_summary),
    re_path(r"^api/dashboard/(?P<slug>[a-zA-Z0-9-_]+)/(?P<player>[a-zA-Z0-9-_.]+)/(?P<level>[a-zA-Z0-9-_.]+)/replayurls", views.get_replay_urls),
    re_path(r"^(?P<slug>[a-zA-Z0-9-_]+)/dashboard/", views.dashboard),
    re_path(r"^(?P<slug>[a-zA-Z0-9-_]+)/thesisdashboard/", views.thesis_dashboard)
]
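A sketch of how the named regex groups in these patterns reach a view under standard Django dispatch; the view body below is a placeholder for illustration, not this project's actual implementation.

from django.http import JsonResponse

# Django passes each (?P<name>...) group as a keyword argument, so the
# replayurls pattern above resolves to a call shaped like
# get_replay_urls(request, slug=..., player=..., level=...).
def get_replay_urls(request, slug, player, level):
    return JsonResponse({"slug": slug, "player": player, "level": level})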
80.441176
145
0.697623
0
0
0
0
0
0
0
0
1,481
0.541499
8403354322f3d276144123191c8e910a521e71d2
1,945
py
Python
VQ2D/vq2d/baselines/predictor.py
emulhall/episodic-memory
27bafec6e09c108f0efe5ac899eabde9d1ac40cc
[ "MIT" ]
27
2021-10-16T02:39:17.000Z
2022-03-31T11:16:11.000Z
VQ2D/vq2d/baselines/predictor.py
emulhall/episodic-memory
27bafec6e09c108f0efe5ac899eabde9d1ac40cc
[ "MIT" ]
5
2022-03-23T04:53:36.000Z
2022-03-29T23:39:07.000Z
VQ2D/vq2d/baselines/predictor.py
emulhall/episodic-memory
27bafec6e09c108f0efe5ac899eabde9d1ac40cc
[ "MIT" ]
13
2021-11-25T19:17:29.000Z
2022-03-25T14:01:47.000Z
from typing import Any, Dict, List, Sequence import numpy as np import torch from detectron2.engine import DefaultPredictor class SiamPredictor(DefaultPredictor): def __call__( self, original_images: Sequence[np.ndarray], visual_crops: Sequence[np.ndarray], ) -> List[Dict[str, Any]]: """ Args: original_images (np.ndarray): a list of images of shape (H, W, C) (in BGR order). visual_crops (np.ndarray): a list of images of shape (H, W, C) (in BGR order) Returns: predictions (list[dict]): the output of the model for a list of images. See :doc:`/tutorials/models` for details about the format. """ with torch.no_grad(): # https://github.com/sphinx-doc/sphinx/issues/4258 # Apply pre-processing to image. inputs = [] for original_image, visual_crop in zip(original_images, visual_crops): if self.input_format == "RGB": # whether the model expects BGR inputs or RGB original_image = original_image[:, :, ::-1] visual_crop = visual_crop[:, :, ::-1] height, width = original_image.shape[:2] image = self.aug.get_transform(original_image).apply_image( original_image ) image = torch.as_tensor(image.astype("float32").transpose(2, 0, 1)) reference = torch.as_tensor( visual_crop.astype("float32").transpose(2, 0, 1) ) inputs.append( { "image": image, "height": height, "width": width, "reference": reference, } ) predictions = self.model(inputs) return predictions
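A hypothetical usage sketch for the predictor above. The config and weights paths are placeholders, and the model named by the config must be one that accepts the extra "reference" input; this is not a runnable recipe for this exact repository.

import numpy as np
from detectron2.config import get_cfg

cfg = get_cfg()
cfg.merge_from_file("siam_rcnn.yaml")        # placeholder config path
cfg.MODEL.WEIGHTS = "model_final.pth"        # placeholder weights path

predictor = SiamPredictor(cfg)
frame = np.zeros((480, 640, 3), dtype=np.uint8)  # BGR query frame
crop = np.zeros((64, 64, 3), dtype=np.uint8)     # BGR visual crop
outputs = predictor([frame], [crop])             # one prediction dict per frame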
38.9
93
0.519794
1,817
0.93419
0
0
0
0
0
0
589
0.302828
84050b8c0b169a6bad0d62fb6d1d81572077e370
109
py
Python
user_agent2/__init__.py
dytttf/user_agent2
311bfc5c820ed8233207f57f27bfd7b789040d9d
[ "MIT" ]
null
null
null
user_agent2/__init__.py
dytttf/user_agent2
311bfc5c820ed8233207f57f27bfd7b789040d9d
[ "MIT" ]
1
2022-02-08T11:58:15.000Z
2022-02-08T16:59:37.000Z
user_agent2/__init__.py
dytttf/user_agent2
311bfc5c820ed8233207f57f27bfd7b789040d9d
[ "MIT" ]
3
2021-11-21T22:47:43.000Z
2022-02-15T00:45:40.000Z
from user_agent2.base import ( generate_user_agent, generate_navigator, generate_navigator_js, )
18.166667
30
0.761468
0
0
0
0
0
0
0
0
0
0
840519afb7f020a56b84911fb8113394b9946381
7,626
py
Python
mutagene/benchmark/multiple_benchmark.py
neksa/pymutagene
1122d64a5ab843a4960124933f78f3c2e388a792
[ "CC0-1.0" ]
3
2020-05-18T07:00:46.000Z
2022-02-20T02:55:48.000Z
mutagene/benchmark/multiple_benchmark.py
neksa/pymutagene
1122d64a5ab843a4960124933f78f3c2e388a792
[ "CC0-1.0" ]
31
2020-03-13T16:28:34.000Z
2021-02-27T22:12:15.000Z
mutagene/benchmark/multiple_benchmark.py
neksa/pymutagene
1122d64a5ab843a4960124933f78f3c2e388a792
[ "CC0-1.0" ]
3
2020-03-24T20:01:44.000Z
2020-11-26T17:30:39.000Z
import glob import random import uuid import numpy as np from multiprocessing import Pool from sklearn.metrics import ( recall_score, precision_score, accuracy_score, f1_score, mean_squared_error) from mutagene.io.profile import read_profile_file, write_profile, read_signatures from mutagene.signatures.identify import NegLogLik from mutagene.benchmark.deconstructsigs import deconstruct_sigs_custom from mutagene.benchmark.generate_benchmark import * # from mutagene.identify import decompose_mutational_profile_counts def multiple_benchmark_helper(j): dirname = "data/benchmark/multiple" # for i in [5, 10, 30]: for i in [30, ]: W, signature_names = read_signatures(i) N = W.shape[1] # r = random.randrange(2, i // 3 + 2) r = random.randrange(2, min(i + 1, 15)) # print(np.random.choice(N, r), .05 + np.random.dirichlet(np.ones(r), 1)) while True: h0 = np.zeros(N) h0[np.random.choice(N, r)] = 0.05 + np.random.dirichlet(np.ones(r), 1) if np.greater(h0, 0.05).sum() == r: break h0 /= h0.sum() v0 = W.dot(h0) # print(h0) n_mutations = random.randrange(10, 50) v0_counts = np.random.multinomial(n_mutations, v0 / v0.sum()) # print(v0_counts) random_name = str(uuid.uuid4())[:4] fname = dirname + "/{:02d}_{}_{}_{}".format(i, r, n_mutations, random_name) print(fname) profile_fname = fname + ".profile" info_fname = fname + ".info" mle_info = fname + ".MLE.info" mlez_info = fname + ".MLEZ.info" ds_info = fname + ".ds.info" write_profile(profile_fname, v0_counts) write_decomposition(info_fname, h0, signature_names) ################################################## results = deconstruct_sigs_custom(profile_fname, signatures=i) write_decomposition(ds_info, results, signature_names) ################################################## profile = read_profile_file(profile_fname) for method, method_fname in [("MLE", mle_info), ("MLEZ", mlez_info)]: _, _, results = decompose_mutational_profile_counts( profile, (W, signature_names), method, debug=False, others_threshold=0.0) write_decomposition(method_fname, results, signature_names) def multiple_benchmark(): # pathlib.Path(dirname).mkdir(parents=True, exist_ok=True) random.seed(13425) with Pool(10) as p: p.map(multiple_benchmark_helper, range(100)) def multiple_benchmark_run_helper(data): fname, signature_ids, W, force = data # methods = ['MLE', 'MLEZ', 'AICc', 'BIC', 'AICcZ', 'BICZ'] methods = ['AICc', 'AICcZ'] # print(fname) profile = read_profile_file(fname) for method in methods: info = "{}.{}.info".format(fname.split(".")[0], method) if isfile(info) and not force: continue print(info) _, _, results = decompose_mutational_profile_counts( profile, (W, signature_ids), method, debug=False, others_threshold=0.0) exposure_dict = {x['name']: x['score'] for x in results} exposure = [exposure_dict[name] for name in signature_ids] write_decomposition(info, np.array(exposure), signature_ids) def multiple_benchmark_run(N, signature_ids, W, force=False): def get_iterator(): for fname in glob.glob("data/benchmark/multiple/{:02d}_*.profile".format(N), recursive=True): yield (fname, signature_ids, W, force) random.seed(13425) with Pool(10) as p: p.map(multiple_benchmark_run_helper, get_iterator(), 100) def aggregate_multiple_benchmarks(): methods = { "mle": ".MLE.info", "mlez": ".MLEZ.info", "ds": ".ds.info", 'aicc': '.AICc.info', 'bic': '.BIC.info', 'aiccz': '.AICcz.info', 'bicz': '.BICz.info', } # signatures_thresholds = { # 5: 0.06, # 10: 0.03, # 30: 0.01, # } signatures_thresholds = { 5: 0.06, 10: 0.06, 30: 0.06, } # signatures_thresholds = { # 5: 0.0001, # 10: 0.0001, # 30: 
0.0001, # } # only report the signature 2 value (as in DeconstructSigs benchmark) with open("data/benchmark/multiple/res1.txt", 'w') as o: o.write("file_id\tsigtype\tnsig\tnmut\tmethod\tSRMSE\tPRMSE\tSTRMSE\tLLIK\tLLIK0\tTLLIK\tTLLIK0\tprecision\trecall\taccuracy\tf1\n") for fname in glob.glob("data/benchmark/multiple/*.profile", recursive=True): file_id = fname.split("/")[-1].split(".")[0] sigtype, r, nmut, replica = fname.split("/")[-1].split(".")[0].split("_") sigtype = int(sigtype) if sigtype != 30: continue W, signature_names = read_signatures(sigtype) info_fname = fname.split(".")[0] + '.info' orig_profile = read_profile_file(fname) h0, names = read_decomposition(info_fname) # threshold = 0.06 threshold = 0.06 # threshold = 1.0 / np.sqrt(int(nmut)) if method != "ds" else 0.06 h0_threshold = np.where(h0 > threshold, h0, 0.0) # zero below threshold h0_binary = np.array(h0_threshold) > 0.0 # true / false for threshold nsig = np.count_nonzero(h0_binary) if nsig < int(r): print("LESS", sigtype, nsig, r) if nsig > int(r): print("MORE", sigtype, nsig, r) if nsig <= 1: continue if nsig > 10: continue for method in methods: method_fname = fname.split(".")[0] + methods[method] values, names = read_decomposition(method_fname) # print(method_fname) if values is None: continue h = np.array(values) if h.sum() == 0: continue h_threshold = np.where(h > threshold, h, 0.0) # zero below threshold reconstructed_profile = W.dot(h) # print(h) # print(reconstructed_profile) PRMSE = np.sqrt(mean_squared_error( np.array(orig_profile) / np.array(orig_profile).sum(), np.array(reconstructed_profile) / np.array(reconstructed_profile).sum())) SRMSE = np.sqrt(mean_squared_error(h0, h)) STRMSE = np.sqrt(mean_squared_error(h0_threshold, h_threshold)) LLIK0 = - NegLogLik(h0, W, orig_profile) TLLIK0 = - NegLogLik(h0_threshold, W, orig_profile) LLIK = - NegLogLik(h, W, orig_profile) TLLIK = - NegLogLik(h_threshold, W, orig_profile) # print(h0.sum()) # print(h.sum()) h_binary = np.array(h_threshold) > 0.0 # true / false for threshold precision = precision_score(h0_binary, h_binary) recall = recall_score(h0_binary, h_binary) accuracy = accuracy_score(h0_binary, h_binary) f1 = f1_score(h0_binary, h_binary) o.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format( file_id, sigtype, nsig, nmut, method, SRMSE, PRMSE, STRMSE, LLIK, LLIK0, TLLIK, TLLIK0, precision, recall, accuracy, f1))
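The aggregation step above scores each run with an exposure RMSE plus precision/recall over thresholded signatures. A self-contained sketch of that comparison on invented toy vectors (the values are illustrative only):

import numpy as np
from sklearn.metrics import mean_squared_error, precision_score, recall_score

h0 = np.array([0.0, 0.5, 0.5, 0.0])  # toy true exposures
h = np.array([0.1, 0.4, 0.5, 0.0])   # toy estimated exposures
threshold = 0.06

SRMSE = np.sqrt(mean_squared_error(h0, h))
h0_binary = h0 > threshold
h_binary = np.where(h > threshold, h, 0.0) > 0.0
print(SRMSE)                                 # ~0.0707
print(precision_score(h0_binary, h_binary))  # 2 of 3 detected signatures are real
print(recall_score(h0_binary, h_binary))     # both true signatures recovered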
35.142857
141
0.560976
0
0
356
0.046682
0
0
0
0
1,569
0.205744
8406877949c3d33a1b17a8c7fd596cba40c180cf
3,542
py
Python
Restaurant_Finder_App/restaurant_finder_app/restaurant_finder_app/restaurant/migrations/0001_initial.py
midhun3112/restaurant_locator
6ab5e906f26476352176059a8952c2c3f5b127bf
[ "Apache-2.0" ]
null
null
null
Restaurant_Finder_App/restaurant_finder_app/restaurant_finder_app/restaurant/migrations/0001_initial.py
midhun3112/restaurant_locator
6ab5e906f26476352176059a8952c2c3f5b127bf
[ "Apache-2.0" ]
null
null
null
Restaurant_Finder_App/restaurant_finder_app/restaurant_finder_app/restaurant/migrations/0001_initial.py
midhun3112/restaurant_locator
6ab5e906f26476352176059a8952c2c3f5b127bf
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-02-01 13:47 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Category', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=255)), ], options={ 'verbose_name': 'category', 'verbose_name_plural': 'categories', 'default_related_name': 'categories', }, ), migrations.CreateModel( name='Collection', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('collection_name', models.CharField(max_length=255)), ], options={ 'default_related_name': 'collections', }, ), migrations.CreateModel( name='Restaurant', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('restaurant_name', models.CharField(max_length=255)), ('restaurant_image', models.ImageField(default='restaurant_pic/images/no-name.jpg', upload_to='images/restaurant_pic/')), ], options={ 'default_related_name': 'restaurant', }, ), migrations.CreateModel( name='RestaurantTiming', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('start_time', models.TimeField()), ('end_time', models.TimeField()), ('restaurant', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='restaurant_timing', to='restaurant.Restaurant')), ], options={ 'verbose_name': 'Restaurant Timing', 'verbose_name_plural': 'Restaurant Timings', 'default_related_name': 'restaurant_timing', }, ), migrations.CreateModel( name='WeekDay', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('day', models.CharField(max_length=255)), ], options={ 'verbose_name': 'WeekDay', 'verbose_name_plural': 'WeekDays', 'default_related_name': 'week_day', }, ), migrations.AddField( model_name='restauranttiming', name='working_days', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='restaurant_timing', to='restaurant.WeekDay'), ), migrations.AddField( model_name='collection', name='restaurant', field=models.ManyToManyField(related_name='collections', to='restaurant.Restaurant'), ), migrations.AddField( model_name='category', name='restaurant', field=models.ManyToManyField(related_name='categories', to='restaurant.Restaurant'), ), ]
38.5
168
0.559006
3,351
0.946076
0
0
0
0
0
0
919
0.259458
8407722043fe4e1043792c735a7c99de2eae2b6e
1,807
py
Python
ckl/run.py
damianbrunold/checkerlang-py
97abe5eda5f692ef61acf906a5f596c65688b582
[ "MIT" ]
null
null
null
ckl/run.py
damianbrunold/checkerlang-py
97abe5eda5f692ef61acf906a5f596c65688b582
[ "MIT" ]
null
null
null
ckl/run.py
damianbrunold/checkerlang-py
97abe5eda5f692ef61acf906a5f596c65688b582
[ "MIT" ]
null
null
null
import argparse import os import sys from ckl.values import ( ValueList, ValueString, NULL ) from ckl.errors import ( CklSyntaxError, CklRuntimeError ) from ckl.interpreter import Interpreter def main(): parser = argparse.ArgumentParser(description="CKL run command") parser.add_argument("-s", "--secure", action="store_true") parser.add_argument("-l", "--legacy", action="store_true") parser.add_argument("-m", "--modulepath", nargs="?") parser.add_argument("script") parser.add_argument("args", nargs="*") args = parser.parse_args(sys.argv[1:]) modulepath = ValueList() if args.modulepath: modulepath.addItem(ValueString(args.modulepath)) interpreter = Interpreter(args.secure, args.legacy) if not os.path.exists(args.script): print(f"File not found '{args.script}'", file=sys.stderr) sys.exit(1) scriptargs = ValueList() for scriptarg in args.args: scriptargs.addItem(ValueString(scriptarg)) interpreter.environment.put("args", scriptargs) interpreter.environment.put("scriptname", ValueString(args.script)) interpreter.environment.put("checkerlang_module_path", modulepath) with open(args.script, encoding="utf-8") as infile: script = infile.read() try: result = interpreter.interpret(script, args.script) if result != NULL: print(str(result)) except CklRuntimeError as e: print(str(e.value.asString().value) + ": " + e.msg + " (Line " + str(e.pos) + ")") if e.stacktrace: for st in e.stacktrace: print(str(st)) except CklSyntaxError as e: print(e.msg + ((" (Line " + str(e.pos) + ")") if e.pos else "")) if __name__ == "__main__": main()
28.234375
72
0.633094
0
0
0
0
0
0
0
0
230
0.127283
840a373b87a5269d4b1deb705abae42b6703a996
21,190
py
Python
Justice-Engine-source/security_monkey/alerters/custom/JusticeEngine.py
sendgrid/JusticeEngine
9b39618c836bfcb120db5fb75557cc45c0105e9f
[ "MIT" ]
1
2019-03-27T18:52:54.000Z
2019-03-27T18:52:54.000Z
Justice-Engine-source/security_monkey/alerters/custom/JusticeEngine.py
sendgrid/JusticeEngine
9b39618c836bfcb120db5fb75557cc45c0105e9f
[ "MIT" ]
4
2018-08-17T19:10:05.000Z
2018-11-16T16:46:04.000Z
Justice-Engine-source/security_monkey/alerters/custom/JusticeEngine.py
sendgrid/JusticeEngine
9b39618c836bfcb120db5fb75557cc45c0105e9f
[ "MIT" ]
2
2018-10-24T19:19:52.000Z
2018-11-16T16:38:23.000Z
import datetime import fnmatch import hashlib import json import time import arrow import os from botocore.exceptions import ClientError from boto.s3.key import Key from security_monkey.alerters import custom_alerter from security_monkey.common.sts_connect import connect from security_monkey import app, db from security_monkey.datastore import Account from security_monkey.task_scheduler.alert_scheduler import schedule_krampus_alerts class Notify: """Notification for resources outside of the Justice Engine.""" KILL = 0 DISABLE = 1 def __init__(self): self.conn = None self.bucket = None self.key = None self.s3connect(os.getenv('AWS_ACCOUNT_NAME'), os.getenv('KRAMPUS_BUCKET')) def s3connect(self, account, bucket): """ s3connect will attempt to connect to an s3 bucket resource. If the resource does not exist it will attempt to create it :param account: string the aws account you are connecting to :param bucket: string the name of the bucket you wish to connect to :returns: Boolean of connection Status """ self.conn = connect( account, 's3' ) if self.conn.lookup(bucket) is None: app.logger.debug("Bucket Does not exist. Creating one") self.bucket = self.conn.create_bucket(bucket) else: self.bucket = self.conn.get_bucket(bucket) self.key = Key(self.bucket) return True def get_s3_key(self, filename): """ Return the key contents for a specific s3 object :param filename: the file name of the s3 object :returns: data in the form of a Dict. """ if self.bucket.lookup(filename) is None: self.key = self.bucket.new_key(filename) self.key.set_contents_from_string(json.dumps(json.loads('{}'))) self.key.key = filename tmp = self.key.get_contents_as_string() return json.loads(tmp) def write_to_s3_object(self, filename, data): """ Write to s3 :param filename: the s3 object file name :param data: string of data to be written to the object :returns: Boolean of writing success """ try: self.key.key = filename self.key.set_contents_from_string(data) return True except ClientError as e: app.logger.critical( "Unable to push information back to s3. :: {0}".format(e)) return False class Jury(): """ The Jury makes verdict based on evidence. The Jury class contains the methods used to convert items with issues into actionable jobs for Krampus to kill. """ KILL_THRESHOLD = int(os.getenv('KILL_THRESHOLD')) DISABLE_THRESHOLD = int(os.getenv('DISABLE_THRESHOLD')) KILL_RESPONSE_DELTA = int(os.getenv('KILL_RESPONSE_DELTA')) DISABLE_RESPONSE_DELTA = int(os.getenv('DISABLE_RESPONSE_DELTA')) SECMONKEY_KRAMPUS_ITEM_MAP = { 's3': ['s3'], 'ebs': ['ebssnapshot', 'ebsvolume'], 'ec2': ['ec2image', 'ec2instance'], 'rds': [ 'rdsclustersnapshot', 'rdsdbcluster', 'rdsdbinstance', 'rdssecuritygroup', 'rdssnapshot', 'rdssubnetgroup'], 'iam': [ 'iamgroup', 'iamrole', 'iamssl', 'iamuser', 'policy', 'samlprovider', 'keypair'], 'security_group': ['securitygroup'], None: [ 'acm', 'sqs', 'cloudtrail', 'config', 'configrecorder', 'connection', 'virtual_gateway', 'elasticip', 'elasticsearchservice', 'elb', 'alb', 'networkinterface', 'gcefirewallrule', 'gcenetwork', 'gcsbucket', 'organization', 'repository', 'team', 'glacier', 'kms', 'lambda', 'redshift', 'route53', 'route53domains', 'ses', 'sns', 'dhcp', 'endpoint', 'flowlog', 'natgateway', 'networkacl', 'peering', 'routetable', 'subnet', 'vpc', 'vpn']} @staticmethod def calc_score(issues): """ Helper method for calculating scores after an audit. 
:param issues: list of the item issues to be turned into a score
        :return: int of the score based on the item's issues
        """
        score = 0
        for i in issues:
            if not i.justified:
                score += i.score
        return score

    @staticmethod
    def aws_object_type_mapper(aws_object_type):
        """
        maps an aws_object_type from sec-monkey into an actionable type for krampus
        :param aws_object_type: string of the sec-monkey type
        :return: string key of the krampus type, or None if unmapped
        """
        # Class attributes are not in scope inside a staticmethod, so the
        # map must be referenced through the class itself.
        for key in Jury.SECMONKEY_KRAMPUS_ITEM_MAP:
            if aws_object_type in Jury.SECMONKEY_KRAMPUS_ITEM_MAP[key]:
                return key
        return None

    @staticmethod
    def s3_handler(item, issue):
        """
        Append information required for handling s3 resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        jobs = []
        for grants in item.config['Grants']:
            jobs.append({
                "s3_principal": grants,
                "s3_permission": item.config['Grants'][grants]
            })
        return jobs

    @staticmethod
    def ebs_handler(item, issue):
        """
        Append information required for handling ebs resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        return []

    @staticmethod
    def ec2_handler(item, issue):
        """
        Append information required for handling ec2 resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        return []

    @staticmethod
    def rds_handler(item, issue):
        """
        Append information required for handling rds resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        return []

    @staticmethod
    def iam_handler(item, issue):
        """
        Append information required for handling iam resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        return []

    @staticmethod
    def sg_handler(item, issue):
        """
        Append information required for handling security group resources
        :param item: the item to be handled
        :param issue: the issue to be handled
        :return: jobs based on this action
        """
        jobs = []
        # We don't want to do anything to issues that have a scoring of 0
        if issue.score == 0:
            return []
        if len(issue.notes.split(':')) != 2:
            return []
        rule_issue_id = issue.notes.split(':')[1]
        for rule in item.config.get('rules', []):
            if int(rule_issue_id) == int(rule.get("sg_index", -1)):
                jobs.append({
                    'cidr_ip': rule['cidr_ip'],
                    'from_port': rule['from_port'],
                    'to_port': rule['to_port'],
                    'proto': rule['ip_protocol'],
                    'direction': rule['rule_type']
                })
        return jobs

    @staticmethod
    def justice(score):
        """
        Determine the action taken for a specific score
        :param score: int of the score for a specific item
        :return: string of the action to be taken
        """
        int_score = int(score)
        if int_score >= Jury.KILL_THRESHOLD:
            return "kill"
        if int_score >= Jury.DISABLE_THRESHOLD:
            return "disable"
        else:
            return "ignore"

    @staticmethod
    def should_be_actioned(score):
        """
        Simple helper method to determine whether a job warrants action
        :param score: The int value
        :return: Boolean if job should be actioned.
        """
        if Jury.justice(score) == 'ignore':
            return False
        else:
            return True

    @staticmethod
    def get_current_time():
        """
        :return: float of current unix (seconds since epoch)
        """
        return time.time()

    @staticmethod
    def when_to_action(action):
        """
        returns an int of when to action a specific resource based on the action
        :param action: String of the action decided
        :return: int, representing the unix time the action should occur.
""" if action == "kill": delta = Jury.KILL_RESPONSE_DELTA return Jury.get_current_time() + delta elif action == "disable": delta = Jury.DISABLE_RESPONSE_DELTA return Jury.get_current_time() + delta else: app.logger.error("when_to_action was invoked with an issue determined to be ignored.") raise ValueError("I can't serve Justice to those who have not committed injustice.") @staticmethod def gather_details_for_nuanced_actions(item, issues, object_type): """ Append actions related to specific issues. If we are not completely deleting a resource, we need more information for Krampus to action the job generated. i.e. If 3 rules in a security group need to be removed it's really 3 jobs that need to be added to the task file. :param item: the security monkey item that is to be used for gathering details :param issues: the secmonkey item called :param object_type: string of the aws resource type of the item :return jobs: a list of the jobs required to action the item. """ if object_type is None: app.logger.info("Krampus does not have a handler for item type {0}".format(item.index)) return {} type_handler = { 's3': Jury.s3_handler, 'ebs': Jury.ebs_handler, 'ec2': Jury.ec2_handler, 'rds': Jury.rds_handler, 'iam': Jury.iam_handler, 'security_group': Jury.sg_handler } resource_details = [] for issue in item.audit_issues: extra_fields_by_aws_type = type_handler[object_type](item, issue) map(lambda x: (isinstance(x, dict)), extra_fields_by_aws_type) resource_details.extend(extra_fields_by_aws_type) return resource_details @staticmethod def get_case_insensitive_arn(item): """ get_case_insensitive_arn will return the arn if it exists within the provided item. there was some historical inconsistency here so this is just a safety class for older versions. param item: the secmonkey item containing the arn :return: string the arn result. """ for key in ['arn', 'Arn']: if item.config.get(key, False): return item.config[key] app.logger.debug("Arn & arn not in config for {0} of type :: {1}".format(item.name, item.index)) return None @staticmethod def get_account_of_item(item): """ returns the string of the account id hosting a specific item. This helps with S3 resources. :param item: the secmonkey item containing the arn :return: string account id result. """ # base_arn = Jury.get_case_insensitive_arn(item) return str(db.session.query(Account.identifier).filter( Account.name == item.account).one()[0]) @staticmethod def build_krampus_jobs_for_item(score, item, current_tasks, whitelist): """ build_krampus_jobs_for_item will create actionable jobs for krampus for a given aws resource. * if krampus is not going to delete the aws resource entirely, multiple jobs might be produced. :param score: int representing how 'bad' the resource is according to sec_monkey. :param item: the secmonkey item that needs jobs built :param current_tasks: dict of the current_tasks for krampus :param whitelist: dict of the krampus whitelist :return: list of the jobs for this item to be actioned by krampus. 
""" arn = Jury.get_case_insensitive_arn(item) if arn is None: return [] action = Jury.justice(score) issues = "" for issue in item.audit_issues: issues += "{0}::{1}\t{2}\n".format(issue.issue, issue.notes, issue.score) job = { 'score': score, 'action': action, 'action_time': Jury.when_to_action(action), 'audited_time': Jury.get_current_time(), 'aws_resource_name': arn, 'aws_account': Jury.get_account_of_item(item), 'aws_region': item.region, 'aws_object_type': Jury.aws_object_type_mapper(item.index), 'human_readable_name': item.name, 'secmonkey_id': item.db_item.id, 'issues': issues, } # Only create jobs for the item if it's actually workable my Krampus if job['aws_resource_name'] is not None: if job['aws_object_type'] is None: job["unique_id"] = Jury.hash_job(job) job['is_whitelisted'] = True return [job] if job['action'] == 'disable': jobs = Jury.gather_details_for_nuanced_actions( item, job['issues'], job['aws_object_type']) map(lambda x: x.update(job), jobs) map(lambda x: x.update({"unique_id": Jury.hash_job(job)}), jobs) for job in jobs: job['is_whitelisted'] = Jury.whitelist_match(arn, whitelist) or Jury.convicted(job['unique_id'], current_tasks) return jobs else: job["unique_id"] = Jury.hash_job(job) job['is_whitelisted'] = Jury.whitelist_match(arn, whitelist) or Jury.convicted(job['unique_id'], current_tasks) return [job] return [] @staticmethod def hash_job(job): """ hash_job creates a unique id to compare jobs. :param job: the job to be hashed :return: string hash representation uniquely identifying the job """ hasher = hashlib.sha1() hasher.update(job['aws_resource_name']) hasher.update(str(job['score'])) hasher.update(str(job['issues'])) hasher.update(job['human_readable_name']) return hasher.hexdigest() @staticmethod def make_local_from_timestamp(timestamp, timezone='US/Mountain'): """ make_local_from_timestamp returns a local string representation of a unix timestamp :param timestamp: int unix timestamp :param timezone: string timezone matching a tzdb entry from iana :return: human readable string representing a local timestamp. """ utc = arrow.get(timestamp) local_time = utc.to(timezone) return local_time.strftime('%a %I:%M %p') @staticmethod def make_utc_from_timestamp(timestamp): """ make_utc_from_timestamp returns a human readable string representing a UTC timestamp :param timestamp: timestamp in %Y-%m-%d %H:%M:%S :return: the unix timestamp as a datetime.datetime object """ utc_time = datetime.datetime.utcfromtimestamp(timestamp) return utc_time.strftime('%Y-%m-%d %H:%M:%S') @staticmethod def remove_if_in_current_tasks(arn, current_tasks): """ remove_if_in_current_tasks will remove a job if it exists within the current_tasks hash :param arn: string AWS Resource Name to check for in current_tasks :param current_tasks: dict of the current_tasks for krampus """ for task in current_tasks: if task['aws_resource_name'] == arn: current_tasks.remove(task) @staticmethod def convicted(unique_id, current_tasks): """ convicted returns whether the current job in question has already been judged and needs to be actioned by krampus :param unique_id: string unique_id hash representation of a job :param current_tasks: dict of the current_tasks in krampus :return: boolean of whether the aws resource is to be actioned """ for task in current_tasks: if task.get('unique_id', '') == unique_id: return True return False @staticmethod def whitelist_match(arn, whitelist): """ whitelist_match returns whether the whitelist has a fn-match of the arn in question. 
:param arn: string AWS Resource Name to check for in current_tasks :param whitelist: dict of the krampus whitelist :return: booelean of whether the arn is on the whitelist. """ for pattern in whitelist.keys(): if fnmatch.fnmatch(arn, pattern): return True return False class Justice(object): """ The Judge that serves the Jury's verdict to Krampus. The Judge class faciliates the actions to be made for any set of issues found for a security_monkey item. """ __metaclass__ = custom_alerter.AlerterType TASK_KEY = os.getenv('TASK_KEY') TASKS_FILE_NAME = os.getenv('TASKS_FILE_NAME') WHITELIST_KEY = os.getenv('WHITELIST_KEY') WHITELIST_FILE_NAME = os.getenv('WHITELIST_FILE_NAME') LOGS_FILE_NAME = "{0}.json".format(datetime.datetime.now().strftime('%Y-%m-%d')) def report_watcher_changes(self, watcher): """ report_watcher_changes must exist for report_auditor_changes to be invoked within the SecMonkey Auditor. This mimics the existing custom alerter documentation in SecurityMonkey:Develop as alerters can still work to perfom actions with watcher events as well as auditor events. """ for item in watcher.changed_items: pass def report_auditor_changes(self, auditor): """ Primary Driver for the Justice Engine. We accumulate scores for a specific resource and determine if it needs to be actioned. Alerters only use the confirmed_new_issues and confirmed_fixed_issues item fields. The Game Plan: 1. Gather the current tasks 2. Remove the fixed items from the current tasks 3. Calculate the current score from new and existing issues for all items 4 If the current score is larger than or equal to the required thresholds we will update the tasks file. """ notify = Notify() app.logger.debug("S3 Connection established.") app.logger.debug("Collecting existing items.") current_tasks = notify.get_s3_key(Justice.TASKS_FILE_NAME) if not current_tasks: current_tasks = {Justice.TASK_KEY: []} app.logger.debug("Collecting whitelisted items.") whitelist = notify.get_s3_key(Justice.WHITELIST_FILE_NAME) if not whitelist: whitelist = {Justice.WHITELIST_KEY: {}} app.logger.debug("Collecting log file \"{0}\"".format(Justice.LOGS_FILE_NAME)) logs = notify.get_s3_key(Justice.LOGS_FILE_NAME) if not logs: logs = [] new_tasks = [] app.logger.debug("Beginning current audit") current_run_audit_time = Jury.get_current_time() for item in auditor.items: app.logger.debug("changes in {0}. Auditing".format(item.name)) score = Jury.calc_score(item.audit_issues) # remove_if_in_current_tasks lets Krampus ignore those who have atoned Jury.remove_if_in_current_tasks(Jury.get_case_insensitive_arn(item), current_tasks[Justice.TASK_KEY]) if Jury.should_be_actioned(score): jobs = Jury.build_krampus_jobs_for_item(score, item, current_tasks[Justice.TASK_KEY], whitelist) logs.extend(jobs) for job in jobs: if not job['is_whitelisted']: new_tasks.extend(jobs) new_tasks.extend(current_tasks[Justice.TASK_KEY]) app.logger.debug("Tasks are updated locally.") app.logger.debug("{0} Tasks to be processed".format( len(new_tasks))) if new_tasks != []: app.logger.debug("Pushing tasks to s3.") notify.write_to_s3_object(Justice.TASKS_FILE_NAME, json.dumps({Justice.TASK_KEY: new_tasks})) if logs != []: app.logger.debug("Pushing logs to s3") notify.write_to_s3_object(Justice.LOGS_FILE_NAME, json.dumps(logs)) app.logger.debug("Sending Alerts to Account Owners.") schedule_krampus_alerts.s(current_run_audit_time) app.logger.debug("Justice Engine Complete. Closing.")
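A rough sketch of the scoring path through Jury. The thresholds come from environment variables read at import time, so they must be set first; the values below are invented for illustration, not the project's real policy.

import os

# Placeholder thresholds so the class attributes resolve at import time.
os.environ.setdefault("KILL_THRESHOLD", "10")
os.environ.setdefault("DISABLE_THRESHOLD", "5")
os.environ.setdefault("KILL_RESPONSE_DELTA", "3600")
os.environ.setdefault("DISABLE_RESPONSE_DELTA", "86400")

# With those values, Jury.justice maps an accumulated issue score to an
# action, and when_to_action schedules it:
for score in (12, 7, 2):
    action = Jury.justice(score)          # "kill" / "disable" / "ignore"
    print(score, action)
    if Jury.should_be_actioned(score):
        print("  scheduled for:", Jury.when_to_action(action))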
39.313544
131
0.612081
20,743
0.978905
0
0
13,138
0.620009
0
0
10,277
0.484993
840ab1d9437aeb791d935b51fa2d0357a65758ff
623
py
Python
bot/markups/inline_keyboards.py
Im-zeus/Stickers
f2484a1ecc9a3e4a2029eaadbde4ae1b0fe74536
[ "MIT" ]
44
2018-10-30T14:47:14.000Z
2022-03-26T15:17:52.000Z
bot/markups/inline_keyboards.py
Im-zeus/Stickers
f2484a1ecc9a3e4a2029eaadbde4ae1b0fe74536
[ "MIT" ]
37
2018-11-09T11:51:15.000Z
2021-12-27T15:08:48.000Z
bot/markups/inline_keyboards.py
Im-zeus/Stickers
f2484a1ecc9a3e4a2029eaadbde4ae1b0fe74536
[ "MIT" ]
38
2019-03-27T21:12:23.000Z
2022-01-08T07:57:39.000Z
# noinspection PyPackageRequirements from telegram import InlineKeyboardMarkup, InlineKeyboardButton class InlineKeyboard: HIDE = None REMOVE = None @staticmethod def static_animated_switch(animated=False): static_button = InlineKeyboardButton( '{} normal'.format('☑️' if animated else '✅'), callback_data='packtype:static' ) animated_button = InlineKeyboardButton( '{} animated'.format('✅' if animated else '☑️'), callback_data='packtype:animated' ) return InlineKeyboardMarkup([[static_button, animated_button]])
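A usage sketch for the keyboard factory above; bot and chat_id are placeholders for objects the calling handler already has.

markup = InlineKeyboard.static_animated_switch(animated=True)
bot.send_message(chat_id=chat_id,
                 text="Choose the pack type:",
                 reply_markup=markup)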
29.666667
71
0.658106
531
0.83622
0
0
470
0.740157
0
0
122
0.192126
840d053d29d25ef335ed6bf8148849bf05df3d8b
596
py
Python
guitar-package/guitar/guitar/fetcher/__init__.py
django-stars/guitar
9bddfd2d7b555c97dd9470b458a5f43bd805b026
[ "MIT" ]
null
null
null
guitar-package/guitar/guitar/fetcher/__init__.py
django-stars/guitar
9bddfd2d7b555c97dd9470b458a5f43bd805b026
[ "MIT" ]
null
null
null
guitar-package/guitar/guitar/fetcher/__init__.py
django-stars/guitar
9bddfd2d7b555c97dd9470b458a5f43bd805b026
[ "MIT" ]
null
null
null
import urllib2 import json FAKE_PACKAGES = ( 'south', 'django-debug-toolbar', 'django-extensions', 'django-social-auth', ) class GuitarWebAPI(object): def __init__(self, url): self.url = url def search(self, q): url = self.url + 'search/' + q + '/' res = urllib2.urlopen(url) return json.loads(res.read()) def get_config(self, package, version=None): url = self.url + 'search/' + package + '/' print url res = urllib2.urlopen(url) print res fetcher = GuitarWebAPI('http://localhost:8000/api/v1/')
20.551724
55
0.587248
395
0.662752
0
0
0
0
0
0
123
0.206376
840d5087c07149f28ccd99ef85cfdb7e07ab4198
1,005
py
Python
src/deterministicpasswordgenerator/compile.py
jelford/deterministic-password-generator
ad839a2e0d82e1742227a686c248d2ad03ef2fc1
[ "MIT" ]
1
2016-08-22T22:48:50.000Z
2016-08-22T22:48:50.000Z
src/deterministicpasswordgenerator/compile.py
jelford/deterministic-password-generator
ad839a2e0d82e1742227a686c248d2ad03ef2fc1
[ "MIT" ]
null
null
null
src/deterministicpasswordgenerator/compile.py
jelford/deterministic-password-generator
ad839a2e0d82e1742227a686c248d2ad03ef2fc1
[ "MIT" ]
null
null
null
import zipfile from getpass import getpass import os import stat import tempfile from os import path from .crypto import encrypt def compile_ruleset(ruleset_path, ruleset_encryption_password=None, output_path=None): output_path = output_path or os.getcwd() ruleset_encryption_password = ruleset_encryption_password or getpass('Password (used to encrypt compiled ruleset):') ruleset_name = path.basename(ruleset_path) with tempfile.SpooledTemporaryFile() as output_ruleset: with zipfile.PyZipFile(output_ruleset, mode='w') as ruleset: ruleset.writepy(pathname=ruleset_path) output_ruleset.seek(0) encrypted_output = encrypt(output_ruleset.read(), key=ruleset_encryption_password) compiled_ruleset_output_path = path.join(output_path, '{ruleset}.dpgr'.format(ruleset=ruleset_name)) with open(compiled_ruleset_output_path, 'wb') as output: os.chmod(compiled_ruleset_output_path, stat.S_IREAD) output.write(encrypted_output)
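A hypothetical invocation of the function above; the paths and passphrase are illustrative only.

# Compiles /path/to/my_ruleset into an encrypted, read-only
# /tmp/my_ruleset.dpgr file.
compile_ruleset(
    ruleset_path='/path/to/my_ruleset',
    ruleset_encryption_password='example-passphrase',
    output_path='/tmp',
)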
34.655172
120
0.769154
0
0
0
0
0
0
0
0
69
0.068657
840ed8b2d962e67e5075227c8b5fb7a2d2b1513b
553
py
Python
python/dp/min_cost_climbing_stairs.py
googege/algo-learn
054d05e8037005c5810906d837de889108dad107
[ "MIT" ]
153
2020-09-24T12:46:51.000Z
2022-03-31T21:30:44.000Z
python/dp/min_cost_climbing_stairs.py
googege/algo-learn
054d05e8037005c5810906d837de889108dad107
[ "MIT" ]
null
null
null
python/dp/min_cost_climbing_stairs.py
googege/algo-learn
054d05e8037005c5810906d837de889108dad107
[ "MIT" ]
35
2020-12-22T11:07:06.000Z
2022-03-09T03:25:08.000Z
from typing import List


# Min Cost Climbing Stairs: from each step you pay its cost and may climb
# one or two steps; you may start from step 0 or step 1.
class Solution:

    def minCostClimbingStairs_1(self, cost: List[int]) -> int:
        dp = [0 for _ in range(len(cost))]
        dp[0], dp[1] = cost[0], cost[1]
        for i in range(2, len(cost)):
            dp[i] = min(dp[i - 1], dp[i - 2]) + cost[i]
        return min(dp[-1], dp[-2])

    def minCostClimbingStairs_2(self, cost: List[int]) -> int:
        prev, back = 0, 0
        for i in range(len(cost)):
            prev, back = back, min(prev, back) + cost[i]
        return min(prev, back)
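A quick check of both implementations against the standard examples for this problem:

s = Solution()
print(s.minCostClimbingStairs_1([10, 15, 20]))                          # 15
print(s.minCostClimbingStairs_2([1, 100, 1, 1, 1, 100, 1, 1, 100, 1]))  # 6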
26.333333
62
0.529837
505
0.884413
0
0
0
0
0
0
29
0.050788
840f7e43205d6e7a06e7d699111b144ac79f0338
10,289
py
Python
pages/graph.py
lmason98/PyGraph
22d734cfd97333578c91ba4e331716df0aec668e
[ "MIT" ]
null
null
null
pages/graph.py
lmason98/PyGraph
22d734cfd97333578c91ba4e331716df0aec668e
[ "MIT" ]
null
null
null
pages/graph.py
lmason98/PyGraph
22d734cfd97333578c91ba4e331716df0aec668e
[ "MIT" ]
null
null
null
""" File: pages/page.py Author: Luke Mason Description: Main part of the application, the actual graph page. """ # Application imports from message import log, error, success from settings import APP_NAME, COLOR, FONT, FONT_SIZE, SCREEN_WIDTH, SCREEN_HEIGHT, WIDTH, HEIGHT, PAD, _QUIT from sprites.vertex import Vertex from sprites.edge import Edge from pages.page import Page from graph import Graph as G # Pygame imports from pygame import draw, sprite, event, mouse, display, init, key, MOUSEBUTTONUP, MOUSEBUTTONDOWN, MOUSEMOTION, QUIT, \ KEYDOWN, K_BACKSPACE, K_DELETE, KMOD_SHIFT # Python imports from math import atan2, degrees, cos, sin class GraphPage(Page): def __init__(self, screen): Page.__init__(self, screen) self.second_click = False self.moving = False self.collision = False self.selected_vertices = [] self.selected_edges = [] self.vertices = sprite.Group() self.edges = [] # Edges arent sprites in the same way that vertices are self.last_clicked_vertex = None self.show_labels = False self.graph = G() # Actual graph logic def add_vertex(self, x: int, y: int): """ Attempts to add a new vertex, returns True if successful, False if it is colliding with an existing vertex. """ new_v = Vertex(x=x, y=y) self.collision = False for v in self.vertices: if sprite.collide_rect(new_v, v): error("Vertex placement collision detected!") self.collision = True if not self.collision: success(f'Adding vertex {new_v}') self.vertices.add(new_v) return not self.collision def add_edge(self, v1: Vertex, v2: Vertex) -> None: """ Adds an edge between vertices v1 and v2 Here edges in the list are a dict={'edge': edge, 'count': n} """ e = Edge(v1, v2) found = False # Try to find in list and update count for _e in self.edges: if _e.get('edge') == e: # We can do this with the __eq__ definition on the Edge class _e.update({'count': int(_e.get('count'))+1}) # log(f'{_e} update count={_e.get("count")}') found = True break # Otherwise insert with count=1 if not found: self.edges.append({'edge': e, 'count': 1}) # log(f'{e} insert count=1') v1.edges.append(e) v2.edges.append(e) success(f'Add edge {e}') def edge_count(self): """ Since self.edges is a list of dicts defining parallel edges, simply len(self.edges) is misleading. 
""" total_count = 0 for edge in self.edges: total_count += edge.get('count') return total_count def remove_edge(self, edge) -> bool: """ Removes an edge from the edge list """ found = False for e in self.edges: if e.get('edge') == edge: self.edges.remove(e) found = True break return found def delete_vertices(self): for sv in self.selected_vertices: log('deleting sv :', sv) x, y = sv.get_pos() self.vertices.remove(sv) # Remove any edges connected to this removed vertex for e in self.edges: if e.get('edge') in sv.edges: self.edges.remove(e) self.last_clicked_vertex = None def delete_edges(self): for se in self.selected_edges: for e in self.edges: if e.get('edge') == se: log('deleteing se:', se) self.edges.remove(e) def stats(self, font): """ Draws the graph stats stats, i.e., total vertex and edge count """ v_count = f'N={len(self.vertices)}' # N e_count = f'M={self.edge_count()}' # M v_count_rendered = font.render(str(v_count), False, COLOR.get('white'), True) e_count_rendered = font.render(str(e_count), False, COLOR.get('white'), True) return {'text': v_count_rendered, 'size': font.size(str(v_count))}, \ {'text': e_count_rendered, 'size': font.size(str(e_count))} def handle_click(self, x, y): """ Handles the logic when mouse is clicked, this logic is quite complex as it includes, - placing a vertex (single click anywhere on app window where there does not already exist a vertex) - moving a vertex (click and drag a vertex) - adding an edge between two vertices (single click two vertices in a row) """ self.collision = False button_clicked = False edge_clicked = False for b in self.buttons: if b.hovered(x, y): log(f'button clicked={b}') b.onclick() button_clicked = True if not button_clicked: for e in self.edges: edge = e.get('edge') if edge.hovered(x, y): edge_clicked = True if not button_clicked and not edge_clicked: for v in self.vertices: if v.rect.collidepoint(x, y): self.collision = True log('====== vertex click:', v) # Handles vertex move (self.moving and v.drag flipped on MOUSEBUTTONUP) self.moving = True v.drag = True # Click to select v.selected = True v.set_color(COLOR.get('focus')) self.selected_vertices.clear() self.selected_edges.clear() self.selected_vertices.append(v) # If last clicked vertex if self.last_clicked_vertex and v and self.last_clicked_vertex != v: self.add_edge(self.last_clicked_vertex, v) self.last_clicked_vertex = None log('clear last clicked 1') elif self.last_clicked_vertex and v and self.last_clicked_vertex == v: log('ADD LOOP!') else: self.last_clicked_vertex = v log('set last clicked') # If selected vertex and not a collision, clear selected vertex if not self.collision and len(self.selected_vertices) > 0: self.selected_vertices.clear() # If selected edge and not a collision, clear selected edge elif not self.collision and len(self.selected_edges) > 0: self.selected_edges.clear() # Otherwise add new vertex elif not self.collision: self.add_vertex(x, y) # Mousedown not moving, add vertex self.last_clicked_vertex = None def poll_events(self): """ Graph page event polling (Handles any sort of input) - Single click anywhere on screen to add a new vertex - Delete or backspace to delete selected vertex """ x, y = mouse.get_pos() for e in event.get(): if e.type == QUIT: return _QUIT # Mouse down elif e.type == MOUSEBUTTONDOWN: self.handle_click(x, y) # Mouse up elif e.type == MOUSEBUTTONUP: # If mouse release and vertex is being dragged, stop dragging (placing a moved vertex) dragging = False for v in self.vertices: if v.drag: dragging = True v.drag = False 
self.moving = False if v.rect.collidepoint(x, y) and self.last_clicked_vertex and v and self.last_clicked_vertex != v: self.add_edge(self.last_clicked_vertex, v) # Handling edge placement on mouse button up, so we do not place an edge when draggin a vertex if not dragging: for e in self.edges: edge = e.get('edge') if edge.hovered(x, y): self.selected_edges.clear() self.selected_vertices.clear() self.selected_edges.append(edge) # Mouse moving elif e.type == MOUSEMOTION: for v in self.vertices: # Handles vertex drag as it is being dragged if v.drag: v.set_pos(x, y) # Focus if mouseover if v.rect.collidepoint(x, y): v.set_color(COLOR.get('focus')) elif v not in self.selected_vertices: v.set_color(COLOR.get('white')) for _e in self.edges: edge = _e.get('edge') if edge.hovered(x, y): edge.set_color(COLOR.get('focus')) elif edge not in self.selected_edges: edge.set_color(COLOR.get('white')) elif e.type == KEYDOWN: # (Delete or backspace key) Delete selected vertices if e.key == K_BACKSPACE or e.key == K_DELETE: self.delete_vertices() self.delete_edges() self.moving = False def draw_edges(self): """ Draw the edges (have to do this manually as pygame sprite did not quite fit for this use case) """ mult = 6 # distance between edges for e in self.edges: total_count = e.get('count') for c in range(0, e.get('count')): edge = e.get('edge') p1, p2 = edge.v1.get_pos(), edge.v2.get_pos() ang = degrees(atan2(p2[1] - p1[1], p2[0] - p1[0])) # Logic to place parallel edges in clear visible manner despite angle between # the vertices. (This angle will change as user moves vertices around) x_mult, y_mult = self.handle_point_angle_eq(ang, mult) p1 = (p1[0] + edge.v1.radius + x_mult*c, p1[1] + edge.v1.radius + y_mult*c) p2 = (p2[0] + edge.v2.radius + x_mult*c, p2[1] + edge.v2.radius + y_mult*c) draw.line(self.screen, edge.color, p1, p2) def handle_point_angle_eq(self, ang, dist) -> (int, int): """ Handles the angle point code to keep draw_edges function clean It returns x, y multiple for distance between parallel edges based on the angle between the vertices so that parallel edges can always be displayed as parallel. """ # Handles sign of ranges we check to reduce repeated code sign = 1 if ang < 0: sign = -1 # This algorithm is likely really ugly... I know there exists a more elegant way # to do this. 
if 45 <= ang <= 135 or -135 <= ang <= -45: return dist, 0 elif -45 <= ang <= 45 or ang >= 135 or ang <= -135: return 0, dist else: print('======== other ang?') return dist, dist def toggle_labels(self): print('======== toggling labels') self.show_labels = not self.show_labels def draw_vertices(self, font): """ Draws the vertices and handles vertex labels """ self.vertices.draw(self.screen) # Draw vertices if self.show_labels: i = 1 for v in self.vertices: x, y = v.get_pos() text = font.render(str(i), False, COLOR.get('white'), True) self.screen.blit(text, (x + PAD*1.5, y - PAD*1.5)) i += 1 def think(self, font): """ Graph page think function, this function is called every tick """ q = self.poll_events() n, m = self.stats(font) # n, m are dicts, take a look at render_stats to see structure self.screen.fill(COLOR.get('black')) # Background color self.draw_vertices(font) self.draw_edges() # Draw edges self.draw_buttons(font) # Draw buttons (inherited from Page class) self.screen.blit(n.get('text'), (PAD, PAD)) # Draw N=vertex count and M=edge count self.screen.blit(m.get('text'), (WIDTH - PAD - m.get('size')[0], PAD)) # Set to right side of screen display.flip() # Weird pygame call required to display window if q == _QUIT: return q
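The parallel-edge offset rule in handle_point_angle_eq can be checked on its own. This standalone sketch re-states the same mapping (degrees in, x/y spacing multipliers out) outside the class; it is illustrative, not part of the application:

from math import atan2, degrees

# Edges that run mostly vertically are offset horizontally, and vice versa,
# so parallel copies stay visibly parallel as vertices move.
def offset_for_angle(ang, dist=6):
    if 45 <= ang <= 135 or -135 <= ang <= -45:
        return dist, 0
    return 0, dist

for p1, p2 in [((0, 0), (0, 100)), ((0, 0), (100, 0)), ((0, 0), (70, 70))]:
    ang = degrees(atan2(p2[1] - p1[1], p2[0] - p1[0]))
    print(p1, p2, offset_for_angle(ang))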
28.035422
119
0.666051
9,637
0.936631
0
0
0
0
0
0
3,644
0.354165
8412473069d247b24941bba95ee50eaf3af4a33f
521
py
Python
tests/forked/test_update.py
rarity-adventure/rarity-names
e940b8bea296823faf003ecb9ab8735820ff54d1
[ "MIT" ]
null
null
null
tests/forked/test_update.py
rarity-adventure/rarity-names
e940b8bea296823faf003ecb9ab8735820ff54d1
[ "MIT" ]
null
null
null
tests/forked/test_update.py
rarity-adventure/rarity-names
e940b8bea296823faf003ecb9ab8735820ff54d1
[ "MIT" ]
2
2021-09-22T01:34:17.000Z
2022-02-09T06:04:51.000Z
import brownie def test_update(n1, barb, barb2, owner): tx = n1.update_capitalization(1, "coNan", {'from': owner}) assert tx.events["NameUpdated"].values() == (1, "Conan", "coNan") assert n1.summoner_name(1) == "coNan" def test_update_fails(n1, barb, barb2, owner, accounts): with brownie.reverts("!owner or approved name"): n1.update_capitalization(1, "coNan", {'from': accounts[5]}) with brownie.reverts("name different"): n1.update_capitalization(1, "Conan1", {'from': owner})
32.5625
69
0.662188
0
0
0
0
0
0
0
0
115
0.220729
8413787081f15c4a41a8417aa64436712a8f0d85
603
py
Python
pakcrack/__init__.py
Alpha-Demon404/RE-14
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
[ "MIT" ]
39
2020-02-26T09:44:36.000Z
2022-03-23T00:18:25.000Z
pakcrack/__init__.py
B4BY-DG/reverse-enginnering
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
[ "MIT" ]
15
2020-05-14T10:07:26.000Z
2022-01-06T02:55:32.000Z
pakcrack/__init__.py
B4BY-DG/reverse-enginnering
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
[ "MIT" ]
41
2020-03-16T22:36:38.000Z
2022-03-17T14:47:19.000Z
# Filenames : <tahm1d> # Python bytecode : 2.7 # Time decompiled : Thu Sep 10 23:29:38 2020 # Selector <module> in line 4 file <tahm1d> # Timestamp in code: 2020-09-02 17:33:14 import os, sys, time from os import system from time import sleep def htrprint(s): for t in s + '\n': sys.stdout.write(t) sys.stdout.flush() sleep(0.01) def menu(): system('rm -rf *.pyc *.dis') htrprint(' \x1b[1;96mHello Bro !!') htrprint('\n \x1b[1;96mExcute \x1b[1;92mpython2 crack.py \x1b[1;96mto run this tool !\x1b[1;97m') sleep(1) if __name__ == '__main__': menu()
21.535714
101
0.623549
0
0
0
0
0
0
0
0
318
0.527363
8414f299e33cb1d7f5931b3a7e8db59199dffc99
4,165
py
Python
MstarHe2R/components/models.py
IzayoiRin/MstarHe2R
938d83acdfa5ec4464cf9113fef104a6e80ad662
[ "MIT" ]
null
null
null
MstarHe2R/components/models.py
IzayoiRin/MstarHe2R
938d83acdfa5ec4464cf9113fef104a6e80ad662
[ "MIT" ]
2
2021-06-08T21:19:41.000Z
2021-09-08T01:54:27.000Z
MstarHe2R/components/models.py
IzayoiRin/MstarHe2R
938d83acdfa5ec4464cf9113fef104a6e80ad662
[ "MIT" ]
null
null
null
import os

import numpy as np
import pandas as pd
import torch as th

from mstarhe.core.nn.models import PrettyFeedForward
from MstarHe2R.components.dataloader import Mstar2RDataLoader


__IMG_SIZE__ = 128 * 128


class MSTARNet(PrettyFeedForward):

    data_loader_class = Mstar2RDataLoader
    # model_graph_class = ANNetGraph
    model_graph_class = None
    optimizer_class = th.optim.Adam
    loss_func_class = th.nn.NLLLoss

    loader_params = {
        "train": {},
        "test": {}
    }

    # hyper-parameters
    lr = 1e-3  # learning rate
    l1_lambda = 0.5  # l1-penalty coef
    l2_lambda = 0.01  # l2-penalty coef
    step = 10  # measure_progress step k
    patient = 3  # early stopping patient
    alpha = 0.5  # early stopping threshold

    def __init__(self, ofea, **kwargs):
        super(MSTARNet, self).__init__(ifea=__IMG_SIZE__, ofea=ofea, **kwargs)
        self.CHECK_POINT = 'cp{}ep%s.tar'.format(self.model_graph_class.__name__)
        self._acc = list()
        self.acc_curve = list()
        self._loss = list()
        self.vloss_curve = list()
        self.tloss_curve = list()
        self.eval_ret = list()
        self.pre_accuracy = None
        self.test_samples_ = list()

    def get_data_loader(self, train):
        p = self.loader_params['train'] if train else self.loader_params['test']
        loader_factory = self.data_loader_class(train=train)
        if train:
            p["split"] = True
            return loader_factory(**p)
        p["shuffle"] = False
        loader = loader_factory(**p)
        self.test_samples_ = np.array(loader_factory.mstar.samples).reshape(-1, 1)
        return loader

    @property
    def epoch_acc(self):
        return np.mean(self._acc)

    @property
    def epoch_loss(self):
        return np.mean(self._loss)

    def analysis(self, label, ypre, preP):
        """
        :param label: size(batch) true class
        :param ypre: size(batch) pre class
        :param preP: size(batch) pre prob
        :return:
        """
        self._acc.append(self.accuracy(ypre, label).item())
        if not getattr(self, 'validate', False):
            self.eval_ret.append(th.stack([label.float(), ypre.float(), preP], dim=1))

    def train_batch(self, dl):
        super(MSTARNet, self).train_batch(dl)
        self.tloss_curve.append(self.epoch_loss)

    def eval_batch(self, dl):
        self._acc = list()
        # eval testing or validating batch
        super(MSTARNet, self).eval_batch(dl)
        print('Average Accuracy: %s' % self.epoch_acc)
        if getattr(self, 'validate', False):
            self.acc_curve.append(self.epoch_acc)
            self.vloss_curve.append(self.epoch_loss)
        else:
            ret = th.cat(self.eval_ret, dim=0)
            # ret columns are [label, predict, prob]; compare the predicted
            # class column against the label column.
            self.pre_accuracy = self.accuracy(ret[:, 1], ret[:, 0])
            path = os.path.join(self.csv_path, 'EvalCurves%s.txt' % self.model_graph_class.__name__)
            pd.DataFrame(np.hstack([self.test_samples_, ret.cpu().numpy()]),
                         columns=['objects', 'labels', 'predict', 'prob'])\
                .to_csv(path, sep='\t', index=True, header=True)

    def model_persistence(self):
        super(MSTARNet, self).model_persistence()
        curves = {
            "Accuracy": self.acc_curve,
            "TrLoss": self.tloss_curve,
            "VaLoss": self.vloss_curve
        }
        path = os.path.join(self.csv_path, 'EpochCurves%s.txt' % self.model_graph_class.__name__)
        df = pd.DataFrame(list(curves.values())).T
        df.columns = curves.keys()
        df.to_csv(path, sep='\t', index=True, header=True)


def _example():
    Net = MSTARNet
    Net.device = None
    from components.graphs.graph2 import TestL4MSTARANNetGraph
    # Each entry must be a (graph_class, params) pair for the unpacking
    # below; these parameter values are placeholders for illustration.
    G = [(TestL4MSTARANNetGraph, {"aph": 0.5, "stp": 10, "n": 100, "cp": None})]
    for g, params in G:
        Net.model_graph_class = g
        Net.alpha = params["aph"]
        Net.step = params["stp"]
        net = Net(3, reg=None, dropout=False)
        print(net.graph.__class__.__name__)
        # print(net.get_data_loader(False))
        # print(len(net.test_samples_))
        net.train(params['n'], 'PQ', checkpoint=params['cp'])


if __name__ == '__main__':
    _example()
32.038462
100
0.614646
3,391
0.814166
0
0
138
0.033133
0
0
668
0.160384
8418475e8b117a7899349c6df5fd5aeff3d447b2
996
py
Python
4 - observer pattern/api/event_system.py
lucascionis/betterpython
ab8db8c016ff0bccc443443740a26bccb70402f3
[ "MIT" ]
null
null
null
4 - observer pattern/api/event_system.py
lucascionis/betterpython
ab8db8c016ff0bccc443443740a26bccb70402f3
[ "MIT" ]
null
null
null
4 - observer pattern/api/event_system.py
lucascionis/betterpython
ab8db8c016ff0bccc443443740a26bccb70402f3
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod

'''Comments
In the original solution only functions were used to implement the event
system (observer pattern). In this implementation I wanted to write classes
(to stay as near as possible to the pattern).
It is surely better to use Python's first-class functions to create the event
handlers (basically this is what I did: I created handler classes to write
different implementations of the update method).
'''

class EventListener(ABC):

    @abstractmethod
    def update(self, data):
        pass

class EventSystem():

    def __init__(self):
        self.subscribers = {}

    def add_subscriber(self, event: str, subscriber: EventListener):
        if event in self.subscribers:
            self.subscribers[event].append(subscriber)
            return
        self.subscribers[event] = [subscriber,]

    def trigger_event(self, event: str, data):
        for subscriber in self.subscribers[event]:
            subscriber.update(data)
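A usage sketch for the classes above; EmailListener is a made-up example listener, not part of the original module.

class EmailListener(EventListener):
    def update(self, data):
        print(f"sending email about: {data}")

events = EventSystem()
events.add_subscriber("user_registered", EmailListener())
events.trigger_event("user_registered", {"email": "user@example.com"})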
25.538462
68
0.696787
536
0.538153
0
0
56
0.056225
0
0
415
0.416667
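A minimal usage sketch for the event system record above; EmailAlert and the 'user_registered' event are illustrative names, not part of the original file.

from event_system import EventSystem, EventListener  # assumed module name


class EmailAlert(EventListener):
    # hypothetical concrete listener: just prints the payload
    def update(self, data):
        print("sending email about:", data)


events = EventSystem()
events.add_subscriber("user_registered", EmailAlert())
events.trigger_event("user_registered", {"name": "alice"})
# -> sending email about: {'name': 'alice'}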
84188f6567eb4fd0ad0c89e940fd5e2fe14303c7
3,056
py
Python
predict_yolo3_disconnect.py
RentadroneCL/model-definition
9dab1f1a808a1efc54d64144745277396c145ff7
[ "MIT" ]
2
2020-01-22T19:54:16.000Z
2020-02-07T12:20:17.000Z
predict_yolo3_disconnect.py
RentadroneCL/model-definition
9dab1f1a808a1efc54d64144745277396c145ff7
[ "MIT" ]
4
2020-06-03T00:27:22.000Z
2020-07-15T17:15:23.000Z
predict_yolo3_disconnect.py
RentadroneCL/model-definition
9dab1f1a808a1efc54d64144745277396c145ff7
[ "MIT" ]
1
2020-01-21T22:38:22.000Z
2020-01-21T22:38:22.000Z
#! /usr/bin/env python

import time
import os
import argparse
import json
import cv2
import sys
sys.path += [os.path.abspath('keras-yolo3-master')]

from utils.utils import get_yolo_boxes, makedirs
from utils.bbox import draw_boxes
from tensorflow.keras.models import load_model
from tqdm import tqdm
import numpy as np
from panel_disconnect import disconnect


def _main_(args):
    config_path = args.conf
    input_path = args.input
    output_path = args.output

    with open(config_path) as config_buffer:
        config = json.load(config_buffer)

    makedirs(output_path)

    ###############################
    #   Set some parameters
    ###############################
    net_h, net_w = 416, 416  # a multiple of 32, the smaller the faster
    obj_thresh, nms_thresh = 0.5, 0.3

    ###############################
    #   Load the model
    ###############################
    os.environ['CUDA_VISIBLE_DEVICES'] = config['train']['gpus']
    infer_model = load_model(config['train']['saved_weights_name'])

    ###############################
    #   Predict bounding boxes
    ###############################
    image_paths = []

    if os.path.isdir(input_path):
        for inp_file in os.listdir(input_path):
            image_paths += [input_path + inp_file]
    else:
        image_paths += [input_path]

    image_paths = [inp_file for inp_file in image_paths
                   if (inp_file[-4:] in ['.jpg', '.png', 'JPEG'])]

    # the main loop
    times = []
    images = [cv2.imread(image_path) for image_path in image_paths]
    # print(images)
    start = time.time()

    # predict the bounding boxes
    boxes = get_yolo_boxes(infer_model, images, net_h, net_w,
                           config['model']['anchors'], obj_thresh, nms_thresh)
    boxes = [[box for box in boxes_image if box.get_score() > obj_thresh]
             for boxes_image in boxes]
    print('Elapsed time = {}'.format(time.time() - start))
    times.append(time.time() - start)

    boxes_disc = [disconnect(image, boxes_image, z_thresh=1.8)
                  for image, boxes_image in zip(images, boxes)]

    for image_path, image, boxes_image in zip(image_paths, images, boxes_disc):
        # print(boxes_image[0].score)
        # draw bounding boxes on the image using labels
        draw_boxes(image, boxes_image, ["disconnect"], obj_thresh)
        # plt.figure(figsize=(10, 12))
        # plt.imshow(I)

        # write the image with bounding boxes to file
        cv2.imwrite(output_path + image_path.split('/')[-1], np.uint8(image))

    # a with-block closes the file even on error and avoids shadowing
    # the built-in name `file`
    with open(os.path.join(args.output, 'time.txt'), 'w') as time_file:
        time_file.write('Average time:' + str(np.mean(times)))


if __name__ == '__main__':
    argparser = argparse.ArgumentParser(description='Predict with a trained yolo model')
    argparser.add_argument('-c', '--conf', help='path to configuration file')
    argparser.add_argument('-i', '--input',
                           help='path to an image, a directory of images, a video, or webcam')
    argparser.add_argument('-o', '--output', default='output/',
                           help='path to output directory')

    args = argparser.parse_args()
    _main_(args)
32.168421
113
0.629581
0
0
0
0
0
0
0
0
927
0.303338
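The script above is driven by argparse; a hedged sketch of invoking it programmatically instead of from the shell. The config and folder paths are placeholders and must exist, along with the trained weights referenced inside the config file.

import argparse
from predict_yolo3_disconnect import _main_  # the record above

args = argparse.Namespace(conf='config.json',  # placeholder config path
                          input='images/',     # placeholder image folder
                          output='output/')
_main_(args)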
8419172381c9e4256607a0db506cd791eeb0f296
11,655
py
Python
tenning/layers/resnet_block.py
guilherme9820/Tenning
c0fe7695ef3dd791ea1083f39d6b312266fb0512
[ "MIT" ]
null
null
null
tenning/layers/resnet_block.py
guilherme9820/Tenning
c0fe7695ef3dd791ea1083f39d6b312266fb0512
[ "MIT" ]
null
null
null
tenning/layers/resnet_block.py
guilherme9820/Tenning
c0fe7695ef3dd791ea1083f39d6b312266fb0512
[ "MIT" ]
null
null
null
import tensorflow.keras.constraints as constraints
from tensorflow.keras.layers import GlobalAveragePooling2D
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Conv2DTranspose
from tensorflow.keras.layers import LeakyReLU
from tensorflow.keras.layers import ReLU
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import Lambda
from tensorflow.keras.layers import Layer
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Add
from tensorflow_addons.layers import InstanceNormalization
from tensorflow_addons.layers import GroupNormalization
from tenning.generic_utils import get_object_config
from tenning.activations import Swish
import tensorflow as tf


class ResnetBlock(Layer):

    def __init__(self,
                 out_channels,
                 strides=1,
                 kernel_size=3,
                 trainable=True,
                 mode='identity',
                 initializer='he_normal',
                 normalization='instance_norm',
                 activation='leaky_relu',
                 groups=None,
                 squeeze_excitation=False,
                 squeeze_ratio=16,
                 **kwargs):
        # pop the custom kwargs before handing the rest to Layer.__init__,
        # which rejects unknown keyword arguments
        conv_constraint = kwargs.pop('conv_constraint', None)
        conv_constraint_arguments = kwargs.pop('conv_constraint_arguments', [])
        dense_constraint = kwargs.pop('dense_constraint', None)
        dense_constraint_arguments = kwargs.pop('dense_constraint_arguments', [])

        super().__init__(trainable=trainable, **kwargs)

        allowed_normalizations = ['batch_norm', 'instance_norm', 'group_norm']
        allowed_modes = ['identity', 'downsample', 'upsample']

        assert mode in allowed_modes, f"Invalid mode: {mode}"
        assert normalization in allowed_normalizations, f"Invalid normalization: {normalization}"

        if conv_constraint_arguments and not isinstance(conv_constraint_arguments, list):
            raise TypeError("'conv_constraint_arguments' must be a list")

        if dense_constraint_arguments and not isinstance(dense_constraint_arguments, list):
            raise TypeError("'dense_constraint_arguments' must be a list")

        if conv_constraint:
            conv_constraint = getattr(constraints, conv_constraint, None)(*conv_constraint_arguments)

        if dense_constraint:
            dense_constraint = getattr(constraints, dense_constraint, None)(*dense_constraint_arguments)

        self.out_channels = out_channels
        self.initializer = initializer
        self.mode = mode
        self.kernel_size = kernel_size
        self.strides = strides
        self.normalization = normalization
        self.groups = groups
        self.squeeze_excitation = squeeze_excitation
        self.squeeze_ratio = squeeze_ratio
        self.conv_constraint = conv_constraint
        self.dense_constraint = dense_constraint

        if normalization == 'group_norm':
            self.norm1 = GroupNormalization(groups=self.groups, name=self.name + '/norm1', trainable=self.trainable)
            self.norm2 = GroupNormalization(groups=self.groups, name=self.name + '/norm2', trainable=self.trainable)
            self.norm3 = GroupNormalization(groups=self.groups, name=self.name + '/norm3', trainable=self.trainable)
        elif normalization == 'instance_norm':
            self.norm1 = InstanceNormalization(name=self.name + '/norm1', trainable=self.trainable)
            self.norm2 = InstanceNormalization(name=self.name + '/norm2', trainable=self.trainable)
            self.norm3 = InstanceNormalization(name=self.name + '/norm3', trainable=self.trainable)
        else:
            self.norm1 = BatchNormalization(name=self.name + '/norm1', trainable=self.trainable)
            self.norm2 = BatchNormalization(name=self.name + '/norm2', trainable=self.trainable)
            self.norm3 = BatchNormalization(name=self.name + '/norm3', trainable=self.trainable)

        if activation == 'swish':
            self.relu1 = Swish(name=self.name + '/activation1')
            self.relu2 = Swish(name=self.name + '/activation2')
            self.relu3 = Swish(name=self.name + '/activation3')
        elif activation == 'leaky_relu':
            self.relu1 = LeakyReLU(name=self.name + '/activation1')
            self.relu2 = LeakyReLU(name=self.name + '/activation2')
            self.relu3 = LeakyReLU(name=self.name + '/activation3')
        else:
            self.relu1 = ReLU(name=self.name + '/activation1')
            self.relu2 = ReLU(name=self.name + '/activation2')
            self.relu3 = ReLU(name=self.name + '/activation3')

        self.in_conv = Conv2D(self.out_channels // 2, kernel_size=1, name=self.name + '/in_conv',
                              strides=1, kernel_constraint=conv_constraint, trainable=self.trainable,
                              padding='same', kernel_initializer=self.initializer)

        if mode == 'identity':
            # Keeps image dimensions (height and width) intact
            self.mid_conv = Conv2D(self.out_channels // 2, kernel_size=1, name=self.name + '/mid_conv',
                                   strides=1, trainable=self.trainable, padding='same',
                                   kernel_constraint=conv_constraint, kernel_initializer=self.initializer)
        elif mode == 'downsample':
            # Causes a reduction of the image dimensions. The new dimensions are calculated as follows:
            #   new_dim = floor((old_dim - kernel_size) / stride + 1)
            # where new_dim and old_dim are either image height or width
            self.mid_conv = Conv2D(self.out_channels // 2, kernel_size=self.kernel_size,
                                   name=self.name + '/mid_conv', strides=self.strides,
                                   trainable=self.trainable, padding='valid',
                                   kernel_constraint=conv_constraint, kernel_initializer=self.initializer)
        else:
            # Causes an increase of the image dimensions. The new dimensions are calculated as follows:
            #   new_dim = old_dim * stride + max(kernel_size - stride, 0)
            # where new_dim and old_dim are either image height or width
            self.mid_conv = Conv2DTranspose(self.out_channels // 2, kernel_size=self.kernel_size,
                                            name=self.name + '/mid_conv', strides=self.strides,
                                            trainable=self.trainable, padding='valid',
                                            kernel_constraint=conv_constraint, kernel_initializer=self.initializer)

        self.global_pool = None
        self.squeeze_dense1 = None
        self.squeeze_dense2 = None
        if self.squeeze_excitation:
            self.global_pool = GlobalAveragePooling2D(name=self.name + "/global_pool")
            self.squeeze_dense1 = Dense(self.out_channels // self.squeeze_ratio, activation='relu',
                                        kernel_initializer=self.initializer, kernel_constraint=dense_constraint,
                                        trainable=self.trainable, name=self.name + "/squeeze_dense1")
            self.squeeze_dense2 = Dense(self.out_channels, activation='sigmoid',
                                        kernel_constraint=dense_constraint, kernel_initializer=self.initializer,
                                        trainable=self.trainable, name=self.name + "/squeeze_dense2")

        self.out_conv = Conv2D(self.out_channels, kernel_size=1, name=self.name + '/out_conv',
                               strides=1, trainable=self.trainable, padding='same',
                               kernel_constraint=conv_constraint, kernel_initializer=self.initializer)

    def build(self, input_shape):
        if self.mode == 'identity':
            if input_shape[-1] != self.out_channels:
                # This mode is used when the image dimensions (height and width)
                # don't change, only the channel dimension
                self.shortcut = Conv2D(self.out_channels, kernel_size=1, name=self.name + '/shortcut',
                                       strides=1, trainable=self.trainable, padding='same',
                                       kernel_constraint=self.conv_constraint,
                                       kernel_initializer=self.initializer)
            else:
                # If the shapes are equal then return the input data itself
                self.shortcut = Lambda(lambda x: x, output_shape=input_shape, name=self.name + '/shortcut')
        elif self.mode == 'downsample':
            self.shortcut = Conv2D(self.out_channels, kernel_size=self.kernel_size,
                                   name=self.name + '/shortcut', strides=self.strides,
                                   trainable=self.trainable, padding='valid',
                                   kernel_constraint=self.conv_constraint,
                                   kernel_initializer=self.initializer)
        else:
            self.shortcut = Conv2DTranspose(self.out_channels, kernel_size=self.kernel_size,
                                            name=self.name + '/shortcut', strides=self.strides,
                                            trainable=self.trainable, padding='valid',
                                            kernel_constraint=self.conv_constraint,
                                            kernel_initializer=self.initializer)

    def call(self, input_tensor, training=True):
        norm1 = self.norm1(input_tensor, training=training)
        relu1 = self.relu1(norm1)
        in_conv = self.in_conv(relu1)

        norm2 = self.norm2(in_conv, training=training)
        relu2 = self.relu2(norm2)
        mid_conv = self.mid_conv(relu2)

        norm3 = self.norm3(mid_conv, training=training)
        relu3 = self.relu3(norm3)
        out_conv = self.out_conv(relu3)

        if self.squeeze_excitation:
            global_pool = self.global_pool(out_conv)
            squeeze_dense1 = self.squeeze_dense1(global_pool)
            squeeze_dense2 = self.squeeze_dense2(squeeze_dense1)
            out_conv = tf.keras.layers.Multiply()([out_conv, squeeze_dense2])

        shortcut = self.shortcut(input_tensor)
        add = Add(name=self.name + '/add')([out_conv, shortcut])
        return add

    def get_config(self):
        config = super().get_config()
        config.update({'out_channels': self.out_channels,
                       'initializer': self.initializer,
                       'mode': self.mode,
                       'kernel_size': self.kernel_size,
                       'strides': self.strides,
                       'trainable': self.trainable,
                       'normalization': self.normalization,
                       'groups': self.groups,
                       'squeeze_excitation': self.squeeze_excitation,
                       'squeeze_ratio': self.squeeze_ratio,
                       # 'conv_constraint': self.conv_constraint,
                       # 'dense_constraint': self.dense_constraint,
                       'name': self.name,
                       'norm1': get_object_config(self.norm1),
                       'norm2': get_object_config(self.norm2),
                       'norm3': get_object_config(self.norm3),
                       'relu1': get_object_config(self.relu1),
                       'relu2': get_object_config(self.relu2),
                       'relu3': get_object_config(self.relu3),
                       'global_pool': get_object_config(self.global_pool),
                       'squeeze_dense1': get_object_config(self.squeeze_dense1),
                       'squeeze_dense2': get_object_config(self.squeeze_dense2),
                       'in_conv': get_object_config(self.in_conv),
                       'mid_conv': get_object_config(self.mid_conv),
                       'out_conv': get_object_config(self.out_conv)})

        return config
51.8
163
0.620764
10,909
0.935993
0
0
0
0
0
0
1,845
0.158301
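A small usage sketch for the ResnetBlock layer above, wiring two blocks into a Keras model; the input shape and channel counts are arbitrary, and tensorflow_addons plus the tenning package are needed for the record's imports to resolve.

import tensorflow as tf
from tenning.layers.resnet_block import ResnetBlock  # module path as in the record

inputs = tf.keras.Input(shape=(64, 64, 3))                # arbitrary image size
x = ResnetBlock(32, mode='identity')(inputs)              # keeps height/width
x = ResnetBlock(64, mode='downsample', kernel_size=2, strides=2)(x)  # 64x64 -> 32x32
outputs = tf.keras.layers.GlobalAveragePooling2D()(x)
model = tf.keras.Model(inputs, outputs)
model.summary()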
841b950a79e8d2aae01b030de733c8d1017b6718
3,649
py
Python
libs/token.py
yareally/twitter-clone-python
1323c3fa4bf66f479a3092c09fb165a323eb1c85
[ "MIT" ]
1
2020-05-22T22:13:48.000Z
2020-05-22T22:13:48.000Z
libs/token.py
yareally/twitter-clone-python
1323c3fa4bf66f479a3092c09fb165a323eb1c85
[ "MIT" ]
null
null
null
libs/token.py
yareally/twitter-clone-python
1323c3fa4bf66f479a3092c09fb165a323eb1c85
[ "MIT" ]
null
null
null
# coding=utf-8
import os
import time

import scrypt

from libs.rediswrapper import UserHelper

try:
    xrange
except NameError:
    xrange = range


class Token(object):
    """
    @param user_id:
    @type user_id:
    @param password:
    @type password:
    """
    __BLOCK_SIZE = 256
    __TRANS_5C = "".join(chr(x ^ 0x5c) for x in xrange(256))
    __TRANS_36 = "".join(chr(x ^ 0x36) for x in xrange(256))
    __I_SALT = os.urandom(16).encode('base_64')
    __O_SALT = os.urandom(16).encode('base_64')

    def __init__(self, user_id, password=None):
        self.user_id = user_id
        # get or create some password to encrypt the user verification token
        self.password = password  # if password else self.redis.get('token_pass')

        if not self.password:
            salt = os.urandom(16).encode('base_64')
            self.password = scrypt.hash(os.urandom(24).encode('base_64'), salt)

    def generate_token(self):
        """
        Generates an encrypted token for validating a user
        @return: the encrypted token (a random value, the user id and the date as a timestamp)
        @rtype: str
        """
        # random value, user_id, timestamp
        values = '%s,%s,%s' % (os.urandom(16).encode('base_64'), self.user_id, time.time())
        return scrypt.encrypt(values, self.password)

    def generate_hmac(self, key, message):
        """
        @param key: The user's generated password
        @type key: str
        @param message: message to hash for client-server authentication
        @type message: str
        @return: the hash based message auth code (to verify against the client sent one)
        @rtype: str
        @see: http://en.wikipedia.org/wiki/Hash-based_message_authentication_code
        """
        if len(key) > self.__BLOCK_SIZE:
            salt = os.urandom(16).encode('base_64')
            key = scrypt.hash(key, salt)

        key += chr(0) * (self.__BLOCK_SIZE - len(key))
        # XOR the padded key with the HMAC outer/inner pads via the translation
        # tables built above; str.translate applies the per-byte XOR, which is
        # what operator.xor cannot do on strings
        o_key_pad = key.translate(self.__TRANS_5C)
        i_key_pad = key.translate(self.__TRANS_36)
        return scrypt.hash(o_key_pad + scrypt.hash(i_key_pad + message, self.__I_SALT), self.__O_SALT)

    def validate_token(self, client_token, server_token, expire_time=15):
        """
        @param client_token:
        @type client_token: str
        @param server_token:
        @type server_token: str
        @param expire_time: token lifetime in hours
        @type expire_time: int
        @return: True if still valid
        @rtype: bool
        """
        if client_token != server_token:
            return False

        tokens = scrypt.decrypt(client_token, self.password).split(',')

        if len(tokens) != 3:
            return False

        # tokens holds (random value, user_id, timestamp): the timestamp is
        # the third field
        expired = ((time.time() - float(tokens[2])) / 3600) >= expire_time

        if expired:
            return False
        return True


class RedisToken(Token):
    """
    @param user_id:
    @type user_id: int
    @param redis_connection:
    @type redis_connection: StrictRedis
    @param password:
    @type password: str
    """

    def __init__(self, user_id, redis_connection, password=None):
        """
        @param user_id:
        @type user_id: int
        @param redis_connection:
        @type redis_connection: StrictRedis
        @param password:
        @type password: str
        """
        # get or create some password to encrypt the user verification token
        self.redis = UserHelper(redis_connection, user_id)
        self.password = password if password else self.redis.get('token_pass')
        super(RedisToken, self).__init__(user_id, password)
28.960317
102
0.609482
3,473
0.951768
0
0
0
0
0
0
1,649
0.451905
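A short Python 2 usage sketch for the Token record above (the code relies on str.encode('base_64'), so it will not run on Python 3); the user id is arbitrary.

from libs.token import Token  # module path as in the record

t = Token(42)                  # 42 is an arbitrary user id
server_token = t.generate_token()
client_token = server_token    # e.g. echoed back by the client
print(t.validate_token(client_token, server_token))  # True while unexpired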
841dd327848fd2568a5c74230c7b659174fee507
2,961
py
Python
saefportal/datastores/util.py
harry-consulting/SAEF1
055d6e492ba76f90e3248b9da2985fdfe0c6b430
[ "BSD-2-Clause" ]
null
null
null
saefportal/datastores/util.py
harry-consulting/SAEF1
055d6e492ba76f90e3248b9da2985fdfe0c6b430
[ "BSD-2-Clause" ]
null
null
null
saefportal/datastores/util.py
harry-consulting/SAEF1
055d6e492ba76f90e3248b9da2985fdfe0c6b430
[ "BSD-2-Clause" ]
1
2020-12-16T15:02:52.000Z
2020-12-16T15:02:52.000Z
import json
from collections import defaultdict

import fastavro
import pandas as pd
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.urls import reverse

from datasets.models import Connection
from users.models import User


def get_supported_file_types():
    """Return a list of the viable file type extensions."""
    return ["csv", "avro", "parquet", "xlsx", "xls", "xlsm", "xlsb"]


def initialize_connection(datastore, connection_name, connection_owner_id, connection_type, request):
    """Create a connection and save the datastore on the connection object for later use."""
    owner = User.objects.get(id=connection_owner_id)
    connection = Connection.objects.create(name=connection_name, owner=owner, type=connection_type)

    connection.datastore = datastore
    connection.save()

    messages.success(request, "Connection was created.")
    return HttpResponseRedirect(reverse("datasets:index"))


def get_query(dataset, query):
    """Go through the potentially None-valued given dataset and query and extract the query."""
    if query:
        return query
    elif dataset.query:
        return dataset.query
    else:
        return f"SELECT * FROM {dataset.table}"


def structure_tables_views(tables, views):
    """Return a structured dictionary containing the given tables and views."""
    table_dict = defaultdict(list)
    for (schema, table) in tables:
        table_dict[schema].append({"value": f"{schema}.{table}", "display": table})

    view_dict = defaultdict(list)
    for (schema, view) in views:
        view_dict[schema].append({"value": f"{schema}.{view}", "display": view})

    return {"Tables": dict(table_dict), "Views": dict(view_dict)}


def convert_to_dataframe(file_type, data):
    """Convert the given bytes data into a dataframe based on the given file type."""
    if file_type == "csv":
        # sep=None lets pandas sniff the delimiter (requires the python engine)
        df = pd.read_csv(data, sep=None, engine="python")
    elif file_type == "avro":
        df = pd.DataFrame.from_records(fastavro.reader(data))
    elif file_type == "parquet":
        df = pd.read_parquet(data)
    else:
        df = pd.read_excel(data)

    return df


def get_viable_blob_datasets(blobs, name_attr):
    """
    Used to get the viable datasets for blob datastores. Used for Google Cloud Storage,
    Azure Blob Storage, Azure Data Lake and Amazon S3 datastores.
    """
    viable_blobs = []
    for blob in blobs:
        if getattr(blob, name_attr).split(".")[-1].lower() in get_supported_file_types():
            viable_blobs.append(blob)

    viable_datasets = defaultdict(list)
    for blob in viable_blobs:
        split_path = getattr(blob, name_attr).split("/")
        parent_folder = split_path[-2] if len(split_path) >= 2 else "root"

        value = json.dumps({"id": getattr(blob, name_attr), "name": split_path[-1].split(".")[0]})
        viable_datasets[parent_folder].append({"value": value, "display": split_path[-1]})

    return {"Files": dict(viable_datasets)}
34.835294
110
0.695373
0
0
0
0
0
0
0
0
823
0.277947
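A quick sketch of what structure_tables_views from the record above returns; the schema and table names are made up.

from datastores.util import structure_tables_views  # module path as in the record

# made-up (schema, name) pairs, mirroring what a datastore introspection query returns
tables = [("public", "orders"), ("public", "customers")]
views = [("public", "orders_by_day")]

print(structure_tables_views(tables, views))
# {'Tables': {'public': [{'value': 'public.orders', 'display': 'orders'},
#                        {'value': 'public.customers', 'display': 'customers'}]},
#  'Views': {'public': [{'value': 'public.orders_by_day', 'display': 'orders_by_day'}]}}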
841fba8a3c7dd4e8b6e7d2a9101dcfe6a12ffb43
637
py
Python
count_div.py
odellus/year_of_code
bfa2b30893bcc12f46e73ac34c63b5b05b27af5f
[ "MIT" ]
1
2017-01-03T02:24:34.000Z
2017-01-03T02:24:34.000Z
count_div.py
odellus/year_of_code
bfa2b30893bcc12f46e73ac34c63b5b05b27af5f
[ "MIT" ]
null
null
null
count_div.py
odellus/year_of_code
bfa2b30893bcc12f46e73ac34c63b5b05b27af5f
[ "MIT" ]
null
null
null
#! /usr/bin/python


def solution(A, B, K):
    # count the multiples of K in [A, B]; // keeps the arithmetic integral
    # under Python 3 as well
    res = 0
    rem_A = A % K
    rem_B = B % K
    if rem_A == 0 and rem_B == 0:
        res = (B - A) // K + 1
    elif rem_A == 0 and rem_B != 0:
        low_B = B - rem_B
        if low_B >= A:
            res = (low_B - A) // K + 1
        else:
            res = 0
    elif rem_A != 0 and rem_B != 0:
        low_A = A - rem_A
        low_B = B - rem_B
        if low_B >= A:
            res = (low_B - low_A) // K
        else:
            res = 0
    elif rem_A != 0 and rem_B == 0:
        low_A = A - rem_A
        res = (B - low_A) // K
        if res < 1:
            res = 0
    return res
21.233333
37
0.400314
0
0
0
0
0
0
0
0
18
0.028257
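The case analysis above collapses into a one-line closed form; a sketch with a brute-force cross-check over small ranges:

def solution_closed_form(A, B, K):
    # multiples of K up to B, minus multiples of K strictly below A;
    # A == 0 needs +1 because 0 itself is divisible by every K
    return B // K - (A - 1) // K if A > 0 else B // K + 1


for A in range(0, 20):
    for B in range(A, 20):
        for K in range(1, 10):
            expected = sum(1 for i in range(A, B + 1) if i % K == 0)
            assert solution_closed_form(A, B, K) == expected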
842064b9ee1d937a6d9bb100474bd7dafa3c5859
3,766
py
Python
applications/plugins/SofaPython/python/SofaPython/DAGValidation.py
sofa-framework/issofa
94855f488465bc3ed41223cbde987581dfca5389
[ "OML" ]
null
null
null
applications/plugins/SofaPython/python/SofaPython/DAGValidation.py
sofa-framework/issofa
94855f488465bc3ed41223cbde987581dfca5389
[ "OML" ]
null
null
null
applications/plugins/SofaPython/python/SofaPython/DAGValidation.py
sofa-framework/issofa
94855f488465bc3ed41223cbde987581dfca5389
[ "OML" ]
null
null
null
import sys

import Sofa
import Tools


def MechanicalObjectVisitor(node):
    ## listing mechanical states, bottom-up from node
    ancestors = []
    visited = []
    for p in node.getParents():
        path = p.getPathName()
        if path not in visited:
            visited.append(path)  # remember visited paths so a shared parent is walked once
            state = p.getMechanicalState()
            if state is not None:
                ancestors.append(path + "/" + state.name)
            ancestors += MechanicalObjectVisitor(p)
    return ancestors


class Visitor(object):
    ## checking that the mapping graph is equivalent to the node graph
    ## checking that independent dofs are not under other dofs in the scene graph

    def __init__(self):
        self.error = []

    def treeTraversal(self):
        return -1  # dag

    def processNodeTopDown(self, node):
        state = node.getMechanicalState()
        if state is None:
            return True

        mapping = node.getMechanicalMapping()
        if mapping is None:
            # independent dofs
            ancestors = MechanicalObjectVisitor(node)
            if len(ancestors) != 0:
                # an independent dof is under other dofs in the scene graph
                err = "ERROR "
                err += "mechanical state '" + state.getContext().getPathName() + "/" + state.name + "' is independent (no mapping)"
                err += " and should not be in the child node of other mechanical states (" + Tools.listToStr(ancestors) + ")"
                self.error.append(err)
        else:
            # mapped dofs
            from_dof = mapping.getFrom()
            parent_node = mapping.getContext().getParents()

            parent_node_path = []
            for p in parent_node:
                parent_node_path.append(p.getPathName())

            from_node_path = []
            for f in from_dof:
                from_node_path.append(f.getContext().getPathName())

            for f in from_node_path:
                if f not in parent_node_path:
                    err = "ERROR "
                    err += "'" + mapping.getContext().getPathName() + "/" + mapping.name + "': "
                    err += "'" + f + "' should be a parent node"
                    self.error.append(err)

            for p in parent_node_path:
                if p not in from_node_path:
                    err = "ERROR "
                    err += "'" + mapping.getContext().getPathName() + "/" + mapping.name + "': "
                    err += "'" + p + "' should NOT be a parent node"
                    self.error.append(err)

        return True

    def processNodeBottomUp(self, node):
        return True


def test(node, silent=False):
    ## checking that the mapping graph is equivalent to the node graph
    ## checking that independent dofs are not under other dofs in the scene graph
    ## returns a list of errors

    if not silent:
        print ""
        print "====== SofaPython.DAGValidation.test ======================="
        print ""
        print "Validating scene from node '/" + node.getPathName() + "'..."

    vis = Visitor()
    node.executeVisitor(vis)

    if not silent:
        if len(vis.error) == 0:
            print "... VALIDATED"
        else:
            print "... NOT VALID"
            print ""
            for e in vis.error:
                print e
        print ""
        print "=============================================================="
        sys.stdout.flush()

    return vis.error
30.128
123
0.521774
2,404
0.638343
0
0
0
0
0
0
1,073
0.284918
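A minimal usage sketch for the validator above, as it would be called from a SofaPython scene script (Python 2, matching the module); `root` is the scene root node that the Sofa runtime hands to the script.

# inside a SofaPython scene script; `root` is provided by the Sofa runtime
import SofaPython.DAGValidation

errors = SofaPython.DAGValidation.test(root, silent=True)
for e in errors:
    print e  # Python 2, matching the module above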
842128da3d89d5f7a471cc4a5a88b8952b188592
7,216
py
Python
models/DGIFullPipeline.py
nicolas-racchi/hpc2020-graphML
7f0d8b7c18469e1c793c7097bd10a9e0322e75be
[ "Apache-2.0" ]
null
null
null
models/DGIFullPipeline.py
nicolas-racchi/hpc2020-graphML
7f0d8b7c18469e1c793c7097bd10a9e0322e75be
[ "Apache-2.0" ]
null
null
null
models/DGIFullPipeline.py
nicolas-racchi/hpc2020-graphML
7f0d8b7c18469e1c793c7097bd10a9e0322e75be
[ "Apache-2.0" ]
null
null
null
import time
import os

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import f1_score

import stellargraph as sg
from stellargraph.mapper import CorruptedGenerator, HinSAGENodeGenerator
from stellargraph.layer import DeepGraphInfomax, HinSAGE

import tensorflow as tf
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras import Model, optimizers, losses, metrics

'''
Runs the entire pipeline:
- Takes preprocessed data as input
- Outputs predictions on the test_set nodes.
'''


def DGIPipeline(v_sets, e_sets, v_data, e_data, core_targets, ext_targets, core_testing):
    print("HINSAGE DGI FULL PIPELINE STARTED")
    tin = time.time()

    #? Sort based on testingFlag
    # data_splits[i].iloc[INDEX].values[0]
    # where INDEX:
    # [0] testingFlag=NaN
    # [1] testingFlag=0
    # [2] testingFlag=1
    data_splits = dict()
    for i in v_sets:
        v_sets[i] = v_sets[i].sort_values('testingFlag')
        data_splits[i] = v_sets[i].testingFlag.value_counts().to_frame()
        v_sets[i] = v_sets[i].drop('testingFlag', axis=1)

    #? Removing ExtendedCaseGraphID
    for i in v_sets:
        v_sets[i] = v_sets[i].drop('ExtendedCaseGraphID', axis=1)

    #? Create the graph object
    G = sg.StellarDiGraph(v_sets, e_sets)

    '''
    Iterate through the algorithm for every node type.
    This is because HinSAGE can predict on one node type at a time,
    even though it uses all the graph to compute the embeddings.
    '''

    # Parameters
    batch_size = 200
    dropout = 0.4
    verbose = 1
    visualize = False

    def run_for_node_type(v_type, hinsage_layer_sizes, num_samples, activations, epochs):
        nan_tflag = data_splits[v_type].iloc[0].values[0]
        train_tflag = data_splits[v_type].iloc[1].values[0]
        test_tflag = data_splits[v_type].iloc[2].values[0]

        train_cv_set = v_sets[v_type][nan_tflag:nan_tflag + train_tflag]
        train_cv_ids = train_cv_set.index.values.tolist()
        train_cv_labels = v_data.loc[[int(node_id) for node_id in train_cv_ids]].ExtendedCaseGraphID

        test_set = v_sets[v_type][-test_tflag:]
        test_ids = test_set.index.values.tolist()

        generator = HinSAGENodeGenerator(
            G,
            batch_size,
            num_samples,
            head_node_type=v_type
        )

        hinsage = HinSAGE(
            layer_sizes=hinsage_layer_sizes,
            activations=activations,
            generator=generator,
            bias=True,
            normalize="l2",
            dropout=dropout
        )

        def run_deep_graph_infomax(base_model, generator, epochs):
            print(f"Starting training for {v_type} type: ")
            t0 = time.time()
            corrupted_generator = CorruptedGenerator(generator)
            gen = corrupted_generator.flow(G.nodes(node_type=v_type))
            infomax = DeepGraphInfomax(base_model, corrupted_generator)
            x_in, x_out = infomax.in_out_tensors()

            # Train with DGI
            model = Model(inputs=x_in, outputs=x_out)
            model.compile(loss=tf.nn.sigmoid_cross_entropy_with_logits, optimizer=Adam(lr=1e-3))
            es = EarlyStopping(monitor="loss", min_delta=0, patience=10)
            history = model.fit(gen, epochs=epochs, verbose=verbose, callbacks=[es])
            # sg.utils.plot_history(history)

            x_emb_in, x_emb_out = base_model.in_out_tensors()
            if generator.num_batch_dims() == 2:
                x_emb_out = tf.squeeze(x_emb_out, axis=0)

            t1 = time.time()
            print(f'Time required: {t1 - t0:.2f} s ({(t1 - t0) / 60:.1f} min)')

            return x_emb_in, x_emb_out, model

        #? Train HinSAGE model:
        x_emb_in, x_emb_out, _model = run_deep_graph_infomax(hinsage, generator, epochs=epochs)
        emb_model = Model(inputs=x_emb_in, outputs=x_emb_out)

        train_cv_embs = emb_model.predict(
            generator.flow(train_cv_set.index.values)
        )

        #? Optional: Plot embeddings of training and CV set of current node type
        if visualize:
            train_cv_embs_2d = pd.DataFrame(
                TSNE(n_components=2).fit_transform(train_cv_embs),
                index=train_cv_set.index.values
            )
            label_map = {l: i * 10 for i, l in enumerate(np.unique(train_cv_labels), start=10) if pd.notna(l)}
            node_colours = [label_map[target] if pd.notna(target) else 0 for target in train_cv_labels]

            alpha = 0.7
            fig, ax = plt.subplots(figsize=(15, 15))
            ax.scatter(
                train_cv_embs_2d[0],
                train_cv_embs_2d[1],
                c=node_colours,
                cmap="jet",
                alpha=alpha,
            )
            ax.set(aspect="equal")
            plt.title(f"TSNE of HinSAGE {v_type} embeddings with DGI - coloring on ExtendedCaseGraphID")
            plt.show()
            return 1

        #? Split training and cross validation set using a simple ordered 80%/20% split
        n_embs = train_cv_embs.shape[0]
        train_size = int(n_embs * 0.80)
        cv_size = int(n_embs * 0.20)

        train_set = train_cv_embs[:train_size]
        train_labels = np.ravel(pd.DataFrame(train_cv_labels.values[:train_size]).fillna(0))

        cv_set = train_cv_embs[-cv_size:]
        cv_labels = np.ravel(pd.DataFrame(train_cv_labels.values[-cv_size:]).fillna(0))

        #? CLASSIFY
        print(f"Running Classifier for {v_type} type")

        classifier = DecisionTreeClassifier()
        classifier.fit(
            X=train_set,
            y=train_labels,
        )

        cv_pred = classifier.predict(cv_set)
        f1_avg = f1_score(cv_labels, cv_pred, average='weighted')
        acc = (cv_pred == cv_labels).mean()
        print(f"{v_type} CV Metrics: f1: {f1_avg:.6f} - acc: {acc:.6f}")

        #? Now run on the test set
        test_embs = emb_model.predict(
            generator.flow(test_set.index.values)
        )
        test_pred = classifier.predict(test_embs)

        #? Save predictions
        outdir = './output'
        outname = f"{v_type}_predictions.csv"
        if not os.path.exists(outdir):
            os.mkdir(outdir)
        fullname = os.path.join(outdir, outname)

        output = pd.DataFrame(test_ids)
        output = output.rename(columns={0: 'node_id'})
        output['ExtendedCaseGraphID'] = test_pred
        output = output.set_index('node_id')
        output.to_csv(fullname)

        return output

    #? Run for each node type
    full_predictions = pd.DataFrame()
    for v_type in v_sets:
        if v_type == 'Account':
            epochs = 12
            num_samples = [8, 4]
            hinsage_layer_sizes = [32, 32]
            activations = ['relu', 'relu']
        else:
            epochs = 30
            num_samples = [12]
            hinsage_layer_sizes = [72]
            activations = ['relu']

        if v_type != 'External Entity' and v_type != 'Address':
            predictions = run_for_node_type(v_type, hinsage_layer_sizes, num_samples, activations, epochs)
            full_predictions = full_predictions.append(predictions)

    full_predictions.to_csv("./output/full_predictions.csv")

    tout = time.time()
    # elapsed minutes since tin
    print(f"HINSAGE DGI FULL PIPELINE COMPLETED: {(tout - tin) / 60:.0f} min")

    return 1
33.877934
106
0.651746
0
0
0
0
0
0
0
0
1,418
0.196508
84226726736f353bcbde4bab4581da03be81116f
878
py
Python
Newsfeed/Newsfeed/app.py
akshayseth7/Intership_Snapshot
e262ec4939e2e5c5e2037333b7fa37f7c57d5425
[ "MIT" ]
null
null
null
Newsfeed/Newsfeed/app.py
akshayseth7/Intership_Snapshot
e262ec4939e2e5c5e2037333b7fa37f7c57d5425
[ "MIT" ]
null
null
null
Newsfeed/Newsfeed/app.py
akshayseth7/Intership_Snapshot
e262ec4939e2e5c5e2037333b7fa37f7c57d5425
[ "MIT" ]
null
null
null
from flask import Flask, render_template, request

import google_news

app = Flask(__name__)

# path of the most recently generated output template, shared across requests
outputFile = ''


@app.route("/")
def main():
    print("Welcome!")
    return render_template('index.html')


@app.route('/uploadFile', methods=['POST'])
def upload():
    global outputFile
    filedata = request.files['upload']
    filename = filedata.filename
    print('filename:' + filename)
    inputFile = 'input/' + filename
    outputFile = 'output/' + filename + '_output'
    outputPath = 'templates/' + outputFile
    filedata.save(inputFile)
    print("Input Saved")
    print("processing starts")
    google_news.news(inputFile, outputPath)  # processing
    print("processing success")
    return "success"


@app.route('/download')
def download():
    print('download')
    print(outputFile)
    return render_template(outputFile)


if __name__ == "__main__":
    app.run()
20.904762
50
0.67426
0
0
0
0
723
0.823462
0
0
206
0.234624
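A hedged sketch of exercising the upload route above with Flask's built-in test client; 'sample.txt' is a placeholder, and the google_news module plus the input/, output/ and templates/ folders must exist for the request to succeed.

import io
from app import app  # the Flask app from the record above

client = app.test_client()
resp = client.post('/uploadFile',
                   data={'upload': (io.BytesIO(b'some text'), 'sample.txt')},
                   content_type='multipart/form-data')
print(resp.status_code, resp.data)  # expect 200 and b'success'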
842586bea147f3e4d054e06882c5e5cefb545add
1,222
py
Python
physics_planning_games/mujoban/mujoban_level_test.py
mitchchristow/deepmind-research
49c7ebe6acc48dd276ca09eca6924ba6cb5ec3a3
[ "Apache-2.0" ]
10,110
2019-08-27T20:05:30.000Z
2022-03-31T16:31:56.000Z
physics_planning_games/mujoban/mujoban_level_test.py
ibex-training/deepmind-research
6f8ae40b2626b30f5f80dfc92f5676689eff5599
[ "Apache-2.0" ]
317
2019-11-09T10:19:10.000Z
2022-03-31T00:05:19.000Z
physics_planning_games/mujoban/mujoban_level_test.py
ibex-training/deepmind-research
6f8ae40b2626b30f5f80dfc92f5676689eff5599
[ "Apache-2.0" ]
2,170
2019-08-28T12:53:36.000Z
2022-03-31T13:15:11.000Z
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for mujoban_level."""

from absl.testing import absltest

from physics_planning_games.mujoban import mujoban_level

_LEVEL = """
#####
#  @####
#  $.  #
###$.# #
#  $.# #
# #$.  #
#    ###
######"""

_GRID_LEVEL = """********
*..P****
*..BG..*
***BG*.*
*..BG*.*
*.*BG..*
*....***
********
"""


class MujobanLevelTest(absltest.TestCase):

  def test_ascii_to_text_grid_level(self):
    grid_level = mujoban_level._ascii_to_text_grid_level(_LEVEL)
    self.assertEqual(_GRID_LEVEL, grid_level)


if __name__ == '__main__':
  absltest.main()
22.62963
78
0.636661
197
0.161211
0
0
0
0
0
0
847
0.693126
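For reference, a toy sketch of the symbol translation the test above exercises (# wall -> *, space -> ., @ player -> P, $ box -> B, . goal -> G). The real converter is physics_planning_games.mujoban.mujoban_level._ascii_to_text_grid_level; whether it pads ragged rows with walls, as assumed here, is inferred from the fixtures, not from its source.

_SOKOBAN_TO_GRID = {'#': '*', ' ': '.', '@': 'P', '$': 'B', '.': 'G'}


def ascii_to_grid(level):
    rows = [r for r in level.split('\n') if r]
    width = max(len(r) for r in rows)
    out = []
    for r in rows:
        r = r.ljust(width, '#')  # pad ragged rows with walls (assumption)
        out.append(''.join(_SOKOBAN_TO_GRID[c] for c in r))
    return '\n'.join(out) + '\n'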
84272a9b78bd142a71da410927baa64f753039be
1,069
py
Python
TeamX/TeamXapp/migrations/0040_auto_20190712_1351.py
rootfinlay/SageTeamX
cf4cde3360c8cccb8a727ba64d66345805d7a0ed
[ "Unlicense" ]
null
null
null
TeamX/TeamXapp/migrations/0040_auto_20190712_1351.py
rootfinlay/SageTeamX
cf4cde3360c8cccb8a727ba64d66345805d7a0ed
[ "Unlicense" ]
null
null
null
TeamX/TeamXapp/migrations/0040_auto_20190712_1351.py
rootfinlay/SageTeamX
cf4cde3360c8cccb8a727ba64d66345805d7a0ed
[ "Unlicense" ]
null
null
null
# Generated by Django 2.2.3 on 2019-07-12 12:51

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('TeamXapp', '0039_auto_20190712_1348'),
    ]

    operations = [
        migrations.AddField(
            model_name='leavecalendar',
            name='leave_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='TeamXapp.LeaveStatus'),
        ),
        migrations.AlterField(
            model_name='allmembers',
            name='scrum_team_name',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='TeamXapp.ScrumTeam', verbose_name='Scrum team: '),
        ),
        migrations.AlterField(
            model_name='allmembers',
            name='scrum_team_roles',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='TeamXapp.ScrumTeamRole', verbose_name='Scrum Team Roles: '),
        ),
    ]
35.633333
168
0.649205
943
0.882133
0
0
0
0
0
0
268
0.250702
842b2f72a08093681d688bb2f92eb0afb6f06354
742
py
Python
quartic_sdk/core/entities/site.py
divyquartic/QuarticSDK
e3ce6387ed5f27845e0909878c831ae39badd8eb
[ "MIT" ]
1
2021-03-26T12:39:44.000Z
2021-03-26T12:39:44.000Z
quartic_sdk/core/entities/site.py
divyquartic/QuarticSDK
e3ce6387ed5f27845e0909878c831ae39badd8eb
[ "MIT" ]
95
2021-02-18T03:15:38.000Z
2022-03-25T05:39:12.000Z
quartic_sdk/core/entities/site.py
divyquartic/QuarticSDK
e3ce6387ed5f27845e0909878c831ae39badd8eb
[ "MIT" ]
1
2021-09-03T12:46:18.000Z
2021-09-03T12:46:18.000Z
""" The given file contains the class to refer to the Site entity """ from quartic_sdk.core.entities.base import Base import quartic_sdk.utilities.constants as Constants class Site(Base): """ The given class refers to the site entity which is created based upon the site response returned by the API """ def __repr__(self): """ Override the method to return the site name """ return f"<{Constants.SITE_ENTITY}: {self.name}>" def assets(self): """ Get the assets belongs to a site """ raise NotImplementedError def edge_connectors(self): """ Get the edge_connectors belongs to a site """ raise NotImplementedError
23.935484
91
0.630728
569
0.766846
0
0
0
0
0
0
425
0.572776
842c3f72d982dddd1077d864f70783e67cb8182b
525
py
Python
newapp/migrations/0003_auto_20190524_1511.py
HCDigitalScholarship/liason_lair
6035d100e3ea1216af2907a4cccd319a1cc4f8d8
[ "MIT" ]
null
null
null
newapp/migrations/0003_auto_20190524_1511.py
HCDigitalScholarship/liason_lair
6035d100e3ea1216af2907a4cccd319a1cc4f8d8
[ "MIT" ]
null
null
null
newapp/migrations/0003_auto_20190524_1511.py
HCDigitalScholarship/liason_lair
6035d100e3ea1216af2907a4cccd319a1cc4f8d8
[ "MIT" ]
1
2019-08-03T01:30:30.000Z
2019-08-03T01:30:30.000Z
# Generated by Django 2.0.5 on 2019-05-24 15:11

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('newapp', '0002_auto_20190524_1507'),
    ]

    operations = [
        migrations.AlterField(
            model_name='course',
            name='additional_info',
            field=models.TextField(),
        ),
        migrations.AlterField(
            model_name='course',
            name='misc_links',
            field=models.TextField(),
        ),
    ]
21.875
47
0.565714
432
0.822857
0
0
0
0
0
0
125
0.238095
842f1947d1778a3623e9a7a62865a578b298416e
2,027
py
Python
comment/views/blocker.py
Italo-Carvalho/Comment
86424d02a901b74ccbcaa438fffc38f352535301
[ "MIT" ]
75
2018-09-08T14:29:35.000Z
2022-03-25T16:17:06.000Z
comment/views/blocker.py
p0-oya/Comment
39f6fb6c40314d97391d36fc25112d6420c96991
[ "MIT" ]
165
2018-10-07T21:55:31.000Z
2022-02-27T14:44:32.000Z
comment/views/blocker.py
p0-oya/Comment
39f6fb6c40314d97391d36fc25112d6420c96991
[ "MIT" ]
37
2019-12-01T19:44:23.000Z
2022-02-13T16:46:14.000Z
from django.views import View

from comment.models import BlockedUser, BlockedUserHistory, Comment
from comment.mixins import CanBlockUsersMixin
from comment.responses import UTF8JsonResponse, DABResponseData
from comment.messages import BlockUserError


class BaseToggleBlockingView(DABResponseData):
    response_class = None

    def get_response_class(self):
        assert self.response_class is not None, (
            "'%s' should either include a `response_class` attribute, "
            "or override the `get_response_class()` method." % self.__class__.__name__
        )
        return self.response_class

    def post(self, request, *args, **kwargs):
        response_class = self.get_response_class()
        request_data = request.POST or getattr(request, 'data', {})
        comment_id = request_data.get('comment_id', None)
        try:
            comment = Comment.objects.get(id=int(comment_id))
        except (Comment.DoesNotExist, ValueError, TypeError):
            self.error = {
                'detail': BlockUserError.INVALID
            }
            self.status = 400
            return response_class(self.json(), status=self.status)

        blocked_user, created = BlockedUser.objects.get_or_create_blocked_user_for_comment(comment)
        if not created:
            blocked_user.blocked = not blocked_user.blocked
            blocked_user.save()

        reason = request_data.get('reason', None)
        if blocked_user.blocked and not reason:
            reason = comment.content

        BlockedUserHistory.objects.create_history(
            blocked_user=blocked_user,
            blocker=request.user,
            reason=reason
        )
        self.data = {
            'blocked_user': comment.get_username(),
            'blocked': blocked_user.blocked,
            'urlhash': comment.urlhash
        }
        return response_class(self.json())


class ToggleBlockingView(CanBlockUsersMixin, BaseToggleBlockingView, View):
    response_class = UTF8JsonResponse
34.948276
99
0.655649
1,768
0.872225
0
0
0
0
0
0
173
0.085348
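A hedged sketch of hitting the toggle-blocking endpoint above with Django's test client; the URL, credentials and comment id are placeholders that depend on the project's urlconf and fixtures.

from django.test import Client

client = Client()
client.login(username="moderator", password="secret")  # hypothetical credentials
resp = client.post("/comment/block/", {"comment_id": 1, "reason": "spam"})  # placeholder URL
print(resp.json())  # e.g. {'data': {'blocked_user': '...', 'blocked': True, 'urlhash': '...'}}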
843650dbb739a5fa34f00adc9a2ecc5b851d5e8f
49
py
Python
api_version_1.py
docktermj/python-future-proofing-apis
ae8ac783a14c6f9d4050ad2545c82f96fb990a5c
[ "Apache-2.0" ]
null
null
null
api_version_1.py
docktermj/python-future-proofing-apis
ae8ac783a14c6f9d4050ad2545c82f96fb990a5c
[ "Apache-2.0" ]
null
null
null
api_version_1.py
docktermj/python-future-proofing-apis
ae8ac783a14c6f9d4050ad2545c82f96fb990a5c
[ "Apache-2.0" ]
null
null
null
def stable_api(x):
    print("X: {0}".format(x))
16.333333
29
0.571429
0
0
0
0
0
0
0
0
8
0.163265
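Given the repository name (python-future-proofing-apis), a hedged sketch of the usual way such a stable entry point is evolved without breaking old callers: grow it through keyword arguments with defaults. The verbose option is illustrative, not taken from the repo.

def stable_api(x, **kwargs):
    # old positional callers keep working; newer callers may pass options
    verbose = kwargs.get("verbose", False)  # hypothetical new option
    if verbose:
        print("stable_api called with:")
    print("X: {0}".format(x))


stable_api(41)                # original call style still works
stable_api(41, verbose=True)  # newer call style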
84377da9e8bef2666e66841f43d9581ba693e418
39,550
py
Python
wicon/glyph.py
Wudan07/wIcon
9189b7029759a22371827426b5342b6dc976f1b2
[ "MIT" ]
null
null
null
wicon/glyph.py
Wudan07/wIcon
9189b7029759a22371827426b5342b6dc976f1b2
[ "MIT" ]
null
null
null
wicon/glyph.py
Wudan07/wIcon
9189b7029759a22371827426b5342b6dc976f1b2
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (c) 2015 Brad Newbold (wudan07 [at] gmail.com) # See LICENSE for details. # glyph.py # """wIcon library: glyph provides GlyphObject """ ##from handy import * ##from common import * ### represents a character in a glyphString class GlyphObject: def __init__(self, glyph): ### set to glyph value self.glyph = glyph ### will be an array of pixels, unique to each glyph self.coords = [] ### will be an adjustment to the next characters starting point - I eyeballed this. Sorry typographers! self.flash = 6 if glyph == 'A': self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 3]) self.coords.append([1, 4]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([2, 0]) self.coords.append([2, 1]) self.coords.append([2, 2]) self.coords.append([2, 6]) self.coords.append([3, 0]) self.coords.append([3, 1]) self.coords.append([3, 2]) self.coords.append([3, 6]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([5, 6]) self.coords.append([5, 7]) self.coords.append([5, 8]) self.flash = 7 elif glyph == 'a': self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([3, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'B': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 4]) self.coords.append([1, 8]) self.coords.append([2, 0]) self.coords.append([2, 4]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 4]) self.coords.append([3, 8]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.flash = 6 elif glyph == 'b': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 4]) self.coords.append([1, 8]) self.coords.append([2, 3]) self.coords.append([2, 8]) self.coords.append([3, 3]) self.coords.append([3, 8]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.flash = 6 elif glyph == 'C': self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([1, 1]) self.coords.append([1, 7]) self.coords.append([2, 0]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'c': self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) 
self.coords.append([3, 8]) self.flash = 6 elif glyph == 'D': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 8]) self.coords.append([2, 0]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 8]) self.coords.append([4, 1]) self.coords.append([4, 7]) self.coords.append([5, 2]) self.coords.append([5, 3]) self.coords.append([5, 4]) self.coords.append([5, 5]) self.coords.append([5, 6]) self.flash = 7 elif glyph == 'd': self.coords.append([4, 0]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([3, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'E': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 4]) self.coords.append([1, 8]) self.coords.append([2, 0]) self.coords.append([2, 4]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 4]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([4, 4]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'e': self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([1, 5]) self.coords.append([2, 5]) self.coords.append([3, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == 'F': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 4]) self.coords.append([2, 0]) self.coords.append([2, 4]) self.coords.append([3, 0]) self.coords.append([3, 4]) self.coords.append([4, 0]) self.coords.append([4, 4]) self.flash = 6 elif glyph == 'f': self.coords.append([2, 1]) self.coords.append([3, 1]) self.coords.append([1, 2]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([1, 4]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([1, 7]) self.coords.append([1, 8]) self.flash = 5 elif glyph == 'G': self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([1, 1]) self.coords.append([1, 7]) self.coords.append([2, 0]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 4]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 
6]) self.coords.append([4, 7]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'g': self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([3, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([4, 8]) self.coords.append([4, 9]) self.coords.append([1, 10]) self.coords.append([2, 10]) self.coords.append([3, 10]) self.flash = 6 elif glyph == 'H': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 4]) self.coords.append([2, 4]) self.coords.append([3, 4]) self.coords.append([4, 0]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'h': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([0, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'I': self.coords.append([0, 0]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 8]) self.coords.append([2, 0]) self.coords.append([2, 1]) self.coords.append([2, 2]) self.coords.append([2, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) self.coords.append([2, 7]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'i': self.coords.append([1, 1]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([1, 4]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([1, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.flash = 4 elif glyph == 'J': self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 0]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.flash = 6 elif glyph == 'j': self.coords.append([2, 1]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) self.coords.append([2, 7]) self.coords.append([2, 8]) self.coords.append([0, 9]) self.coords.append([1, 9]) self.flash = 4 elif glyph == 'K': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 4]) 
self.coords.append([2, 3]) self.coords.append([2, 5]) self.coords.append([3, 1]) self.coords.append([3, 2]) self.coords.append([3, 6]) self.coords.append([4, 0]) self.coords.append([4, 7]) self.coords.append([5, 8]) self.flash = 7 elif glyph == 'k': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([2, 4]) self.coords.append([0, 5]) self.coords.append([1, 5]) self.coords.append([0, 6]) self.coords.append([2, 6]) self.coords.append([0, 7]) self.coords.append([3, 7]) self.coords.append([0, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'L': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'l': self.coords.append([1, 1]) self.coords.append([1, 2]) self.coords.append([1, 3]) self.coords.append([1, 4]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([1, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.flash = 4 elif glyph == 'M': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 1]) self.coords.append([1, 2]) self.coords.append([1, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) self.coords.append([3, 1]) self.coords.append([3, 2]) self.coords.append([3, 3]) self.coords.append([4, 0]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'm': self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([2, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([2, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([2, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([2, 7]) self.coords.append([4, 7]) self.coords.append([0, 8]) self.coords.append([2, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'N': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 1]) self.coords.append([1, 2]) self.coords.append([2, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([3, 6]) self.coords.append([3, 7]) self.coords.append([4, 0]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'n': self.coords.append([0, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([1, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 
5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([0, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'O': self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([1, 1]) self.coords.append([1, 7]) self.coords.append([2, 0]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 8]) self.coords.append([4, 1]) self.coords.append([4, 7]) self.coords.append([5, 2]) self.coords.append([5, 3]) self.coords.append([5, 4]) self.coords.append([5, 5]) self.coords.append([5, 6]) self.flash = 7 elif glyph == 'o': self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == 'P': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 5]) self.coords.append([2, 0]) self.coords.append([2, 5]) self.coords.append([3, 0]) self.coords.append([3, 4]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.flash = 6 elif glyph == 'p': self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.coords.append([0, 9]) self.coords.append([0, 10]) self.flash = 6 elif glyph == 'Q': self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([1, 1]) self.coords.append([1, 7]) self.coords.append([2, 0]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 8]) self.coords.append([3, 9]) self.coords.append([4, 1]) self.coords.append([4, 7]) self.coords.append([4, 10]) self.coords.append([5, 2]) self.coords.append([5, 3]) self.coords.append([5, 4]) self.coords.append([5, 5]) self.coords.append([5, 6]) self.coords.append([5, 10]) self.coords.append([6, 10]) self.flash = 7 elif glyph == 'q': self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([3, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([4, 8]) self.coords.append([4, 9]) self.coords.append([4, 10]) self.flash = 6 elif glyph == 'R': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) 
self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 4]) self.coords.append([2, 0]) self.coords.append([2, 4]) self.coords.append([3, 0]) self.coords.append([3, 3]) self.coords.append([3, 5]) self.coords.append([3, 6]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 7]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'r': self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([3, 3]) self.coords.append([4, 3]) self.coords.append([1, 4]) self.coords.append([2, 4]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([1, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.flash = 6 elif glyph == 'S': self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 4]) self.coords.append([1, 8]) self.coords.append([2, 0]) self.coords.append([2, 4]) self.coords.append([2, 8]) self.coords.append([3, 0]) self.coords.append([3, 5]) self.coords.append([3, 8]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.flash = 6 elif glyph == 's': self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([1, 5]) self.coords.append([2, 5]) self.coords.append([3, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == 'T': self.coords.append([0, 0]) self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([3, 1]) self.coords.append([3, 2]) self.coords.append([3, 3]) self.coords.append([3, 4]) self.coords.append([3, 5]) self.coords.append([3, 6]) self.coords.append([3, 7]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([5, 0]) self.coords.append([6, 0]) self.flash = 8 elif glyph == 't': self.coords.append([1, 1]) self.coords.append([1, 2]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([1, 4]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([1, 7]) self.coords.append([4, 7]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == 'U': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([4, 7]) self.flash = 6 elif glyph == 'u': self.coords.append([0, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([3, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'V': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([1, 2]) self.coords.append([1, 3]) self.coords.append([1, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) 
self.coords.append([3, 7]) self.coords.append([3, 8]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([5, 1]) self.coords.append([5, 2]) self.coords.append([5, 3]) self.coords.append([6, 0]) self.flash = 8 elif glyph == 'v': self.coords.append([0, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([1, 5]) self.coords.append([3, 5]) self.coords.append([1, 6]) self.coords.append([3, 6]) self.coords.append([2, 7]) self.coords.append([2, 8]) self.flash = 6 elif glyph == 'W': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([1, 7]) self.coords.append([1, 8]) self.coords.append([2, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([3, 0]) self.coords.append([3, 1]) self.coords.append([3, 2]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([5, 5]) self.coords.append([5, 6]) self.coords.append([5, 7]) self.coords.append([5, 8]) self.coords.append([6, 0]) self.coords.append([6, 1]) self.coords.append([6, 2]) self.coords.append([6, 3]) self.coords.append([6, 4]) self.flash = 8 elif glyph == 'w': self.coords.append([0, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([2, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([2, 6]) self.coords.append([4, 6]) self.coords.append([1, 7]) self.coords.append([3, 7]) self.coords.append([1, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == 'X': self.coords.append([0, 0]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 1]) self.coords.append([1, 2]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([2, 3]) self.coords.append([2, 4]) self.coords.append([3, 1]) self.coords.append([3, 2]) self.coords.append([3, 5]) self.coords.append([3, 6]) self.coords.append([4, 0]) self.coords.append([4, 7]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'x': self.coords.append([0, 3]) self.coords.append([4, 3]) self.coords.append([1, 4]) self.coords.append([3, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) self.coords.append([1, 7]) self.coords.append([3, 7]) self.coords.append([0, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'Y': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([1, 2]) self.coords.append([1, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) self.coords.append([2, 7]) self.coords.append([2, 8]) self.coords.append([3, 2]) self.coords.append([3, 3]) self.coords.append([4, 0]) self.coords.append([4, 1]) self.flash = 6 elif glyph == 'y': self.coords.append([0, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([1, 5]) self.coords.append([3, 5]) self.coords.append([1, 6]) self.coords.append([3, 6]) self.coords.append([2, 7]) self.coords.append([2, 8]) self.coords.append([1, 9]) self.coords.append([0, 10]) self.flash = 6 elif glyph == 'Z': self.coords.append([0, 0]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 0]) self.coords.append([1, 5]) self.coords.append([1, 6]) self.coords.append([1, 8]) self.coords.append([2, 0]) self.coords.append([2, 4]) self.coords.append([2, 8]) 
self.coords.append([3, 0]) self.coords.append([3, 2]) self.coords.append([3, 3]) self.coords.append([3, 8]) self.coords.append([4, 0]) self.coords.append([4, 1]) self.coords.append([4, 8]) self.flash = 6 elif glyph == 'z': self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([4, 3]) self.coords.append([3, 4]) self.coords.append([2, 5]) self.coords.append([1, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == '0': self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([0, 1]) self.coords.append([4, 1]) self.coords.append([0, 2]) self.coords.append([4, 2]) self.coords.append([0, 3]) self.coords.append([3, 3]) self.coords.append([4, 3]) self.coords.append([0, 4]) self.coords.append([2, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([1, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == '1': self.coords.append([2, 0]) self.coords.append([1, 1]) self.coords.append([2, 1]) self.coords.append([0, 2]) self.coords.append([2, 2]) self.coords.append([2, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) self.coords.append([2, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == '2': self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([0, 1]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([3, 4]) self.coords.append([2, 5]) self.coords.append([1, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.coords.append([4, 8]) self.flash = 6 elif glyph == '3': self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([0, 1]) self.coords.append([4, 1]) self.coords.append([4, 2]) self.coords.append([4, 3]) self.coords.append([2, 4]) self.coords.append([3, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == '4': self.coords.append([1, 0]) self.coords.append([3, 0]) self.coords.append([1, 1]) self.coords.append([3, 1]) self.coords.append([0, 2]) self.coords.append([3, 2]) self.coords.append([0, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([1, 4]) self.coords.append([2, 4]) self.coords.append([3, 4]) self.coords.append([4, 4]) self.coords.append([3, 5]) self.coords.append([3, 6]) self.coords.append([3, 7]) self.coords.append([3, 8]) self.flash = 6 elif glyph == '5': self.coords.append([0, 0]) self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([4, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 
6]) self.coords.append([4, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == '6': self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([1, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == '7': self.coords.append([0, 0]) self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([4, 0]) self.coords.append([5, 0]) self.coords.append([0, 1]) self.coords.append([5, 1]) self.coords.append([5, 2]) self.coords.append([4, 3]) self.coords.append([4, 4]) self.coords.append([3, 5]) self.coords.append([3, 6]) self.coords.append([2, 7]) self.coords.append([2, 8]) self.flash = 7 elif glyph == '8': self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([0, 1]) self.coords.append([4, 1]) self.coords.append([0, 2]) self.coords.append([4, 2]) self.coords.append([1, 3]) self.coords.append([2, 3]) self.coords.append([3, 3]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([0, 5]) self.coords.append([4, 5]) self.coords.append([0, 6]) self.coords.append([4, 6]) self.coords.append([0, 7]) self.coords.append([4, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.flash = 6 elif glyph == '9': self.coords.append([1, 0]) self.coords.append([2, 0]) self.coords.append([3, 0]) self.coords.append([0, 1]) self.coords.append([4, 1]) self.coords.append([0, 2]) self.coords.append([4, 2]) self.coords.append([0, 3]) self.coords.append([4, 3]) self.coords.append([1, 4]) self.coords.append([2, 4]) self.coords.append([3, 4]) self.coords.append([4, 4]) self.coords.append([0, 4]) self.coords.append([4, 4]) self.coords.append([4, 5]) self.coords.append([4, 6]) self.coords.append([3, 7]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.flash = 6 elif glyph == '-': self.coords.append([0, 4]) self.coords.append([1, 4]) self.coords.append([2, 4]) self.coords.append([3, 4]) self.flash = 6 elif glyph == '.': self.coords.append([0, 7]) self.coords.append([1, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.flash = 4 elif glyph == '!': self.coords.append([0, 0]) self.coords.append([1, 0]) self.coords.append([0, 1]) self.coords.append([1, 1]) self.coords.append([0, 2]) self.coords.append([1, 2]) self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([0, 4]) self.coords.append([1, 4]) self.coords.append([0, 5]) self.coords.append([1, 5]) self.coords.append([0, 7]) self.coords.append([1, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.flash = 4 elif glyph == ',': self.coords.append([0, 7]) self.coords.append([1, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([1, 9]) self.coords.append([0, 10]) self.flash = 4 elif glyph == '\'': self.coords.append([0, 0]) self.coords.append([1, 0]) self.coords.append([0, 1]) self.coords.append([1, 1]) self.coords.append([1, 2]) self.flash = 4 elif glyph == '"': self.coords.append([0, 0]) self.coords.append([0, 1]) self.coords.append([0, 2]) self.coords.append([2, 
0]) self.coords.append([2, 1]) self.coords.append([2, 2]) self.flash = 4 elif glyph == ' ': self.flash = 6 elif glyph == '\t': self.flash = 24 elif glyph == '(': self.coords.append([2, 0]) self.coords.append([1, 1]) self.coords.append([0, 2]) self.coords.append([0, 3]) self.coords.append([0, 4]) self.coords.append([0, 5]) self.coords.append([0, 6]) self.coords.append([0, 7]) self.coords.append([0, 8]) self.coords.append([1, 9]) self.coords.append([2, 10]) self.flash = 6 elif glyph == ')': self.coords.append([0, 0]) self.coords.append([1, 1]) self.coords.append([2, 2]) self.coords.append([2, 3]) self.coords.append([2, 4]) self.coords.append([2, 5]) self.coords.append([2, 6]) self.coords.append([2, 7]) self.coords.append([2, 8]) self.coords.append([1, 9]) self.coords.append([0, 10]) self.flash = 6 elif glyph == ':': self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([0, 4]) self.coords.append([1, 4]) self.coords.append([0, 7]) self.coords.append([1, 7]) self.coords.append([0, 8]) self.coords.append([1, 8]) self.flash = 5 elif glyph == ';': self.coords.append([0, 3]) self.coords.append([1, 3]) self.coords.append([0, 4]) self.coords.append([1, 4]) self.coords.append([0, 7]) self.coords.append([1, 7]) self.coords.append([1, 8]) self.coords.append([0, 9]) self.flash = 5 elif glyph == '_': self.coords.append([0, 8]) self.coords.append([1, 8]) self.coords.append([2, 8]) self.coords.append([3, 8]) self.coords.append([4, 8]) self.coords.append([5, 8]) self.flash = 7 else: self.flash = 6 def center(self, wide=6): glwide = self.flash - 2 adjust = (wide-glwide)/2 for cor in self.coords: cor[0] += adjust self._flash(wide+2) def _flash(self, flash): self.flash = flash def glyphstr_length(gls): """ Returns length of glyphstr gls """ length = 0 for gl in gls: length += gl.flash return length - 2 def glyphstr_monospace(gls, wide=6): """ for each GlyphObject in gls, calls .center(wide) """ for gl in gls: gl.center(wide) def glyphstr_center(gls, width=100): """ given a width of an area (such as column heading width) it will adjust the start point of each glyph in a glyphstr_, centering the string """ length = glyphstr_length(gls) glen = len(gls) #addlen = (width-length)/(glen)) print length print width - length hl = (width-length)/2 for i in range(0, glen): gl = gls[i] flash = gl.flash gl._flash(flash+hl) def glyphstr_justify(gls, width=100): """ given a width of an area (such as column heading width) it will adjust the start point of each glyph in a glyphstr_, justifying the string """ length = glyphstr_length(gls) glen = len(gls) #addlen = (width-length)/(glen)) print length print width - length ct = 0 for i in range(0, width-length): if ct >= glen-1: ct = 0 gl = gls[ct] flash = gl.flash gl._flash(flash+1) ct += 1 def glyphstr_bounds_get(string, mono=False): """ Returns 2 len integer array, size and height of string as glyphstr_ """ #xk = 0 #yk = 0 xz = 0 #yz = 10 vals = string.split('\n') yz = len(vals) * 10 for val in vals: gs = glyphstr_get(val) if mono: glyphstr_monospace(gs) sz = glyphstr_length(gs) if sz > xz: xz = sz return [xz, yz] def glyphstr_get(string): """ given a string, Returns glyphs, a list of glyphs """ glyphs = [] i = 0 while i <
len(string): letter = string[i:i+1] glyphs.append(GlyphObject(letter)) i += 1 return glyphs
26.759134
143
0.596207
37,587
0.950367
0
0
0
0
0
0
1,273
0.032187
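A minimal rendering sketch for the glyph module above. It assumes the file is importable as glyphs (the module name is hypothetical) and runs under Python 2, since the module itself uses print statements; the raster loop is illustrative and not part of the original file.

# Sketch: rasterize a short string with the glyph module above.
# Assumes the module is saved as glyphs.py (hypothetical name).
import glyphs

gls = glyphs.glyphstr_get("LO")
points = []
x = 0
for gl in gls:
    for cx, cy in gl.coords:
        points.append((x + cx, cy))
    x += gl.flash  # flash is the advance width (drawn width plus spacing)

width = max(px for px, py in points) + 1
height = max(py for px, py in points) + 1
grid = [[' '] * width for _ in range(height)]
for px, py in points:
    grid[py][px] = '#'
for row in grid:
    print ''.join(row)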
8438a4e8ec614cde523653248e7af3039519099a
463
py
Python
mqtt_sender.py
kehtolaulu/iot-ccs811
611ca30ffaec067d730ac95c59b6800fda2cf148
[ "MIT" ]
null
null
null
mqtt_sender.py
kehtolaulu/iot-ccs811
611ca30ffaec067d730ac95c59b6800fda2cf148
[ "MIT" ]
null
null
null
mqtt_sender.py
kehtolaulu/iot-ccs811
611ca30ffaec067d730ac95c59b6800fda2cf148
[ "MIT" ]
null
null
null
import json from paho.mqtt.client import Client from subscriber import Subscriber from datetime import datetime class MqttSender(Subscriber): def __init__(self, client: Client, topic: str): self.client = client self.topic = topic def on_next(self, message: dict): json_message = json.dumps(message) print(f'[{datetime.now().isoformat()}] Sending: {json_message}') self.client.publish(self.topic, json_message)
25.722222
72
0.695464
347
0.74946
0
0
0
0
0
0
57
0.12311
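A small usage sketch for MqttSender above, assuming a paho-mqtt 1.x client; the broker address, topic, and payload are placeholders, not taken from the repository.

# Sketch: wire MqttSender to a live paho-mqtt client.
from paho.mqtt.client import Client

from mqtt_sender import MqttSender  # the module above

client = Client()
client.connect("localhost", 1883)   # hypothetical broker
client.loop_start()                 # background network loop

sender = MqttSender(client, "sensors/ccs811")   # hypothetical topic
sender.on_next({"eco2": 400, "tvoc": 0})        # CCS811-style reading

client.loop_stop()
client.disconnect()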
8439225f8d80c110768afbd91dc3a48cb1f55f67
1,914
py
Python
users/migrations/0004_auto_20201228_1613.py
hhdMrLion/django-crm
9f6f021e0cddc323c88280b733144366a0cb9fa6
[ "Apache-2.0" ]
1
2021-06-18T03:03:43.000Z
2021-06-18T03:03:43.000Z
users/migrations/0004_auto_20201228_1613.py
hhdMrLion/django-crm
9f6f021e0cddc323c88280b733144366a0cb9fa6
[ "Apache-2.0" ]
null
null
null
users/migrations/0004_auto_20201228_1613.py
hhdMrLion/django-crm
9f6f021e0cddc323c88280b733144366a0cb9fa6
[ "Apache-2.0" ]
null
null
null
# Generated by Django 2.2.17 on 2020-12-28 08:13 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('users', '0003_auto_20201228_1605'), ] operations = [ migrations.CreateModel( name='Count', fields=[ ('user_id', models.IntegerField(primary_key=True, serialize=False, verbose_name='用户id')), ('name', models.CharField(blank=True, max_length=64, null=True, verbose_name='姓名')), ('day_customer', models.IntegerField(default=0, verbose_name='今天新增客户数量')), ('day_liaison', models.IntegerField(default=0, verbose_name='今天新增联系人数量')), ('day_record', models.IntegerField(default=0, verbose_name='今天新增拜访记录数量')), ('day_business', models.IntegerField(default=0, verbose_name='今天新增商机数量')), ('mouth_customer', models.IntegerField(default=0, verbose_name='本月新增客户数量')), ('mouth_liaison', models.IntegerField(default=0, verbose_name='本月新增联系人数量')), ('mouth_record', models.IntegerField(default=0, verbose_name='本月新增拜访记录数量')), ('mouth_business', models.IntegerField(default=0, verbose_name='本月新增商机数量')), ('all_customer', models.IntegerField(default=0, verbose_name='全部客户数量')), ('all_liaison', models.IntegerField(default=0, verbose_name='全部联系人数量')), ('all_record', models.IntegerField(default=0, verbose_name='全部拜访记录数量')), ('all_business', models.IntegerField(default=0, verbose_name='全部商机数量')), ], options={ 'verbose_name': '用户数据统计', 'verbose_name_plural': '用户数据统计', 'db_table': 'count', }, ), migrations.DeleteModel( name='UserCount', ), ]
46.682927
106
0.581505
2,040
0.953271
0
0
0
0
0
0
706
0.329907
843bb9c05ba6309f2f5fa04bc4ff12d51bd9395e
430
py
Python
pages/homepage.py
eugenexxx/laptop_docker
362ea238296e64fdd5c49ac55185d65b05e718cc
[ "Apache-2.0" ]
null
null
null
pages/homepage.py
eugenexxx/laptop_docker
362ea238296e64fdd5c49ac55185d65b05e718cc
[ "Apache-2.0" ]
null
null
null
pages/homepage.py
eugenexxx/laptop_docker
362ea238296e64fdd5c49ac55185d65b05e718cc
[ "Apache-2.0" ]
null
null
null
from webium import BasePage, Finds, Find from selenium.webdriver.common.by import By class Homepage(BasePage): catalog_header = Find(by=By.CLASS_NAME, value="Header__BlockCatalogLink") computers_label = Find(by=By.CSS_SELECTOR, value="a[href='/kompyutery/']") laptops_accessories_label = Find(by=By.XPATH, value="//a[contains(.,'Ноутбуки и аксессуары')]") laptops_label = Find(by=By.LINK_TEXT, value="Ноутбуки")
43
99
0.746512
369
0.80744
0
0
0
0
0
0
129
0.282276
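A usage sketch for the page object above, assuming webium's BasePage accepts a driver keyword argument and resolves each Find() locator lazily on attribute access; the storefront URL is hypothetical.

# Sketch: navigate to the laptops category via the page object above.
from selenium import webdriver

from pages.homepage import Homepage  # the module above

driver = webdriver.Chrome()
driver.get("https://shop.example.com/")  # hypothetical storefront URL

page = Homepage(driver=driver)           # assumes webium's driver kwarg
page.catalog_header.click()
page.laptops_accessories_label.click()
page.laptops_label.click()

driver.quit()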
843c2a9f5e722e97bca056334565acff3143bb58
3,112
py
Python
finetune/TensorFlow/download_model_and_dataset.py
cgouttham/microsoft-hackathon
7e50981e0f165543676504592ad26818db13432f
[ "MIT" ]
340
2019-05-15T06:42:37.000Z
2022-02-23T13:29:34.000Z
finetune/TensorFlow/download_model_and_dataset.py
cgouttham/microsoft-hackathon
7e50981e0f165543676504592ad26818db13432f
[ "MIT" ]
43
2019-05-14T21:26:06.000Z
2022-02-13T02:42:57.000Z
finetune/TensorFlow/download_model_and_dataset.py
cgouttham/microsoft-hackathon
7e50981e0f165543676504592ad26818db13432f
[ "MIT" ]
113
2019-05-23T08:21:48.000Z
2022-03-03T19:18:17.000Z
from __future__ import print_function import argparse import sys import os import shutil import zipfile import urllib.request parser = argparse.ArgumentParser() ## Required parameters parser.add_argument("--bert_model_name", default = None, type = str, required = True, help = "Name of pretrained BERT model. Possible values: " "uncased_L-12_H-768_A-12,uncased_L-24_H-1024_A-16,cased_L-12_H-768_A-12," "multilingual_L-12_H-768_A-12,chinese_L-12_H-768_A-12") parser.add_argument("--model_dump_path", default = None, type = str, required = True, help = "Path to the output model.") parser.add_argument("--glue_data_path", default = None, type = str, required = True, help = "Path to store downloaded GLUE dataset") args = parser.parse_args() bert_model_url_map = { 'uncased_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-12_H-768_A-12.zip', 'uncased_L-24_H-1024_A-16': 'https://storage.googleapis.com/bert_models/2018_10_18/uncased_L-24_H-1024_A-16.zip', 'cased_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_10_18/cased_L-12_H-768_A-12.zip', 'multilingual_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_11_03/multilingual_L-12_H-768_A-12.zip', 'chinese_L-12_H-768_A-12': 'https://storage.googleapis.com/bert_models/2018_11_03/chinese_L-12_H-768_A-12.zip' } if args.bert_model_name not in bert_model_url_map: sys.stderr.write('Unknown BERT model name ' + args.bert_model_name) sys.exit(1) pretrained_model_url = bert_model_url_map.get(args.bert_model_name) # make local directory for pretrained tensorflow BERT model tensorflow_model_dir = './tensorflow_model' if not os.path.exists(tensorflow_model_dir): os.makedirs(tensorflow_model_dir) # download and extract pretrained tensorflow BERT model download_file_name = 'tensorflow_model.zip' urllib.request.urlretrieve(pretrained_model_url, filename=download_file_name) print('Extracting pretrained model...') with zipfile.ZipFile(download_file_name, 'r') as z: z.extractall(tensorflow_model_dir) # make destination path if not os.path.exists(args.model_dump_path): os.makedirs(args.model_dump_path) files = ['bert_model.ckpt.meta', 'bert_model.ckpt.index', 'bert_model.ckpt.data-00000-of-00001', 'bert_config.json', 'vocab.txt'] for file in files: shutil.copy(os.path.join(tensorflow_model_dir, args.bert_model_name, file), os.path.join(args.model_dump_path, file)) print('Start to download GLUE dataset...\n') urllib.request.urlretrieve( 'https://gist.githubusercontent.com/W4ngatang/60c2bdb54d156a41194446737ce03e2e/raw/17b8dd0d724281ed7c3b2aeeda662b92809aadd5/download_glue_data.py', filename='download_glue_data.py') if os.system('python download_glue_data.py --data_dir {0} --tasks all'.format(args.glue_data_path)) != 0: sys.exit(1)
43.222222
151
0.70662
0
0
0
0
0
0
0
0
1,483
0.476542
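A minimal invocation sketch for the script above; the output directories are hypothetical, and the model name must be one of the keys in bert_model_url_map.

# Sketch: run the downloader with one of its supported model names.
import subprocess

subprocess.run([
    "python", "download_model_and_dataset.py",
    "--bert_model_name", "uncased_L-12_H-768_A-12",
    "--model_dump_path", "./bert_model",   # hypothetical output dir
    "--glue_data_path", "./glue_data",     # hypothetical dataset dir
], check=True)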
843ca99856298b4d971576c36ef2ff0db2f48136
1,386
py
Python
pdata_app/migrations/0035_auto_20180221_1515.py
jonseddon/primavera-dmt
1239044e37f070b925a3d06db68351f285df780c
[ "BSD-3-Clause" ]
null
null
null
pdata_app/migrations/0035_auto_20180221_1515.py
jonseddon/primavera-dmt
1239044e37f070b925a3d06db68351f285df780c
[ "BSD-3-Clause" ]
49
2018-11-14T17:00:03.000Z
2021-12-20T11:04:22.000Z
pdata_app/migrations/0035_auto_20180221_1515.py
jonseddon/primavera-dmt
1239044e37f070b925a3d06db68351f285df780c
[ "BSD-3-Clause" ]
2
2018-07-04T10:58:43.000Z
2018-09-29T14:55:08.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2018-02-21 15:15 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('pdata_app', '0034_auto_20180221_1158'), ] operations = [ migrations.AddField( model_name='observationdataset', name='cached_directories', field=models.CharField(blank=True, max_length=200, null=True, verbose_name=b'Directory'), ), migrations.AddField( model_name='observationdataset', name='cached_end_time', field=models.DateTimeField(blank=True, null=True, verbose_name=b'End Time'), ), migrations.AddField( model_name='observationdataset', name='cached_num_files', field=models.IntegerField(blank=True, null=True, verbose_name=b'# Data Files'), ), migrations.AddField( model_name='observationdataset', name='cached_start_time', field=models.DateTimeField(blank=True, null=True, verbose_name=b'Start Time'), ), migrations.AddField( model_name='observationdataset', name='cached_variables', field=models.CharField(blank=True, max_length=500, null=True, verbose_name=b'Variables'), ), ]
33.804878
101
0.622655
1,230
0.887446
0
0
0
0
0
0
360
0.25974
843d9417ba37601232cb640d55f1d03f38cd7f76
3,226
py
Python
python/examples/imagenet/image_reader.py
gongweibao/Serving
d234a1421e8b964c5fa3e9901f57f24aa49e3a91
[ "Apache-2.0" ]
null
null
null
python/examples/imagenet/image_reader.py
gongweibao/Serving
d234a1421e8b964c5fa3e9901f57f24aa49e3a91
[ "Apache-2.0" ]
null
null
null
python/examples/imagenet/image_reader.py
gongweibao/Serving
d234a1421e8b964c5fa3e9901f57f24aa49e3a91
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import cv2 import numpy as np class ImageReader(): def __init__(self): self.image_mean = [0.485, 0.456, 0.406] self.image_std = [0.229, 0.224, 0.225] self.image_shape = [3, 224, 224] self.resize_short_size = 256 self.interpolation = None def resize_short(self, img, target_size, interpolation=None): """resize image Args: img: image data target_size: resize short target size interpolation: interpolation mode Returns: resized image data """ percent = float(target_size) / min(img.shape[0], img.shape[1]) resized_width = int(round(img.shape[1] * percent)) resized_height = int(round(img.shape[0] * percent)) if interpolation: resized = cv2.resize( img, (resized_width, resized_height), interpolation=interpolation) else: resized = cv2.resize(img, (resized_width, resized_height)) return resized def crop_image(self, img, target_size, center): """crop image Args: img: images data target_size: crop target size center: crop mode Returns: img: cropped image data """ height, width = img.shape[:2] size = target_size if center == True: w_start = (width - size) // 2 h_start = (height - size) // 2 else: w_start = np.random.randint(0, width - size + 1) h_start = np.random.randint(0, height - size + 1) w_end = w_start + size h_end = h_start + size img = img[h_start:h_end, w_start:w_end, :] return img def process_image(self, sample): """ process_image """ mean = self.image_mean std = self.image_std crop_size = self.image_shape[1] data = np.fromstring(sample, np.uint8) img = cv2.imdecode(data, cv2.IMREAD_COLOR) if img is None: print("img is None, pass it.") return None if crop_size > 0: target_size = self.resize_short_size img = self.resize_short( img, target_size, interpolation=self.interpolation) img = self.crop_image(img, target_size=crop_size, center=True) img = img[:, :, ::-1] img = img.astype('float32').transpose((2, 0, 1)) / 255 img_mean = np.array(mean).reshape((3, 1, 1)) img_std = np.array(std).reshape((3, 1, 1)) img -= img_mean img /= img_std return img
32.26
74
0.591135
2,582
0.800372
0
0
0
0
0
0
1,060
0.32858
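A usage sketch for ImageReader above; the input path is hypothetical. Note that process_image expects the raw encoded bytes of an image, not a decoded array, and returns a normalised CHW float32 array (or None if decoding fails).

# Sketch: preprocess one image with the reader above.
from image_reader import ImageReader  # the module above

reader = ImageReader()
with open("test.jpg", "rb") as f:      # hypothetical input image
    raw = f.read()                     # raw encoded bytes
img = reader.process_image(raw)
if img is not None:
    print(img.shape)                   # expected: (3, 224, 224)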
843f97dd8ec994e4357ed02f96f7842db3d9a402
5,867
py
Python
cloudflare-deploy.py
antonini/certbot-hooks
61e200b7a038952f2f559953f47be62e1f992e39
[ "Apache-2.0" ]
null
null
null
cloudflare-deploy.py
antonini/certbot-hooks
61e200b7a038952f2f559953f47be62e1f992e39
[ "Apache-2.0" ]
null
null
null
cloudflare-deploy.py
antonini/certbot-hooks
61e200b7a038952f2f559953f47be62e1f992e39
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python import logging import sys import CloudFlare import os import re from os import path from certbot.plugins import dns_common __author__ = "Endrigo Antonini" __copyright__ = "Copyright 2020, Endrigo Antonini" __license__ = "Apache License 2.0" __version__ = "1.0" __maintainer__ = "Endrigo Antonini" __email__ = "eantonini@eidoscode.com" __status__ = "Production" logger = logging.getLogger(__name__) DEFAULT_CERT_FOLDER = "/etc/letsencrypt/live" CERTBOT_CONF_DIR = "/etc/letsencrypt/renewal" PROPERTIES = {} def read_file(filename): """ Read a file from disk and return all the content :param str filename: File name of the file that is going to read. :raises Exception: if the file doesn't exists """ if not path.isfile(filename): raise Exception("File {} doesn't exists!".format(filename)) with open(filename) as f: return f.read() def read_certificate(filename): return re.sub('\r?\n', '\\n', read_file(filename)) def read_properties_file(file): myvars = {} if not path.isfile(file): raise Exception("Config file {} doesn't exists!".format(file)) with open(file) as myfile: for line in myfile: name, var = line.partition("=")[::2] myvars[name.strip()] = var.strip() return myvars def read_domain_properties(domain): global PROPERTIES if domain in PROPERTIES: return PROPERTIES[domain] config_file="{}/{}.conf".format(CERTBOT_CONF_DIR, domain) myvars = read_properties_file(config_file) PROPERTIES[domain] = myvars return myvars def connect_cloudflare(domain): print("Connection to Cloudflare of domain {}".format(domain)) properties = read_domain_properties(domain) cred_file = None if not "dns_cloudflare_credentials" in properties: raise Exception("File {} doesn't have property dns_cloudflare_api_token on it.".format(cred_file)) cred_file = properties["dns_cloudflare_credentials"] props = read_properties_file(cred_file) if not "dns_cloudflare_api_token" in props: raise Exception("File {} doesn't have property dns_cloudflare_api_token on it.".format(cred_file)) api_key = props["dns_cloudflare_api_token"] return CloudFlare.CloudFlare(token=api_key) def find_zone_id(cf, domain): zone_name_guesses = dns_common.base_domain_name_guesses(domain) zones = [] # type: List[Dict[str, Any]] code = msg = None for zone_name in zone_name_guesses: params = {'name': zone_name, 'per_page': 1} try: zones = cf.zones.get(params=params) # zones | pylint: disable=no-member except CloudFlare.exceptions.CloudFlareAPIError as e: code = int(e) msg = str(e) hint = None if code == 6003: hint = ('Did you copy your entire API token/key? To use Cloudflare tokens, ' 'you\'ll need the python package cloudflare>=2.3.1.{}' .format(' This certbot is running cloudflare ' + str(CloudFlare.__version__) if hasattr(CloudFlare, '__version__') else '')) elif code == 9103: hint = 'Did you enter the correct email address and Global key?' elif code == 9109: hint = 'Did you enter a valid Cloudflare Token?' if hint: raise Exception('Error determining zone_id: {0} {1}. Please confirm ' 'that you have supplied valid Cloudflare API credentials. ({2})' .format(code, msg, hint)) else: logger.debug('Unrecognised CloudFlareAPIError while finding zone_id: %d %s. ' 'Continuing with next zone guess...', e, e) if zones: zone_id = zones[0]['id'] logger.debug('Found zone_id of %s for %s using name %s', zone_id, domain, zone_name) return zone_id raise Exception('Unable to determine zone_id for {0} using zone names: {1}. 
' 'Please confirm that the domain name has been entered correctly ' 'and is already associated with the supplied Cloudflare account.{2}' .format(domain, domain, ' The error from Cloudflare was:' ' {0} {1}'.format(code, msg) if code is not None else '')) def upload_certificate(domain): cf = connect_cloudflare(domain) private_key = read_certificate("{}/{}/privkey.pem".format(DEFAULT_CERT_FOLDER, domain)) fullchain = read_certificate("{}/{}/fullchain.pem".format(DEFAULT_CERT_FOLDER, domain)) zone_id = find_zone_id(cf, domain) logger.debug("Cloudflare Zone id {} of domain {} ".format(zone_id, domain)) data = {'certificate': fullchain, 'private_key': private_key, 'bundle_method': 'ubiquitous'} print("Going to deploy certificate.") try: cf.zones.custom_certificates.post(zone_id, data=data) print("Depoyed.") except CloudFlare.exceptions.CloudFlareAPIError as e: code = int(e) msg = str(e) hint = None if code == 1228: print("Cert already deployed.") else: logger.error(code) logger.error(msg) raise e return def main(): domains_str = os.environ['RENEWED_DOMAINS'] domains_lst = domains_str.split() for domain in domains_lst: print("") print("Start domain {} checking".format(domain)) zone_name_guesses = dns_common.base_domain_name_guesses(domain) zone_domain = None for temp_zone_domain in zone_name_guesses: temp_config_file = "{}/{}.conf".format(CERTBOT_CONF_DIR, temp_zone_domain) logger.debug("Checking zone {} -- {}".format(temp_zone_domain, temp_config_file)) if path.isfile(temp_config_file): zone_domain = temp_zone_domain break if zone_domain is None: raise Exception("It wasn't possible to continue. There is no config file for domain {}.".format(domain)) upload_certificate(zone_domain) if __name__ == '__main__': main()
32.236264
110
0.670701
0
0
0
0
0
0
0
0
1,947
0.331856
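An invocation sketch for the deploy hook above. certbot exports RENEWED_DOMAINS before calling a --deploy-hook; this simulates that for a hypothetical domain and assumes the renewal config and Cloudflare credential files the script reads already exist.

# Sketch: simulate certbot's deploy-hook environment for one domain.
import os
import subprocess

env = dict(os.environ, RENEWED_DOMAINS="www.example.com")  # hypothetical
subprocess.run(["python", "cloudflare-deploy.py"], env=env, check=True)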
84416b0aa44ff310962bcf2724c753d72fba9519
476
py
Python
main/schemas/location_lat.py
ohioh/ohioh_Framework_Cluster_3_Flask
69e50b9d697b5e8818305328335d26314b625732
[ "Apache-2.0" ]
1
2020-08-11T18:37:36.000Z
2020-08-11T18:37:36.000Z
main/schemas/location_lat.py
ohioh/ohioh_Framework_Cluster_3_Flask
69e50b9d697b5e8818305328335d26314b625732
[ "Apache-2.0" ]
null
null
null
main/schemas/location_lat.py
ohioh/ohioh_Framework_Cluster_3_Flask
69e50b9d697b5e8818305328335d26314b625732
[ "Apache-2.0" ]
null
null
null
from datetime import datetime from marshmallow import Schema, EXCLUDE import marshmallow.fields as ms_fields class LocationLatSchema(Schema): user_id = ms_fields.Str(required=True) user_timestamp = ms_fields.DateTime(default=datetime.now) location_id = ms_fields.Str(default="") latitude = ms_fields.Float(default=0.0) departure = ms_fields.Bool(default=False) accuracy = ms_fields.Float(default=0.0) class Meta: unknown = EXCLUDE
23.8
63
0.737395
360
0.756303
0
0
0
0
0
0
2
0.004202
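A load/validate sketch for the schema above, assuming marshmallow 3; the payload values are made up. Passing the callable datetime.now (rather than datetime.now(), which would freeze the timestamp at import time) lets the default be computed per use, and unknown keys are silently dropped because of Meta.unknown = EXCLUDE.

# Sketch: validate an incoming payload with the schema above.
from main.schemas.location_lat import LocationLatSchema

payload = {
    "user_id": "u-123",            # the only required field
    "latitude": 53.0793,
    "accuracy": 4.2,
    "battery": 88,                 # unknown key, dropped by EXCLUDE
}
data = LocationLatSchema().load(payload)
print(data)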
84418df14873be48f72ce565d6b9bb740aefa623
411
py
Python
Python/34-match.py
strawman2511/Learning
21ee7bdad376060503fdc0a739fed2d7bd40f9b9
[ "MIT" ]
1
2022-03-16T23:25:54.000Z
2022-03-16T23:25:54.000Z
Python/34-match.py
strawman2511/Learning
21ee7bdad376060503fdc0a739fed2d7bd40f9b9
[ "MIT" ]
null
null
null
Python/34-match.py
strawman2511/Learning
21ee7bdad376060503fdc0a739fed2d7bd40f9b9
[ "MIT" ]
null
null
null
# Only Python 3.10 and newer can run the match statement def check_point(point): match point: case (0, 0): print("Origin") case (0, y): print(f"Y = {y}") case (x, 0): print(f"X = {x}") case (x, y): print(f"X = {x}, Y = {y}") case _: raise ValueError("Not a point") x = 1 y = 2 point = (x, y) check_point(point)
20.55
51
0.452555
0
0
0
0
0
0
0
0
111
0.270073
8441be7fed412cc2b0c06a54eaceebee4908fef7
272
py
Python
incremental/settings.py
Nana0606/IUAD
c52439eb5bbbef6bd50533b5d9e142e18091d85e
[ "BSD-2-Clause" ]
1
2021-07-05T02:20:32.000Z
2021-07-05T02:20:32.000Z
incremental/settings.py
Nana0606/IUAD
c52439eb5bbbef6bd50533b5d9e142e18091d85e
[ "BSD-2-Clause" ]
null
null
null
incremental/settings.py
Nana0606/IUAD
c52439eb5bbbef6bd50533b5d9e142e18091d85e
[ "BSD-2-Clause" ]
1
2021-08-22T08:45:18.000Z
2021-08-22T08:45:18.000Z
# python3 # -*- coding: utf-8 -*- # @Author : lina # @Time : 2018/4/22 21:17 """ code function: define all parameters. """ matched_file_name = "../data/gcn_res.txt" wordvec_path = '../data/word2vec.model' incremental_path = "../data/incremental_res.txt"
20.923077
49
0.628676
0
0
0
0
0
0
0
0
202
0.742647
84425e6e37d98a459d555c6b47a64806ebbb0769
246
py
Python
app/newsletter/views.py
valeriansaliou/waaave-web
8a0cde773563865a905af38f5a0b723a43b17341
[ "RSA-MD" ]
1
2020-04-06T10:04:43.000Z
2020-04-06T10:04:43.000Z
app/newsletter/views.py
valeriansaliou/waaave-web
8a0cde773563865a905af38f5a0b723a43b17341
[ "RSA-MD" ]
null
null
null
app/newsletter/views.py
valeriansaliou/waaave-web
8a0cde773563865a905af38f5a0b723a43b17341
[ "RSA-MD" ]
null
null
null
from django.shortcuts import render from django.http import HttpResponseRedirect from django.core.urlresolvers import reverse def root(request): """ Newsletter > Root """ return render(request, 'newsletter/newsletter_root.jade')
24.6
61
0.756098
0
0
0
0
0
0
0
0
66
0.268293
84464ba3de7de8074ab4f3a72392eb3da290f401
16,826
py
Python
transformer_courses/BERT_distillation/PaddleSlim-develop/paddleslim/nas/search_space/mobilenet_block.py
wwhio/awesome-DeepLearning
2cc92edcf0c22bdfc670c537cc819c8fadf33fac
[ "Apache-2.0" ]
1,150
2021-06-01T03:44:21.000Z
2022-03-31T13:43:42.000Z
transformer_courses/BERT_distillation/PaddleSlim-develop/paddleslim/nas/search_space/mobilenet_block.py
wwhio/awesome-DeepLearning
2cc92edcf0c22bdfc670c537cc819c8fadf33fac
[ "Apache-2.0" ]
358
2021-06-01T03:58:47.000Z
2022-03-28T02:55:00.000Z
transformer_courses/BERT_distillation/PaddleSlim-develop/paddleslim/nas/search_space/mobilenet_block.py
wwhio/awesome-DeepLearning
2cc92edcf0c22bdfc670c537cc819c8fadf33fac
[ "Apache-2.0" ]
502
2021-05-31T12:52:14.000Z
2022-03-31T02:51:41.000Z
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License" # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import paddle.fluid as fluid from paddle.fluid.param_attr import ParamAttr from .search_space_base import SearchSpaceBase from .base_layer import conv_bn_layer from .search_space_registry import SEARCHSPACE from .utils import compute_downsample_num, check_points, get_random_tokens __all__ = ["MobileNetV1BlockSpace", "MobileNetV2BlockSpace"] @SEARCHSPACE.register class MobileNetV2BlockSpace(SearchSpaceBase): def __init__(self, input_size, output_size, block_num, block_mask=None, scale=1.0): super(MobileNetV2BlockSpace, self).__init__(input_size, output_size, block_num, block_mask) if self.block_mask == None: # use input_size and output_size to compute self.downsample_num self.downsample_num = compute_downsample_num(self.input_size, self.output_size) if self.block_num != None: assert self.downsample_num <= self.block_num, 'downsample numeber must be LESS THAN OR EQUAL TO block_num, but NOW: downsample numeber is {}, block_num is {}'.format( self.downsample_num, self.block_num) # self.filter_num means channel number self.filter_num = np.array([ 3, 4, 8, 12, 16, 24, 32, 48, 64, 80, 96, 128, 144, 160, 192, 224, 256, 320, 384, 512 ]) # 20 # self.k_size means kernel size self.k_size = np.array([3, 5]) #2 # self.multiply means expansion_factor of each _inverted_residual_unit self.multiply = np.array([1, 2, 3, 4, 5, 6]) #6 # self.repeat means repeat_num _inverted_residual_unit in each _invresi_blocks self.repeat = np.array([1, 2, 3, 4, 5, 6]) #6 self.scale = scale def init_tokens(self): return get_random_tokens(self.range_table()) def range_table(self): range_table_base = [] if self.block_mask != None: range_table_length = len(self.block_mask) else: range_table_length = self.block_num for i in range(range_table_length): range_table_base.append(len(self.multiply)) range_table_base.append(len(self.filter_num)) range_table_base.append(len(self.repeat)) range_table_base.append(len(self.k_size)) return range_table_base def token2arch(self, tokens=None): """ return mobilenetv2 net_arch function """ if tokens == None: tokens = self.init_tokens() self.bottleneck_params_list = [] if self.block_mask != None: for i in range(len(self.block_mask)): self.bottleneck_params_list.append( (self.multiply[tokens[i * 4]], self.filter_num[tokens[i * 4 + 1]], self.repeat[tokens[i * 4 + 2]], 2 if self.block_mask[i] == 1 else 1, self.k_size[tokens[i * 4 + 3]])) else: repeat_num = int(self.block_num / self.downsample_num) num_minus = self.block_num % self.downsample_num ### if block_num > downsample_num, add stride=1 block at last (block_num-downsample_num) layers for i in range(self.downsample_num): self.bottleneck_params_list.append( (self.multiply[tokens[i * 4]], self.filter_num[tokens[i * 4 + 1]], self.repeat[tokens[i * 4 + 2]], 2, self.k_size[tokens[i * 4 + 3]])) ### if block_num / downsample_num > 1, 
add (block_num / downsample_num) times stride=1 block for k in range(repeat_num - 1): kk = k * self.downsample_num + i self.bottleneck_params_list.append( (self.multiply[tokens[kk * 4]], self.filter_num[tokens[kk * 4 + 1]], self.repeat[tokens[kk * 4 + 2]], 1, self.k_size[tokens[kk * 4 + 3]])) if self.downsample_num - i <= num_minus: j = self.downsample_num * (repeat_num - 1) + i self.bottleneck_params_list.append( (self.multiply[tokens[j * 4]], self.filter_num[tokens[j * 4 + 1]], self.repeat[tokens[j * 4 + 2]], 1, self.k_size[tokens[j * 4 + 3]])) if self.downsample_num == 0 and self.block_num != 0: for i in range(len(self.block_num)): self.bottleneck_params_list.append( (self.multiply[tokens[i * 4]], self.filter_num[tokens[i * 4 + 1]], self.repeat[tokens[i * 4 + 2]], 1, self.k_size[tokens[i * 4 + 3]])) def net_arch(input, return_mid_layer=False, return_block=None): # all padding is 'SAME' in the conv2d, can compute the actual padding automatic. # bottleneck sequences in_c = int(32 * self.scale) mid_layer = dict() layer_count = 0 depthwise_conv = None for i, layer_setting in enumerate(self.bottleneck_params_list): t, c, n, s, k = layer_setting if s == 2: layer_count += 1 if check_points((layer_count - 1), return_block): mid_layer[layer_count - 1] = depthwise_conv input, depthwise_conv = self._invresi_blocks( input=input, in_c=in_c, t=t, c=int(c * self.scale), n=n, s=s, k=int(k), name='mobilenetv2_' + str(i + 1)) in_c = int(c * self.scale) if check_points(layer_count, return_block): mid_layer[layer_count] = depthwise_conv if return_mid_layer: return input, mid_layer else: return input, return net_arch def _shortcut(self, input, data_residual): """Build shortcut layer. Args: input(Variable): input. data_residual(Variable): residual layer. Returns: Variable, layer output. """ return fluid.layers.elementwise_add(input, data_residual) def _inverted_residual_unit(self, input, num_in_filter, num_filters, ifshortcut, stride, filter_size, expansion_factor, reduction_ratio=4, name=None): """Build inverted residual unit. Args: input(Variable), input. num_in_filter(int), number of in filters. num_filters(int), number of filters. ifshortcut(bool), whether using shortcut. stride(int), stride. filter_size(int), filter size. padding(str|int|list), padding. expansion_factor(float), expansion factor. name(str), name. Returns: Variable, layers output. """ num_expfilter = int(round(num_in_filter * expansion_factor)) channel_expand = conv_bn_layer( input=input, num_filters=num_expfilter, filter_size=1, stride=1, padding='SAME', num_groups=1, act='relu6', name=name + '_expand') bottleneck_conv = conv_bn_layer( input=channel_expand, num_filters=num_expfilter, filter_size=filter_size, stride=stride, padding='SAME', num_groups=num_expfilter, act='relu6', name=name + '_dwise', use_cudnn=False) depthwise_output = bottleneck_conv linear_out = conv_bn_layer( input=bottleneck_conv, num_filters=num_filters, filter_size=1, stride=1, padding='SAME', num_groups=1, act=None, name=name + '_linear') out = linear_out if ifshortcut: out = self._shortcut(input=input, data_residual=out) return out, depthwise_output def _invresi_blocks(self, input, in_c, t, c, n, s, k, name=None): """Build inverted residual blocks. Args: input: Variable, input. in_c: int, number of in filters. t: float, expansion factor. c: int, number of filters. n: int, number of layers. s: int, stride. k: int, filter size. name: str, name. Returns: Variable, layers output. 
""" first_block, depthwise_output = self._inverted_residual_unit( input=input, num_in_filter=in_c, num_filters=c, ifshortcut=False, stride=s, filter_size=k, expansion_factor=t, name=name + '_1') last_residual_block = first_block last_c = c for i in range(1, n): last_residual_block, depthwise_output = self._inverted_residual_unit( input=last_residual_block, num_in_filter=last_c, num_filters=c, ifshortcut=True, stride=1, filter_size=k, expansion_factor=t, name=name + '_' + str(i + 1)) return last_residual_block, depthwise_output @SEARCHSPACE.register class MobileNetV1BlockSpace(SearchSpaceBase): def __init__(self, input_size, output_size, block_num, block_mask=None, scale=1.0): super(MobileNetV1BlockSpace, self).__init__(input_size, output_size, block_num, block_mask) if self.block_mask == None: # use input_size and output_size to compute self.downsample_num self.downsample_num = compute_downsample_num(self.input_size, self.output_size) if self.block_num != None: assert self.downsample_num <= self.block_num, 'downsample numeber must be LESS THAN OR EQUAL TO block_num, but NOW: downsample numeber is {}, block_num is {}'.format( self.downsample_num, self.block_num) # self.filter_num means channel number self.filter_num = np.array([ 3, 4, 8, 12, 16, 24, 32, 48, 64, 80, 96, 128, 144, 160, 192, 224, 256, 320, 384, 512, 576, 640, 768, 1024, 1048 ]) self.k_size = np.array([3, 5]) self.scale = scale def init_tokens(self): return get_random_tokens(self.range_table()) def range_table(self): range_table_base = [] if self.block_mask != None: for i in range(len(self.block_mask)): range_table_base.append(len(self.filter_num)) range_table_base.append(len(self.filter_num)) range_table_base.append(len(self.k_size)) else: for i in range(self.block_num): range_table_base.append(len(self.filter_num)) range_table_base.append(len(self.filter_num)) range_table_base.append(len(self.k_size)) return range_table_base def token2arch(self, tokens=None): if tokens == None: tokens = self.init_tokens() self.bottleneck_params_list = [] if self.block_mask != None: for i in range(len(self.block_mask)): self.bottleneck_params_list.append( (self.filter_num[tokens[i * 3]], self.filter_num[tokens[i * 3 + 1]], 2 if self.block_mask[i] == 1 else 1, self.k_size[tokens[i * 3 + 2]])) else: repeat_num = int(self.block_num / self.downsample_num) num_minus = self.block_num % self.downsample_num for i in range(self.downsample_num): ### if block_num > downsample_num, add stride=1 block at last (block_num-downsample_num) layers self.bottleneck_params_list.append( (self.filter_num[tokens[i * 3]], self.filter_num[tokens[i * 3 + 1]], 2, self.k_size[tokens[i * 3 + 2]])) ### if block_num / downsample_num > 1, add (block_num / downsample_num) times stride=1 block for k in range(repeat_num - 1): kk = k * self.downsample_num + i self.bottleneck_params_list.append( (self.filter_num[tokens[kk * 3]], self.filter_num[tokens[kk * 3 + 1]], 1, self.k_size[tokens[kk * 3 + 2]])) if self.downsample_num - i <= num_minus: j = self.downsample_num * (repeat_num - 1) + i self.bottleneck_params_list.append( (self.filter_num[tokens[j * 3]], self.filter_num[tokens[j * 3 + 1]], 1, self.k_size[tokens[j * 3 + 2]])) if self.downsample_num == 0 and self.block_num != 0: for i in range(len(self.block_num)): self.bottleneck_params_list.append( (self.filter_num[tokens[i * 3]], self.filter_num[tokens[i * 3 + 1]], 1, self.k_size[tokens[i * 3 + 2]])) def net_arch(input, return_mid_layer=False, return_block=None): mid_layer = dict() layer_count = 0 for i, layer_setting in 
enumerate(self.bottleneck_params_list): filter_num1, filter_num2, stride, kernel_size = layer_setting if stride == 2: layer_count += 1 if check_points((layer_count - 1), return_block): mid_layer[layer_count - 1] = input input = self._depthwise_separable( input=input, num_filters1=filter_num1, num_filters2=filter_num2, stride=stride, scale=self.scale, kernel_size=int(kernel_size), name='mobilenetv1_{}'.format(str(i + 1))) if return_mid_layer: return input, mid_layer else: return input, return net_arch def _depthwise_separable(self, input, num_filters1, num_filters2, stride, scale, kernel_size, name=None): num_groups = input.shape[1] s_oc = int(num_filters1 * scale) if s_oc > num_groups: output_channel = s_oc - (s_oc % num_groups) else: output_channel = num_groups depthwise_conv = conv_bn_layer( input=input, filter_size=kernel_size, num_filters=output_channel, stride=stride, num_groups=num_groups, use_cudnn=False, name=name + '_dw') pointwise_conv = conv_bn_layer( input=depthwise_conv, filter_size=1, num_filters=int(num_filters2 * scale), stride=1, name=name + '_sep') return pointwise_conv
39.130233
178
0.530013
15,693
0.932664
0
0
15,737
0.935279
0
0
3,005
0.178593
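A sampling sketch for the search space above, assuming the PaddleSlim develop-branch imports resolve; the sizes are illustrative. Going from input 224 to output 7 needs five stride-2 blocks, so block_num must be at least 5 here.

# Sketch: sample one random architecture from the block search space.
from paddleslim.nas.search_space.mobilenet_block import MobileNetV2BlockSpace

space = MobileNetV2BlockSpace(input_size=224, output_size=7,
                              block_num=5, scale=1.0)
tokens = space.init_tokens()         # one random token vector
net_arch = space.token2arch(tokens)  # callable: net_arch(input_tensor)
print(len(tokens))                   # 4 tokens per block -> 20 here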
8446c1c3c431374432b1f4f4b191c7dc6650169d
1,580
py
Python
src/cltk/phonology/gmh/phonology.py
yelircaasi/cltk
1583aa24682543a1f33434a21918f039ca27d60c
[ "MIT" ]
757
2015-11-20T00:58:52.000Z
2022-03-31T06:34:24.000Z
src/cltk/phonology/gmh/phonology.py
yelircaasi/cltk
1583aa24682543a1f33434a21918f039ca27d60c
[ "MIT" ]
950
2015-11-17T05:38:29.000Z
2022-03-14T16:09:34.000Z
src/cltk/phonology/gmh/phonology.py
yelircaasi/cltk
1583aa24682543a1f33434a21918f039ca27d60c
[ "MIT" ]
482
2015-11-22T18:13:02.000Z
2022-03-20T21:22:02.000Z
"""Middle High German phonology tools """ from typing import List from cltk.phonology.gmh.transcription import Transcriber from cltk.phonology.syllabify import Syllabifier __author__ = ["Clément Besnier <clem@clementbesnier.fr>"] class MiddleHighGermanTranscription: """ Middle High German Transcriber """ def __init__(self): self.transcriber = Transcriber() def transcribe(self, word): """ >>> MiddleHighGermanTranscription().transcribe("Brynhild") 'Brynχɪld̥' :param word: word to transcribe :return: transcribed word """ return self.transcriber.transcribe(word, with_squared_brackets=False) def __repr__(self): return f"<MiddleHighGermanTranscription>" def __call__(self, word): return self.transcribe(word) class MiddleHighGermanSyllabifier: """ Middle High German syllabifier based on sonority phoneme hierarchy for MHG. Source: Resonances in Middle High German: New Methodologies in Prosody, Christopher Leo Hench, 2017 """ def __init__(self): self.syllabifier = Syllabifier(language="gmh") def syllabify(self, word: str) -> List[str]: """ >>> MiddleHighGermanSyllabifier().syllabify("Gunther") ['Gunt', 'her'] :param word: word to syllabify :return: syllabified word """ return self.syllabifier.syllabify(word, mode="MOP") def __repr__(self): return f"<MiddleHighGermanSyllabifier>" def __call__(self, word): return self.syllabify(word)
26.333333
103
0.666456
1,345
0.849116
0
0
0
0
0
0
757
0.477904
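A driver sketch for the two classes above; the expected outputs are the ones asserted in the module's own doctests.

# Sketch: exercise the transcriber and syllabifier above.
from cltk.phonology.gmh.phonology import (
    MiddleHighGermanTranscription,
    MiddleHighGermanSyllabifier,
)

print(MiddleHighGermanTranscription()("Brynhild"))   # 'Brynχɪld̥'
print(MiddleHighGermanSyllabifier()("Gunther"))      # ['Gunt', 'her']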
844826018788435b356bf6f9c896357ffb15fd09
11,680
py
Python
baiduspider/core/parser.py
samzhangjy/GSSpider
344d9c9053a5d5bf08692e0c817d30763dbd8ab7
[ "MIT" ]
31
2020-07-17T08:26:37.000Z
2021-08-24T02:28:50.000Z
baiduspider/core/parser.py
samzhangjy/GSSpider
344d9c9053a5d5bf08692e0c817d30763dbd8ab7
[ "MIT" ]
6
2020-07-14T17:13:17.000Z
2020-09-12T06:02:01.000Z
baiduspider/core/parser.py
samzhangjy/GSSpider
344d9c9053a5d5bf08692e0c817d30763dbd8ab7
[ "MIT" ]
12
2020-07-27T08:38:26.000Z
2021-07-28T16:05:58.000Z
import json
from html import unescape

from bs4 import BeautifulSoup

from baiduspider.core._spider import BaseSpider
from baiduspider.errors import ParseError


class Parser(BaseSpider):
    def __init__(self) -> None:
        super().__init__()

    def parse_web(self, content: str) -> dict:
        """Parse the page source of a Baidu web search.

        Args:
            content (str): Baidu web search HTML source, already decoded as UTF-8

        Returns:
            dict: the parsed results
        """
        soup = BeautifulSoup(content, 'html.parser')
        if soup.find('div', id='content_left') is None:
            raise ParseError('Invalid HTML content.')
        # Try to read the total number of search results
        try:
            num = int(str(soup.find('span', class_='nums_text').text).strip(
                '百度为您找到相关结果约').strip('个').replace(',', ''))
        except:
            num = 0
        # Look for the calculator widget
        calc = soup.find('div', class_='op_new_cal_screen')
        # Pre-results (calculator and related searches)
        pre_results = []
        # Pre-process related searches
        try:
            _related = soup.find('div', id='rs').find('table').find_all('th')
        except:
            _related = []
        related = []
        # Pre-process news results
        news = soup.find('div', class_='result-op',
                         tpl='sp_realtime_bigpic5', srcid='19')
        # Check whether a news block exists
        try:
            news_title = self._format(
                news.find('h3', class_='t').find('a').text)
        except:
            news_title = None
            news_detail = []
        else:
            news_rows = news.findAll('div', class_='c-row')
            news_detail = []
            prev_row = None
            for row in news_rows:
                try:
                    row_title = self._format(row.find('a').text)
                except AttributeError:
                    prev_row['des'] = self._format(row.text)
                    continue
                row_time = self._format(
                    row.find('span', class_='c-color-gray2').text)
                row_author = self._format(
                    row.find('span', class_='c-color-gray').text)
                row_url = self._format(row.find('a')['href'])
                news_detail.append({
                    'title': row_title,
                    'time': row_time,
                    'author': row_author,
                    'url': row_url,
                    'des': None
                })
                prev_row = news_detail[-1]
        # Pre-process short videos
        video = soup.find('div', class_='op-short-video-pc')
        if video:
            video_rows = video.findAll('div', class_='c-row')
            video_results = []
            for row in video_rows:
                row_res = []
                videos = row.findAll('div', class_='c-span6')
                for v in videos:
                    v_link = v.find('a')
                    v_title = v_link['title']
                    v_url = self._format(v_link['href'])
                    v_img = v_link.find('img')['src']
                    v_len = self._format(
                        v.find('div', class_='op-short-video-pc-duration-wrap').text)
                    v_from = self._format(
                        v.find('div', class_='op-short-video-pc-clamp1').text)
                    row_res.append({
                        'title': v_title,
                        'url': v_url,
                        'cover': v_img,
                        'length': v_len,
                        'origin': v_from
                    })
                video_results += row_res
        else:
            video_results = []
        # Append the related searches one by one
        for _ in _related:
            if _.text:
                related.append(_.text)
        # Pre-process Baike (encyclopedia) results
        baike = soup.find('div', class_='c-container', tpl='bk_polysemy')
        if baike:
            b_title = self._format(baike.find('h3').text)
            b_url = baike.find('a')['href']
            b_des = self._format(baike.find(
                'div', class_='c-span-last').find('p').text)
            try:
                b_cover = baike.find(
                    'div', class_='c-span6').find('img')['src']
                b_cover_type = 'image'
            except (TypeError, AttributeError):
                try:
                    b_cover = baike.find(
                        'video', class_='op-bk-polysemy-video')['data-src']
                    b_cover_type = 'video'
                except TypeError:
                    b_cover = None
                    b_cover_type = None
            baike = {
                'title': b_title,
                'url': b_url,
                'des': b_des,
                'cover': b_cover,
                'cover-type': b_cover_type
            }
        # Load the total number of search results
        if num != 0:
            pre_results.append(dict(type='total', result=num))
        # Load the calculator result
        if calc:
            pre_results.append(dict(type='calc',
                                    process=str(calc.find('p', class_='op_new_val_screen_process').find(
                                        'span').text),
                                    result=str(calc.find('p', class_='op_new_val_screen_result').find('span').text)))
        # Load the related searches
        if related:
            pre_results.append(dict(type='related', results=related))
        # Load the news
        if news_detail:
            pre_results.append(dict(type='news', results=news_detail))
        # Load the short videos
        if video_results:
            pre_results.append(dict(type='video', results=video_results))
        # Load the Baike result
        if baike:
            pre_results.append(dict(type='baike', result=baike))
        # Pre-process the source code
        error = False
        try:
            soup = BeautifulSoup(content, 'html.parser')
        # Error handling
        except IndexError:
            error = True
        finally:
            if error:
                raise ParseError(
                    'Failed to generate BeautifulSoup object for the given source code content.')
        results = soup.findAll('div', class_='result')
        res = []
        for result in results:
            soup = BeautifulSoup(self._minify(str(result)), 'html.parser')
            # Link
            href = soup.find('a').get('href').strip()
            # Title
            title = self._format(str(soup.find('a').text))
            # Time
            try:
                time = self._format(soup.findAll(
                    'div', class_='c-abstract')[0].find('span', class_='newTimeFactor_before_abs').text)
            except (AttributeError, IndexError):
                time = None
            try:
                # Description
                des = soup.find_all('div', class_='c-abstract')[0].text
                soup = BeautifulSoup(str(result), 'html.parser')
                des = self._format(des).lstrip(str(time)).strip()
            except IndexError:
                try:
                    des = des.replace('mn', '')
                except (UnboundLocalError, AttributeError):
                    des = None
            if time:
                time = time.split('-')[0].strip()
            # Baidu result links are encrypted, so each one would have to be
            # visited to resolve it.
            # Link analysis is skipped here for performance reasons.
            # if href is not None:
            #     try:
            #         # set a 1 second timeout, for performance reasons
            #         r = requests.get(href, timeout=1)
            #         href = r.url
            #     except:
            #         # fetching failed, fall back to the original encrypted link
            #         href = href
            #     # analyse the link
            #     if href:
            #         parse = urlparse(href)
            #         domain = parse.netloc
            #         prepath = parse.path.split('/')
            #         path = []
            #         for loc in prepath:
            #             if loc != '':
            #                 path.append(loc)
            #     else:
            #         domain = None
            #         path = None
            try:
                is_not_special = result['tpl'] not in [
                    'short_video_pc', 'sp_realtime_bigpic5', 'bk_polysemy']
            except KeyError:
                is_not_special = False
            if is_not_special:  # make sure this is not a special result type
                # Get the visible domain name
                try:
                    domain = result.find('div', class_='c-row').find('div', class_='c-span-last').find(
                        'div', class_='se_st_footer').find('a', class_='c-showurl').text
                except Exception as error:
                    try:
                        domain = result.find(
                            'div', class_='c-row').find('div', class_='c-span-last').find('p', class_='op-bk-polysemy-move').find('span', class_='c-showurl').text
                    except Exception as error:
                        try:
                            domain = result.find(
                                'div', class_='se_st_footer').find('a', class_='c-showurl').text
                        except:
                            domain = None
                if domain:
                    domain = domain.replace(' ', '')
            else:
                domain = None
            # Append the result
            if title and href and is_not_special:
                res.append({
                    'title': title,
                    'des': des,
                    'origin': domain,
                    'url': href,
                    'time': time,
                    'type': 'result'})
        soup = BeautifulSoup(content, 'html.parser')
        try:
            soup = BeautifulSoup(str(soup.findAll('div', id='page')
                                     [0]), 'html.parser')
            # Pagination
            pages_ = soup.findAll('span', class_='pc')
        except IndexError:
            pages_ = []
        pages = []
        for _ in pages_:
            pages.append(int(_.text))
        # Baidu hides the bottom navigation bar when there is only one page
        # of results, so default to 1 here; otherwise a `TypeError` is raised
        if not pages:
            pages = [1]
        # Assemble the final results
        result = pre_results
        result.extend(res)
        return {
            'results': result,
            # maximum page number
            'pages': max(pages)
        }

    def parse_pic(self, content: str) -> dict:
        """Parse the page source of a Baidu image search.

        Args:
            content (str): Baidu image search HTML source, already decoded as UTF-8

        Returns:
            dict: the parsed results
        """
        # Load the data embedded in the page's JavaScript.
        # The JavaScript is close enough to JSON (JavaScript Object Notation)
        # to be loaded with the json module directly, after pre-processing to
        # filter out the function calls and stray brackets.
        error = None
        try:
            data = json.loads(content.split('flip.setData(\'imgData\', ')[1].split(
                'flip.setData(')[0].split(']);')[0].replace(');', '').replace('<\\/strong>', '</strong>').replace('\\\'', '\''))
        except Exception as err:
            error = err
            if type(err) in [IndexError, AttributeError]:
                raise ParseError('Invalid HTML content.')
        finally:
            if error:
                raise ParseError(str(error))
        results = []
        for _ in data['data'][:-1]:
            if _:
                # Title
                title = str(_['fromPageTitle']).encode('utf-8').decode('utf-8')
                # Strip the HTML left in the title
                title = unescape(self._remove_html(title))
                # Link
                url = _['objURL']
                # Source domain
                host = _['fromURLHost']
                # Build the result
                result = {
                    'title': title,
                    'url': url,
                    'host': host
                }
                results.append(result)  # append the result
        # Get pagination
        bs = BeautifulSoup(content, 'html.parser')
        pages_ = bs.find('div', id='page').findAll('span', class_='pc')
        pages = []
        for _ in pages_:
            pages.append(int(_.text))
        return {
            'results': results,
            # take the largest page number
            'pages': max(pages)
        }
36.72956
162
0.447603
12365
0.986989
0
0
0
0
0
0
3619
0.288873
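A minimal usage sketch for the parser in the record above, assuming the surrounding baiduspider package is importable (the exact module path of Parser is an assumption) and that Baidu's result markup still matches the selectors in parse_web:

import requests

from baiduspider.core.parser import Parser  # assumed module path for the file above

# Fetch a Baidu web search results page and decode it as UTF-8,
# as parse_web's docstring requires.
html = requests.get(
    'https://www.baidu.com/s',
    params={'wd': 'python'},
    headers={'User-Agent': 'Mozilla/5.0'},
).content.decode('utf-8')

parsed = Parser().parse_web(html)
print(parsed['pages'])           # highest page number found
for item in parsed['results']:   # mixed list: 'total', 'calc', 'news', 'result', ...
    print(item.get('type'))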
8449b868c5c55bebc3c70da12ca1d458ad2a711a
2142
py
Python
virtual/lib/python3.6/site-packages/requests_unixsocket/adapters.py
marknesh/pitches
0a480d9bc2beafaefa0121393b1502cc05edab89
[ "MIT" ]
null
null
null
virtual/lib/python3.6/site-packages/requests_unixsocket/adapters.py
marknesh/pitches
0a480d9bc2beafaefa0121393b1502cc05edab89
[ "MIT" ]
10
2020-03-08T21:13:29.000Z
2021-04-08T19:41:14.000Z
flask/lib/python3.6/site-packages/requests_unixsocket/adapters.py
JOFLIX/grapevines
34576e01184570d79cc140b42ffb71d322132da6
[ "MIT", "Unlicense" ]
1
2020-11-04T06:48:34.000Z
2020-11-04T06:48:34.000Z
import socket

from requests.adapters import HTTPAdapter
from requests.compat import urlparse, unquote

try:
    from requests.packages.urllib3.connection import HTTPConnection
    from requests.packages.urllib3.connectionpool import HTTPConnectionPool
except ImportError:
    from urllib3.connection import HTTPConnection
    from urllib3.connectionpool import HTTPConnectionPool


# The following was adapted from some code from docker-py
# https://github.com/docker/docker-py/blob/master/docker/unixconn/unixconn.py
class UnixHTTPConnection(HTTPConnection):

    def __init__(self, unix_socket_url, timeout=60):
        """Create an HTTP connection to a unix domain socket

        :param unix_socket_url: A URL with a scheme of 'http+unix' and the
        netloc is a percent-encoded path to a unix domain socket. E.g.:
        'http+unix://%2Ftmp%2Fprofilesvc.sock/status/pid'
        """
        HTTPConnection.__init__(self, 'localhost', timeout=timeout)
        self.unix_socket_url = unix_socket_url
        self.timeout = timeout

    def connect(self):
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.settimeout(self.timeout)
        socket_path = unquote(urlparse(self.unix_socket_url).netloc)
        sock.connect(socket_path)
        self.sock = sock


class UnixHTTPConnectionPool(HTTPConnectionPool):

    def __init__(self, socket_path, timeout=60):
        HTTPConnectionPool.__init__(self, 'localhost', timeout=timeout)
        self.socket_path = socket_path
        self.timeout = timeout

    def _new_conn(self):
        return UnixHTTPConnection(self.socket_path, self.timeout)


class UnixAdapter(HTTPAdapter):

    def __init__(self, timeout=60):
        super(UnixAdapter, self).__init__()
        self.timeout = timeout

    def get_connection(self, socket_path, proxies=None):
        proxies = proxies or {}
        proxy = proxies.get(urlparse(socket_path.lower()).scheme)

        if proxy:
            raise ValueError('%s does not support specifying proxies'
                             % self.__class__.__name__)

        return UnixHTTPConnectionPool(socket_path, self.timeout)
35.114754
77
0.710551
1617
0.754902
0
0
0
0
0
0
466
0.217554
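The adapter in the record above is what lets requests speak HTTP over a unix domain socket. A short usage sketch (the socket path is illustrative; Session.mount is standard requests API):

import requests

from requests_unixsocket.adapters import UnixAdapter

session = requests.Session()
# Route every 'http+unix://' URL through the adapter defined above.
session.mount('http+unix://', UnixAdapter())

# The netloc is the percent-encoded socket path, exactly as the
# UnixHTTPConnection docstring describes (example path assumed).
r = session.get('http+unix://%2Fvar%2Frun%2Fdocker.sock/_ping')
print(r.status_code)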
844a39e610cb54a65514ca7f805b41f45b503518
3,021
py
Python
jarvis/resume/tests/test_utils.py
Anubhav722/blahblah
160698e06a02e671ac40de3113cd37d642e72e96
[ "MIT" ]
1
2019-01-03T06:10:04.000Z
2019-01-03T06:10:04.000Z
jarvis/resume/tests/test_utils.py
Anubhav722/blahblah
160698e06a02e671ac40de3113cd37d642e72e96
[ "MIT" ]
1
2021-03-31T19:11:52.000Z
2021-03-31T19:11:52.000Z
jarvis/resume/tests/test_utils.py
Anubhav722/blahblah
160698e06a02e671ac40de3113cd37d642e72e96
[ "MIT" ]
null
null
null
from django.test import TestCase
from jarvis.resume.utils.extractor import get_text
from jarvis.resume.utils.parser_helper import get_urls, get_url_response, url_categories, get_github_username, get_stackoverflow_userid, get_stackoverflow_username, get_name, get_id_from_linkedin_url, get_email
from unidecode import unidecode

path_to_test_data = 'resume/tests/test_data/1.pdf'

urls = ['https://github.com/imnithin',
        'http://imnithin.github.io',
        'https://gist.github.com/imnithin',
        'http://stackoverflow.com/users/2231236/nithin',
        'https://www.linkedin.com/in/imnithink']

categories = {'blog': ['http://imnithin.github.io'],
              'coding': [],
              'contributions': ['https://github.com/imnithin',
                                'https://gist.github.com/imnithin'],
              'forums': ['http://stackoverflow.com/users/2231236/nithin'],
              'others': [],
              'social': ['https://www.linkedin.com/in/imnithink']}

url_response = [{'name': 'https://github.com/imnithin', 'type': 'contributions'},
                {'name': 'https://gist.github.com/imnithin', 'type': 'contributions'},
                {'name': 'https://www.linkedin.com/in/imnithink', 'type': 'social'},
                {'name': 'http://imnithin.github.io', 'type': 'blog'},
                {'name': 'http://stackoverflow.com/users/2231236/nithin', 'type': 'forums'}]


class ParserHelperUtilsTest(TestCase):
    """Unit tests for Parser Helper Functions"""

    def setUp(self):
        self.text = get_text(path_to_test_data)

    def test_get_name(self):
        """Test User Name Obtained from jarvis.resume"""
        name = 'nithin'
        self.assertEqual(get_name(self.text)[0], name)

    def test_github_username(self):
        """Test GitHub Username"""
        github_user_name = 'imnithin'
        self.assertEqual(get_github_username(self.text), github_user_name)

    def test_stackoverflow_user_id(self):
        """Test StackOverflow user id"""
        stackoverflow_user_id = '2231236'
        self.assertEqual(get_stackoverflow_userid(self.text), stackoverflow_user_id)

    def test_stackoverflow_user_name(self):
        """Test StackOverflow User Name"""
        stackoverflow_user_name = 'nithin'
        self.assertEqual(get_stackoverflow_username(self.text), stackoverflow_user_name)

    def test_get_urls(self):
        self.assertEqual(get_urls(self.text), urls)

    def test_url_categories(self):
        values = list(categories.values()).sort()
        self.assertEqual(list(url_categories(urls).values()).sort(), values)

    def test_get_url_response(self):
        sorted_url_response = url_response.sort()
        self.assertEqual(get_url_response(categories).sort(), sorted_url_response)

    def test_get_id_from_linkedin_url(self):
        linkedin_id = 'imnithink'
        self.assertEqual(unidecode(get_id_from_linkedin_url(self.text)).strip(), linkedin_id)

    def test_get_email(self):
        email = 'nithinkool14@gmail.com'
        self.assertEqual(get_email(self.text)[0], email)
43.782609
210
0.676266
1658
0.548825
0
0
0
0
0
0
977
0.323403
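The fixtures in the test record above also document the parser_helper pipeline: get_urls extracts links from resume text, url_categories buckets them by type, and get_url_response flattens the buckets into (name, type) records. A sketch of that flow, assuming only the behaviour the assertions encode:

from jarvis.resume.utils.extractor import get_text
from jarvis.resume.utils.parser_helper import get_urls, url_categories, get_url_response

text = get_text('resume/tests/test_data/1.pdf')  # same fixture the tests use
found = get_urls(text)             # -> the `urls` list above
by_type = url_categories(found)    # -> the `categories` dict above
flat = get_url_response(by_type)   # -> the `url_response` list above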
844aff8b757e567eab04101d17c08cb3e245797f
8032
py
Python
profiles_weak.py
andreuvall/HybridPlaylistContinuation
6e31e50050c61a2c3ae55183e18b665fd54c7250
[ "BSD-2-Clause" ]
8
2017-06-04T11:42:49.000Z
2021-10-19T12:16:01.000Z
profiles_weak.py
andreuvall/HybridPlaylistContinuation
6e31e50050c61a2c3ae55183e18b665fd54c7250
[ "BSD-2-Clause" ]
null
null
null
profiles_weak.py
andreuvall/HybridPlaylistContinuation
6e31e50050c61a2c3ae55183e18b665fd54c7250
[ "BSD-2-Clause" ]
5
2017-08-27T17:02:14.000Z
2020-06-09T01:21:09.000Z
from __future__ import print_function
from __future__ import division

from sklearn.utils import check_random_state
from sklearn import preprocessing as prep

from utils.data import load_data, show_data_splits, shape_data
from utils.evaluation import evaluate
from utils.profiles import select_model, show_design, train, fit, compute_scores

import theano
import lasagne as lg
import numpy as np
import argparse
import os

'''
Hybrid music playlist continuation based on a song-to-playlist classifier.
We learn a classifier that takes song features as inputs and predicts the
playlists songs belong to. Once it is learned, such classifier can be used
to populate a matrix of song-playlist scores describing how well a song and
a playlist fit together. Thus, a playlist can be extended by selecting the
songs with highest score.

This approach is "hybrid" in the usual sense in the recommender systems
literature, i.e., it combines content (given by the song features) and cf
information (given by playlists examples).

As it is, this approach only works on the so-called weak generalization
setting. That is, the model is trained on the same playlists that will be
extended.
'''

if __name__ == '__main__':

    parser = argparse.ArgumentParser(description='Hybrid music playlist continuation based on a song-to-playlist classifier.')
    parser.add_argument('--model', type=str, help='path to the model specification file', metavar='')
    parser.add_argument('--dataset', type=str, help='path to the playlists dataset directory', metavar='')
    parser.add_argument('--msd', type=str, help='path to the MSD directory', metavar='')
    parser.add_argument('--train', action='store_true', help='train the song-to-playlist classifier with monitoring')
    parser.add_argument('--fit', action='store_true', help='fit the song-to-playlist classifier')
    parser.add_argument('--test', action='store_true', help='evaluate the playlist continuations')
    parser.add_argument('--ci', action='store_true', help='compute confidence intervals if True')
    parser.add_argument('--song_occ', type=int, help='test on songs observed song_occ times during training', nargs='+', metavar='')
    parser.add_argument('--metrics_file', type=str, help='file name to save metrics', metavar='')
    parser.add_argument('--seed', type=int, help='set random behavior', metavar='')
    args = parser.parse_args()

    # set random behavior
    rng = check_random_state(args.seed)
    lg.random.set_rng(rng)

    # set model configuration
    model = select_model(args.model)

    # prepare output directory
    data_name = os.path.basename(os.path.normpath(args.dataset))
    out_dir = os.path.join('params', 'profiles', model.name + '_' + data_name + '_weak')
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # load data: playlists, splits, features and artist info
    data = load_data(args.dataset, args.msd, model)
    playlists_coo, split_weak, _, features, song2artist = data

    # playlists_coo are the playlists stored in coordinate format
    playlists_idx, songs_idx, _, idx2song = playlists_coo

    # each playlist is split into a "query" of ~80% of the songs (train_idx +
    # valid_idx) and a "continuation" of ~20% of the songs (test_idx)
    train_idx, valid_idx, test_idx = split_weak

    # define splits for this experiment
    # train model on the training queries
    # validate model on the validation queries
    # fit the model on the full queries
    # extend all the playlists, using all queries and continuations
    train_idx = train_idx
    valid_idx = valid_idx
    fit_idx = np.hstack((train_idx, valid_idx))
    query_idx = fit_idx
    cont_idx = test_idx

    # provide data information
    show_data_splits(playlists_idx, songs_idx, idx2song, song2artist,
                     train_idx, valid_idx, fit_idx, query_idx, cont_idx)

    # provide model information
    print('\nNetwork:')
    show_design(model)

    if args.train:
        #
        # train the hybrid model while validating on withheld playlists
        #

        # prepare input song features and playlist targets at training
        X_train, Y_train = shape_data(
            playlists_idx, songs_idx, idx2song, features,
            mode='train', subset=train_idx
        )

        # prepare input song features and playlist targets at validation
        X_valid, Y_valid = shape_data(
            playlists_idx, songs_idx, idx2song, features,
            mode='test', subset=valid_idx
        )

        # preprocess input features if required
        # use the training song features to standardize the validation data
        if model.standardize:
            scaler = prep.RobustScaler()
            X_train = scaler.fit_transform(X_train)
            X_valid = scaler.transform(X_valid)
        if model.normalize:
            X_train = prep.normalize(X_train, norm=model.normalize)
            X_valid = prep.normalize(X_valid, norm=model.normalize)

        # train the classifier
        train(
            model=model,
            train_input=X_train.astype(theano.config.floatX),
            train_target=Y_train.astype(np.int8),
            valid_input=X_valid.astype(theano.config.floatX),
            valid_target=Y_valid.astype(np.int8),
            out_dir=out_dir,
            random_state=rng
        )

    if args.fit:
        #
        # fit the hybrid model
        #

        # prepare input song features and playlist targets at training
        X_fit, Y_fit = shape_data(
            playlists_idx, songs_idx, idx2song, features,
            mode='train', subset=fit_idx
        )

        # preprocess input features if required
        if model.standardize:
            X_fit = prep.robust_scale(X_fit)
        if model.normalize:
            X_fit = prep.normalize(X_fit, norm=model.normalize)

        # fit the classifier
        fit(
            model=model,
            fit_input=X_fit.astype(theano.config.floatX),
            fit_target=Y_fit.astype(np.int8),
            out_dir=out_dir,
            random_state=rng
        )

    if args.test:
        #
        # extend the playlists in the query split and evaluate the
        # continuations by comparing them to actual withheld continuations
        #

        # prepare input song features and playlist targets at test
        X_cont, Y_cont = shape_data(
            playlists_idx, songs_idx, idx2song, features,
            mode='test', subset=cont_idx
        )

        # preprocess input features if required
        # use the training song features to standardize the test data
        if model.standardize:
            X_fit, _ = shape_data(
                playlists_idx, songs_idx, idx2song, features,
                mode='train', subset=fit_idx
            )
            scaler = prep.RobustScaler()
            scaler.fit(X_fit)
            X_cont = scaler.transform(X_cont)
        if model.normalize:
            X_cont = prep.normalize(X_cont, norm=model.normalize)

        # songs in the "query" playlists need to be masked to make sure that
        # they are not recommended as continuations
        _, Y_query = shape_data(
            playlists_idx, songs_idx, idx2song, features,
            mode='test', subset=query_idx
        )

        # get number of song occurrences when fitting for cold-start analysis
        # Y_fit = Y_query
        train_occ = np.asarray(Y_query.sum(axis=1)).flatten()

        # compute the song-playlist scores
        cont_output = compute_scores(
            model=model,
            params_dir=out_dir,
            cont_input=X_cont.astype(theano.config.floatX),
            cont_target=Y_cont.astype(np.int8)
        )

        # evaluate the continuations
        evaluate(
            scores=[cont_output.T],
            targets=[Y_cont.T.tocsr()],
            queries=[Y_query.T.tocsr()],
            train_occ=[train_occ],
            k_list=[10, 30, 100],
            ci=args.ci,
            song_occ=args.song_occ,
            metrics_file=args.metrics_file
        )
37.886792
132
0.662475
0
0
0
0
0
0
0
0
3,050
0.379731
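The heavy lifting in the script above sits in compute_scores and evaluate, but the continuation step it implies is simple: mask the songs already in each query playlist, then rank the rest by score. A toy NumPy sketch of that step (shapes and values invented for illustration):

import numpy as np

# scores: one row per playlist, one column per song, as in cont_output.T
scores = np.array([[0.9, 0.2, 0.8, 0.1, 0.5],
                   [0.3, 0.7, 0.1, 0.6, 0.4]])
# query: True where a song is already in the playlist (cf. Y_query)
query = np.zeros_like(scores, dtype=bool)
query[0, [0, 2]] = True

masked = np.where(query, -np.inf, scores)    # never recommend seed songs
top_k = np.argsort(-masked, axis=1)[:, :3]   # 3 continuation candidates each
print(top_k)                                 # [[4 1 3], [1 3 4]]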
844bd667c2563dc8f5e9e83fc9eaf8e0c1857eb6
560
py
Python
news/admin.py
trojsten/news
aa1dfb4ee31a7f810dcd484eecafd49659292d76
[ "BSD-3-Clause" ]
null
null
null
news/admin.py
trojsten/news
aa1dfb4ee31a7f810dcd484eecafd49659292d76
[ "BSD-3-Clause" ]
6
2016-07-10T00:22:02.000Z
2021-12-23T22:43:41.000Z
news/admin.py
trojsten/news
aa1dfb4ee31a7f810dcd484eecafd49659292d76
[ "BSD-3-Clause" ]
2
2019-04-30T20:20:38.000Z
2021-02-16T18:41:01.000Z
from django.contrib import admin
from django.db import models
from easy_select2.widgets import Select2Multiple

from news.models import Entry


class EntryAdmin(admin.ModelAdmin):
    list_display = ('title', 'pub_date', 'author')
    readonly_fields = ('slug',)
    exclude = ('author',)
    formfield_overrides = {
        models.ManyToManyField: {'widget': Select2Multiple()}
    }

    def save_model(self, request, obj, form, change):
        if not change:
            obj.author = request.user
        obj.save()


admin.site.register(Entry, EntryAdmin)
25.454545
61
0.682143
376
0.671429
0
0
0
0
0
0
47
0.083929
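For reference, Django also offers a decorator form of the same registration; the following is equivalent to the admin.site.register call in the record above:

from django.contrib import admin

from news.models import Entry


@admin.register(Entry)  # equivalent to admin.site.register(Entry, EntryAdmin)
class EntryAdmin(admin.ModelAdmin):
    list_display = ('title', 'pub_date', 'author')
    # ... remaining options and save_model exactly as in the file above ...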
844c48d7274f542cdb76ae374555eb9e43a3cc30
21999
py
Python
deliverable1/analyzer/clientGUI.py
tonellotto/pira-project
13f1f40fd3339d60067c09396822af8f3c83239c
[ "MIT" ]
null
null
null
deliverable1/analyzer/clientGUI.py
tonellotto/pira-project
13f1f40fd3339d60067c09396822af8f3c83239c
[ "MIT" ]
null
null
null
deliverable1/analyzer/clientGUI.py
tonellotto/pira-project
13f1f40fd3339d60067c09396822af8f3c83239c
[ "MIT" ]
null
null
null
import analyzer_client as analyzer

from tkinter import *
from tkinter import filedialog
from tkinter import messagebox
from tkinter import ttk

import json
import os
from pathlib import Path

IP_ADDRESS = "localhost"
PORT = "8061"

ENGINE_CURR_OPTIONS = {}

ANALYZE_CURR_OPTIONS = {'language': 'en',
                        'entities': None,
                        'correlation_id': None,
                        'score_threshold': "0.1",
                        'return_decision_process': "0"
                        }

DENY_LIST = {'supported_entities': [],
             'valuesList': [],
             'length': 0
             }

REGEX_LIST = {'entities': [],
              'names_pattern': [],
              'patterns': [],
              'scores': [],
              'context_words': [],
              'length': 0
              }


class Frames(object):

    def __init__(self, root):
        self.root = root
        self.root.title('Presidio Analyzer gRPC Client')
        self.root.geometry('650x260')
        self.root.configure(bg="#0B0C10")
        self.root.resizable(0, 0)

        # Title
        frameTitle = Frame(self.root, width=650, height=60, bg="#0B0C10")
        frameTitle.grid(row=0, columnspan=2)
        Label(frameTitle, text="Microsoft Presidio Analyzer", font=("Helvetica", 17, "bold"), bg="#0B0C10", fg="#C5C6C7", anchor=CENTER).pack(ipady=20)

        # Settings
        frameBtnSettings = Frame(self.root, bg="#0B0C10")
        frameBtnSettings.grid(row=2, columnspan=2)
        settingsButton = Button(frameBtnSettings, text="Settings", font=("Helvetica", 14), bg="#0B0C10", fg="#C5C6C7", command=self.settings).pack(pady=10, ipadx=33, ipady=3)

        # Start analyzer
        frameBtnAnalyze = Frame(self.root, width=650, height=1, bg="#0B0C10")
        frameBtnAnalyze.grid(row=1, columnspan=2)
        analyzeBtn = Button(frameTitle, text="Start analyzer", font=("Helvetica", 14), bg="#0B0C10", fg="#C5C6C7", command=self.startAnalyzer).pack(pady=22, ipadx=10, ipady=3)

    def startAnalyzer(self):
        dir_path = os.path.dirname(os.path.realpath(__file__))
        path = Path(dir_path)
        self.root.filenames = filedialog.askopenfilenames(initialdir=str(path.parent.absolute()) + "/files", title="Select A File", filetypes=(("txt files", "*.txt"), ("all files", "*.*")))

        if self.root.filenames:
            clientAnalyzer = analyzer.ClientEntity(IP_ADDRESS, PORT)

            # send options if set
            for elem in ANALYZE_CURR_OPTIONS:
                clientAnalyzer.setupOptions(elem, ANALYZE_CURR_OPTIONS[elem], "ANALYZE_OPTIONS")

            if DENY_LIST['length'] > 0:
                clientAnalyzer.setupDenyList(DENY_LIST['supported_entities'], DENY_LIST['valuesList'])

            if REGEX_LIST['length'] > 0:
                patterns = analyzer.createPatternInfo(1, REGEX_LIST['names_pattern'], REGEX_LIST['patterns'], REGEX_LIST['scores'])
                clientAnalyzer.setupRegex(REGEX_LIST['entities'][0], patterns, REGEX_LIST['context_words'][0])

            progressWindow = Toplevel()
            progressWindow.title("Analyzer Status")
            progressWindow.geometry("330x80")
            progressWindow.configure(bg="white")
            self.root.update_idletasks()

            Label(progressWindow, text="Analyzer process is starting... it may take a while!", font=("Helvetica", 10), bg="white", fg="black").pack(side=TOP, padx=15, pady=7)
            progressBar = ttk.Progressbar(progressWindow, orient=HORIZONTAL, length=200, mode="determinate")
            progressBar.pack(side=TOP, pady=14)
            self.root.update_idletasks()

            filenameList = []
            for path in self.root.filenames:
                filename, ext = os.path.basename(path).split(".")
                filenameList.append(filename)
                res = clientAnalyzer.sendRequestAnalyze(os.path.basename(filename))
                if res == -2:
                    progressWindow.destroy()
                    messagebox.showerror("gRPC Server Error", "Cannot connect to the server! Check your server settings")
                    break

                if progressBar['value'] < 100:
                    progressBar['value'] += (100 / len(self.root.filenames))
                    self.root.update_idletasks()

            if int(progressBar['value']) == 100:
                messagebox.showinfo(parent=progressWindow, message='Analyzer process completed!')
                progressWindow.destroy()

            if res != -2:
                clientAnalyzer.closeConnection()
                self.readResults(filenameList)

    def readResults(self, filenameList):
        self.result = Toplevel()
        self.result.title("Presidio Analyzer gRPC - RESULTS")
        self.result.geometry("850x450")
        self.result.configure(bg="#0B0C10")
        self.result.resizable(0, 0)

        ## List filename-results.txt
        frameList = Frame(self.result, width=150, height=30)
        frameList.pack(side=LEFT, padx=13)

        # Scrollbar
        resultsScrollbar = Scrollbar(frameList, orient=VERTICAL)

        listbox_widget = Listbox(frameList, yscrollcommand=resultsScrollbar.set, height=20, font=("Courier", 12), bg="#1F2833", fg="#C5C6C7")

        # configure scrollbar
        resultsScrollbar.config(command=listbox_widget.yview)
        resultsScrollbar.pack(side=RIGHT, fill=Y)
        ## END LIST

        ## Frame that will contain results
        frameResults = Frame(self.result, width=680, bg="#0B0C10")
        frameResults.pack(side=RIGHT, pady=15, padx=10)

        self.text_widget = Text(frameResults, font=("Courier", 13), spacing1=3, bg="#1F2833", fg="#C5C6C7")
        self.text_widget.pack(pady=10, padx=15)
        ## END FRAME

        for filename in filenameList:
            listbox_widget.insert(END, filename)

        listbox_widget.bind('<<ListboxSelect>>', self.clickEvent)
        listbox_widget.pack()

    def clickEvent(self, e):
        dir_path = os.path.dirname(os.path.realpath(__file__))
        path = Path(dir_path)
        currSelection = e.widget.curselection()
        filename = e.widget.get(currSelection)
        #print(filename)
        with open(str(path.parent.absolute()) + "/files/" + filename + ".txt", "r") as originalFile:
            originalText = originalFile.read()

        with open(str(path.parent.absolute()) + "/analyzer-results/" + filename + "-results.txt", "r") as resultsFile:
            self.text_widget.configure(state='normal')
            self.text_widget.delete("1.0", END)
            for line in resultsFile:
                resultStr = json.loads(line)
                #print(resultStr)
                start = resultStr['start']
                end = resultStr['end']
                self.text_widget.insert(END, f"FOUND WORD: {originalText[start:end]}\n\n")
                self.text_widget.insert(END, f"ENTITY TYPE: {resultStr['entity_type']}\nSTART: {resultStr['start']}\nEND: {resultStr['end']}\nSCORE: {resultStr['score']}")
                self.text_widget.insert(END, "\n-------------------------------------------------\n")
            self.text_widget.configure(state='disabled')

    def settings(self):
        self.settings = Toplevel()
        self.settings.title("Presidio Analyzer gRPC - Settings")
        self.settings.geometry("790x430")
        self.settings.configure(bg="#0B0C10")
        self.settings.resizable(0, 0)

        ## List of options
        frameList = Frame(self.settings, width=100, height=30)
        frameList.pack(side=LEFT, padx=8, pady=10)

        listbox_widget = Listbox(frameList, height=20, font=("Courier", 12), bg="#1F2833", fg="#C5C6C7")

        ## Container options
        self.frameOptions = Frame(self.settings, bg="#0B0C10")
        self.frameOptions.pack(side=RIGHT, pady=15, padx=10, expand=True)

        listbox_widget.insert(0, "Server settings")
        listbox_widget.insert(1, "PII Recognition")
        listbox_widget.insert(2, "Analyzer Options")

        listbox_widget.bind('<<ListboxSelect>>', self.clickEventOption)
        listbox_widget.pack()

    def clickEventOption(self, e):
        currSelection = e.widget.curselection()
        optionName = e.widget.get(currSelection)

        for widget in self.frameOptions.winfo_children():
            widget.destroy()

        if optionName == "Server settings":
            Label(self.frameOptions, text="SERVER IP: " + IP_ADDRESS + " | SERVER PORT: " + str(PORT), font=("courier", 10), bg="#0B0C10", fg="#C5C6C7").pack(side=TOP)

            Label(self.frameOptions, text="Server IP", font=("helvetica", 15), bg="#0B0C10", fg="#C5C6C7").pack(side=TOP, pady=10)
            self.server_ip = Entry(self.frameOptions, font=("helvetica", 13), justify=CENTER, bd=3)
            self.server_ip.pack(anchor=S, pady=5, padx=20, ipady=2)

            Label(self.frameOptions, text="Server Port", font=("helvetica", 15), bg="#0B0C10", fg="#C5C6C7").pack(side=TOP, pady=10)
            self.server_port = Entry(self.frameOptions, font=("helvetica", 13), justify=CENTER, bd=3)
            self.server_port.pack(anchor=S, pady=5, padx=20, ipady=2)

            Button(self.frameOptions, text="Save", font=("helvetica", 12), bg="#0B0C10", fg="#C5C6C7", command=self.setupServer).pack(side=TOP, ipadx=10, pady=10)

            if IP_ADDRESS != "null" and PORT != "null":
                self.server_ip.insert(0, IP_ADDRESS)
                self.server_port.insert(0, PORT)

        elif optionName == "Analyzer Options":
            frameNameOptions = Frame(self.frameOptions, width=650, height=60, bg="#0B0C10")
            frameNameOptions.grid(row=0, column=0, padx=12)
            frameValues = Frame(self.frameOptions, width=650, height=60, bg="#0B0C10")
            frameValues.grid(row=0, column=1)

            Label(frameNameOptions, text="LANGUAGE", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=0, column=0, pady=5)
            self.language = Entry(frameValues, font=("helvetica", 13), bd=3)
            self.language.grid(row=0, column=0, pady=5)

            Label(frameNameOptions, text="ENTITIES", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=1, column=0, pady=5)
            self.entities = Entry(frameValues, font=("helvetica", 13), bd=3)
            self.entities.grid(row=1, column=0, pady=5)

            Label(frameNameOptions, text="CORRELATION ID", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=2, column=0, pady=5)
            self.corr_id = Entry(frameValues, font=("helvetica", 13), bd=3)
            self.corr_id.grid(row=2, column=0, pady=5)

            Label(frameNameOptions, text="SCORE THRESHOLD", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=3, column=0, pady=5)
            self.score = Entry(frameValues, font=("helvetica", 13), bd=3)
            self.score.grid(row=3, column=0, pady=5)

            self.decision_process = IntVar(None, int(ANALYZE_CURR_OPTIONS['return_decision_process']))
            Label(frameNameOptions, text="RETURN DECISION PROCESS", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=4, column=0, pady=5)
            Radiobutton(frameValues, text="YES", font=("helvetica", 10), variable=self.decision_process, value=1).grid(row=4, sticky=W, pady=5)
            Radiobutton(frameValues, text="NO", font=("helvetica", 10), variable=self.decision_process, value=0).grid(row=4, sticky=E, pady=5)

            Button(self.frameOptions, text="Save", font=("helvetica", 12), bg="#0B0C10", fg="#C5C6C7", command=self.saveAnalyzeConfig).grid(row=5, columnspan=2, ipadx=10, pady=20)

            # load the current config
            self.language.insert(0, ANALYZE_CURR_OPTIONS['language'])
            if ANALYZE_CURR_OPTIONS['entities'] != None:
                self.entities.insert(0, ANALYZE_CURR_OPTIONS['entities'])
            if ANALYZE_CURR_OPTIONS['correlation_id'] != None:
                self.corr_id.insert(0, ANALYZE_CURR_OPTIONS['correlation_id'])
            self.score.insert(0, ANALYZE_CURR_OPTIONS['score_threshold'])

        elif optionName == "PII Recognition":
            frameMenu = Frame(self.frameOptions, bg="#0B0C10")
            frameMenu.grid(row=0, column=0, padx=12)
            self.frameInsertOption = Frame(self.frameOptions, width=300, height=150, bg="#0B0C10")
            self.frameInsertOption.grid(row=0, column=1, padx=12)

            # menu options
            self.value_inside = StringVar()
            # Set the default value of the variable
            self.value_inside.set("Select an option")
            recognition_menu = OptionMenu(frameMenu, self.value_inside, "Select an option", *("Regex", "Deny List"), command=self.optionChanged)
            recognition_menu.pack()

            self.frameCurr = Frame(self.frameOptions, width=520, height=100, bg="#0B0C10")
            self.frameCurr.grid(row=1, columnspan=2, pady=7)

    def setupServer(self):
        global IP_ADDRESS, PORT
        IP_ADDRESS = self.server_ip.get()
        PORT = self.server_port.get()
        messagebox.showinfo(parent=self.settings, title="Save", message=f"Server options saved successfully!")

    def saveAnalyzeConfig(self):
        if self.language.get() != "en":
            messagebox.showerror("Setup Error", "Only English language is supported!")
        else:
            ANALYZE_CURR_OPTIONS['language'] = self.language.get()

        if self.entities.get() == "" or str(self.entities.get()).lower() == "none":
            ANALYZE_CURR_OPTIONS['entities'] = None
        else:
            ANALYZE_CURR_OPTIONS['entities'] = self.entities.get()

        if self.corr_id.get() == "":
            ANALYZE_CURR_OPTIONS['correlation_id'] = None
        else:
            ANALYZE_CURR_OPTIONS['correlation_id'] = self.corr_id.get()

        ANALYZE_CURR_OPTIONS['score_threshold'] = self.score.get()
        ANALYZE_CURR_OPTIONS['return_decision_process'] = str(self.decision_process.get())
        print(ANALYZE_CURR_OPTIONS)
        messagebox.showinfo(parent=self.settings, title="Save", message=f"Options saved successfully!")

    def optionChanged(self, e):
        for widget in self.frameInsertOption.winfo_children():
            widget.destroy()
        for widget in self.frameCurr.winfo_children():
            widget.destroy()

        if self.value_inside.get() == "Deny List":
            Label(self.frameInsertOption, text="ENTITY", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=0, column=0, pady=5, padx=5)
            self.entity = Entry(self.frameInsertOption, font=("helvetica", 13), bd=3)
            self.entity.grid(row=0, column=1, pady=5)

            Label(self.frameInsertOption, text="VALUES LIST", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=1, column=0, pady=5, padx=5)
            self.values = Entry(self.frameInsertOption, font=("helvetica", 13), bd=3)
            self.values.grid(row=1, column=1, pady=5)

            Button(self.frameInsertOption, text="Save", font=("helvetica", 12), bg="#0B0C10", fg="#C5C6C7", command=self.setupDenyList).grid(row=3, column=0, ipadx=10, pady=20)
            Button(self.frameInsertOption, text="Reset", font=("helvetica", 12), bg="#0B0C10", fg="#C5C6C7", command=self.clearDenyConfig).grid(row=3, column=1, ipadx=10, pady=20)

            # Print current deny lists
            self.deny_widget = Text(self.frameCurr, font=("helvetica", 13), width=60, height=10, spacing1=3, bg="#1F2833", fg="#C5C6C7")
            self.deny_widget.grid(row=0, column=0)
            for i in range(DENY_LIST['length']):
                self.deny_widget.insert(END, f"{DENY_LIST['supported_entities'][i]} - {DENY_LIST['valuesList'][i]}\n")
            self.deny_widget.configure(state='disabled')

        elif self.value_inside.get() == "Regex":
            Label(self.frameInsertOption, text="ENTITY", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=0, column=0, pady=5, padx=5)
            self.entity_regex = Entry(self.frameInsertOption, font=("helvetica", 13), bd=3)
            self.entity_regex.grid(row=0, column=1, pady=5)

            Label(self.frameInsertOption, text="NAME PATTERN", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=1, column=0, pady=5, padx=5)
            self.name_pattern = Entry(self.frameInsertOption, font=("helvetica", 13), bd=3)
            self.name_pattern.grid(row=1, column=1, pady=5)

            Label(self.frameInsertOption, text="REGEX", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=2, column=0, pady=5, padx=5)
            self.regex = Entry(self.frameInsertOption, font=("helvetica", 13), bd=3)
            self.regex.grid(row=2, column=1, pady=5)

            Label(self.frameInsertOption, text="SCORE", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=3, column=0, pady=5, padx=5)
            self.score_regex = Entry(self.frameInsertOption, font=("helvetica", 13), bd=3)
            self.score_regex.grid(row=3, column=1, pady=5)

            Label(self.frameInsertOption, text="CONTEXT WORD", font=("helvetica", 13), bg="#0B0C10", fg="#C5C6C7").grid(row=4, column=0, pady=5, padx=5)
            self.context = Entry(self.frameInsertOption, font=("helvetica", 13), bd=3)
            self.context.grid(row=4, column=1, pady=5)

            Button(self.frameInsertOption, text="Save", font=("helvetica", 12), bg="#0B0C10", fg="#C5C6C7", command=self.setupRegexList).grid(row=5, column=0, ipadx=10, pady=10)
            Button(self.frameInsertOption, text="Reset", font=("helvetica", 12), bg="#0B0C10", fg="#C5C6C7", command=self.clearRegexConfig).grid(row=5, column=1, ipadx=10, pady=10)

            self.regex_widget = Text(self.frameCurr, font=("helvetica", 13), width=60, height=6, spacing1=3, bg="#1F2833", fg="#C5C6C7")
            self.regex_widget.grid(row=0, column=0)
            # print current regex patterns
            for i in range(REGEX_LIST['length']):
                self.regex_widget.insert(END, f"{REGEX_LIST['entities'][i]} - {REGEX_LIST['names_pattern'][i]} - {REGEX_LIST['patterns'][i]} - {REGEX_LIST['scores'][i]} - {REGEX_LIST['context_words'][i]}\n")
            self.regex_widget.configure(state='disabled')

    def setupDenyList(self):
        if len(self.entity.get()) > 2 and len(self.values.get()) > 2:
            DENY_LIST['supported_entities'].append(self.entity.get())
            DENY_LIST['valuesList'].append(self.values.get())
            DENY_LIST['length'] += 1

            self.deny_widget.configure(state='normal')
            self.deny_widget.insert(END, f"{self.entity.get()} - {self.values.get()}\n")
            self.deny_widget.configure(state='disabled')
            messagebox.showinfo(parent=self.settings, title="Save", message=f"Deny list for {self.entity.get()} saved!")
        else:
            messagebox.showerror(parent=self.settings, title="Error", message="Fill in all the fields!")
        #print(DENY_LIST)

    def clearDenyConfig(self):
        answer = messagebox.askyesno(parent=self.settings, title=None, message="Do you want to reset deny list configuration?")
        if answer:
            DENY_LIST['supported_entities'] = []
            DENY_LIST['valuesList'] = []
            DENY_LIST['length'] = 0
            self.deny_widget.configure(state='normal')
            self.deny_widget.delete("1.0", END)
            self.deny_widget.configure(state='disabled')

    def setupRegexList(self):
        if len(self.entity_regex.get()) > 2:
            REGEX_LIST['entities'].append(self.entity_regex.get())
            REGEX_LIST['names_pattern'].append(self.name_pattern.get())
            REGEX_LIST['patterns'].append(self.regex.get())
            REGEX_LIST['scores'].append(self.score_regex.get())
            REGEX_LIST['context_words'].append(self.context.get())
            REGEX_LIST['length'] += 1

            self.regex_widget.configure(state='normal')
            self.regex_widget.insert(END, f"{self.entity_regex.get()} - {self.name_pattern.get()} - {self.regex.get()} - {self.score_regex.get()} - {self.context.get()}\n")
            self.regex_widget.configure(state='disabled')
            messagebox.showinfo(parent=self.settings, title="Save", message=f"Regex for {self.entity_regex.get()} saved!")
        else:
            messagebox.showerror(parent=self.settings, title="Error", message="Fill in all the fields!")
        #print(REGEX_LIST)

    def clearRegexConfig(self):
        answer = messagebox.askyesno(parent=self.settings, title=None, message="Do you want to reset regex configuration?")
        if answer:
            REGEX_LIST['entities'] = []
            REGEX_LIST['names_pattern'] = []
            REGEX_LIST['patterns'] = []
            REGEX_LIST['scores'] = []
            REGEX_LIST['context_words'] = []
            REGEX_LIST['length'] = 0
            self.regex_widget.configure(state='normal')
            self.regex_widget.delete("1.0", END)
            self.regex_widget.configure(state='disabled')


root = Tk()
app = Frames(root)
root.mainloop()
50.456422
208
0.589027
21338
0.969953
0
0
0
0
0
0
4513
0.205146
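Stripped of the Tk plumbing, the GUI above drives the gRPC client through a small call sequence. A sketch of that sequence, using only the analyzer_client calls visible in the file (argument formats beyond what the GUI shows are assumptions):

import analyzer_client as analyzer

client = analyzer.ClientEntity("localhost", "8061")

# Mirror what startAnalyzer does before sending files.
client.setupOptions('language', 'en', "ANALYZE_OPTIONS")
client.setupDenyList(["TITLE"], ["Mr, Mrs, Ms"])  # entity names + value lists (format assumed)

res = client.sendRequestAnalyze("document1")      # -2 signals a connection failure
if res != -2:
    client.closeConnection()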
844d85dc62ed6dfb3a4f73a387bf2a08be758a8e
53
py
Python
05-functions_and_modules/mods_1.py
palmieric/Tecnologie_Web-Introduzione_a_Python
b10ce49a947b239ca2af1938248f7191937b2f89
[ "CC0-1.0" ]
3
2021-05-17T14:48:42.000Z
2021-05-24T10:12:06.000Z
05-functions_and_modules/mods_1.py
palmieric/Tecnologie_Web-Introduzione_a_Python
b10ce49a947b239ca2af1938248f7191937b2f89
[ "CC0-1.0" ]
null
null
null
05-functions_and_modules/mods_1.py
palmieric/Tecnologie_Web-Introduzione_a_Python
b10ce49a947b239ca2af1938248f7191937b2f89
[ "CC0-1.0" ]
2
2021-05-17T13:52:15.000Z
2021-05-24T10:44:54.000Z
# mods 1

import random

print(random.randint(1,10))
10.6
27
0.716981
0
0
0
0
0
0
0
0
8
0.150943
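Since mods_1.py exists to introduce the random module, one extra teaching point worth a note: seeding makes the draw reproducible (standard library behaviour):

import random

random.seed(42)              # fix the generator's state
print(random.randint(1,10))  # now prints the same value on every run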