| column | dtype | min | max | nullable (⌀) |
|---|---|---|---|---|
| hexsha | stringlengths | 40 | 40 | |
| size | int64 | 5 | 2.06M | |
| ext | stringclasses | 11 values | | |
| lang | stringclasses | 1 value | | |
| max_stars_repo_path | stringlengths | 3 | 251 | |
| max_stars_repo_name | stringlengths | 4 | 130 | |
| max_stars_repo_head_hexsha | stringlengths | 40 | 78 | |
| max_stars_repo_licenses | sequencelengths | 1 | 10 | |
| max_stars_count | int64 | 1 | 191k | ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 | 24 | ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 | 24 | ⌀ |
| max_issues_repo_path | stringlengths | 3 | 251 | |
| max_issues_repo_name | stringlengths | 4 | 130 | |
| max_issues_repo_head_hexsha | stringlengths | 40 | 78 | |
| max_issues_repo_licenses | sequencelengths | 1 | 10 | |
| max_issues_count | int64 | 1 | 116k | ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 | 24 | ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 | 24 | ⌀ |
| max_forks_repo_path | stringlengths | 3 | 251 | |
| max_forks_repo_name | stringlengths | 4 | 130 | |
| max_forks_repo_head_hexsha | stringlengths | 40 | 78 | |
| max_forks_repo_licenses | sequencelengths | 1 | 10 | |
| max_forks_count | int64 | 1 | 105k | ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 | 24 | ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 | 24 | ⌀ |
| content | stringlengths | 1 | 1.05M | |
| avg_line_length | float64 | 1 | 1.02M | |
| max_line_length | int64 | 3 | 1.04M | |
| alphanum_fraction | float64 | 0 | 1 | |
f2b9b881e8f73f67cedd4bd5a979546da0d9dcab | 988 | py | Python | textembedding/__init__.py | Hanscal/textembedding | 0076a7a67e1c0e0b3ebc4bbbfa9dcdcfbf16c4c7 | ["MIT"] | 1 | 2021-05-26T09:42:37.000Z | 2021-05-26T09:42:37.000Z | textembedding/__init__.py | Hanscal/textembedding | 0076a7a67e1c0e0b3ebc4bbbfa9dcdcfbf16c4c7 | ["MIT"] | null | null | null | textembedding/__init__.py | Hanscal/textembedding | 0076a7a67e1c0e0b3ebc4bbbfa9dcdcfbf16c4c7 | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2021/2/26 12:07
@Author : hcai
@Email : hua.cai@unidt.com
"""
import textembedding.get_embedding
import textembedding.load_model
name = "textembedding"
# wv model
#
#
#
| 29.058824 | 122 | 0.765182 |
f2ba3f6c4a26d42ba28e90efe9fded89ad4b027a | 385 | py | Python | Importing_&_Managing_Financial_Data/Importing_financial_data_from_the_web/Visualize_a_stock_price_trend.py | RKiddle/python_finance | 7c0ed2998c0f82a0998ba0cb06225453ba8ee3fe | ["MIT"] | 1 | 2021-04-28T01:26:38.000Z | 2021-04-28T01:26:38.000Z | Importing_&_Managing_Financial_Data/Importing_financial_data_from_the_web/Visualize_a_stock_price_trend.py | RKiddle/python_finance | 7c0ed2998c0f82a0998ba0cb06225453ba8ee3fe | ["MIT"] | null | null | null | Importing_&_Managing_Financial_Data/Importing_financial_data_from_the_web/Visualize_a_stock_price_trend.py | RKiddle/python_finance | 7c0ed2998c0f82a0998ba0cb06225453ba8ee3fe | ["MIT"] | null | null | null |
# Import matplotlib.pyplot
import matplotlib.pyplot as plt
# `date` and `DataReader` are used below but were not imported in this
# snippet; these imports make it self-contained.
from datetime import date
from pandas_datareader.data import DataReader
# Set start and end dates
start = date(2016, 1, 1)
end = date(2016, 12, 31)
# Set the ticker and data_source
ticker = 'FB'
data_source = 'google'
# Import the data using DataReader
stock_prices = DataReader(ticker, data_source, start, end)
# Plot Close
stock_prices['Close'].plot(title=ticker)
# Show the plot
plt.show()
| 19.25 | 58 | 0.735065 |
f2bbbde7ac14cbda28bc8fe761c19a1e71889708 | 2,808 | py | Python | pfrock/cli/config_parser.py | knightliao/pfrock | 33587f11caeeccc11d0b8219b4e02df153905486 | ["Apache-2.0"] | 62 | 2016-02-24T10:47:17.000Z | 2019-04-27T01:36:56.000Z | pfrock/cli/config_parser.py | knightliao/pfrock | 33587f11caeeccc11d0b8219b4e02df153905486 | ["Apache-2.0"] | 1 | 2019-04-19T12:13:21.000Z | 2021-08-10T09:16:09.000Z | pfrock/cli/config_parser.py | knightliao/pfrock | 33587f11caeeccc11d0b8219b4e02df153905486 | ["Apache-2.0"] | 24 | 2016-03-01T14:59:29.000Z | 2019-09-02T08:12:00.000Z |
# !/usr/bin/env python
# coding=utf8
import json
import traceback
from tornado.web import RequestHandler
from pfrock.cli import logger
from pfrock.core.constants import PFROCK_CONFIG_SERVER, PFROCK_CONFIG_ROUTER, PFROCK_CONFIG_PORT, ROUTER_METHOD, \
ROUTER_PATH, ROUTER_OPTIONS, ROUTER_HANDLER
from pfrock.core.lib import auto_str
class PfrockConfigParser(object):
    # The original class body is truncated in this excerpt; a bare stub
    # keeps the module syntactically valid.
    pass
| 33.428571 | 114 | 0.649217 |
f2bdc8d9084d26a302efcbe7ca92780a65ffbfe3 | 3,722 | py | Python | src/resnet/resnetv2_3stage_gaussian.py | googleinterns/out-of-distribution | 84a2d5af59462f0943f629f742090b485ed50e61 | ["Apache-2.0"] | null | null | null | src/resnet/resnetv2_3stage_gaussian.py | googleinterns/out-of-distribution | 84a2d5af59462f0943f629f742090b485ed50e61 | ["Apache-2.0"] | null | null | null | src/resnet/resnetv2_3stage_gaussian.py | googleinterns/out-of-distribution | 84a2d5af59462f0943f629f742090b485ed50e61 | ["Apache-2.0"] | null | null | null |
from typing import List, Union
import torch
from torch import nn
from torch.nn import functional as F
from src.modules.max_mahalanobis import MaxMahalanobis, GaussianResult
from src.modules.normalize import Normalize
from src.resnet.bottleneck_block_v2s3 import create_bottleneck_stage_v2s3
from src.resnet.shared import GaussianMode, ResNet_Gaussian
| 35.113208 | 109 | 0.671682 |
f2c0ef753b4cd8675d6db691f0d1c053e49d0236 | 504 | py | Python | assignments/Exercise_Lecture73_Phumeth.P.py | ZnoKunG/PythonProject | 388b5dfeb0161aee66094e7b2ecc2d6ed13588bd | ["MIT"] | null | null | null | assignments/Exercise_Lecture73_Phumeth.P.py | ZnoKunG/PythonProject | 388b5dfeb0161aee66094e7b2ecc2d6ed13588bd | ["MIT"] | null | null | null | assignments/Exercise_Lecture73_Phumeth.P.py | ZnoKunG/PythonProject | 388b5dfeb0161aee66094e7b2ecc2d6ed13588bd | ["MIT"] | null | null | null |
systemMenu = {"": 35, "": 45, "": 55, "": 20}
menuList = []
while True:
menuName = input("Please Enter Menu :")
if(menuName.lower() == "exit"):
break
else:
menuList.append([menuName, systemMenu[menuName]])
showBill()
| 28 | 70 | 0.613095 |
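The loop above ends by calling `showBill()`, which is not defined in this excerpt (the menu-name keys were also non-ASCII strings that did not survive extraction). A minimal sketch of what such a helper could look like, assuming `menuList` holds `[name, price]` pairs; only the function name comes from the source:

def showBill():
    # Hypothetical reconstruction: print every ordered item and the total.
    total = 0
    for name, price in menuList:
        print(name, price)
        total += price
    print("Total:", total)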
f2c15988d8527886dc69eb42d21e16810aed3ba2 | 2,229 | py | Python | FetchTextFromRISE.py | RISE-MPIWG/hylg | 7d49e7aed0623d9730d5c8933030954fa8f729b0 | ["MIT"] | 1 | 2020-05-30T02:29:36.000Z | 2020-05-30T02:29:36.000Z | FetchTextFromRISE.py | RISE-MPIWG/hylg | 7d49e7aed0623d9730d5c8933030954fa8f729b0 | ["MIT"] | null | null | null | FetchTextFromRISE.py | RISE-MPIWG/hylg | 7d49e7aed0623d9730d5c8933030954fa8f729b0 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import requests
import os
# 6000 is a large number to make sure we get all the components of a collection. Please do note that RISE also has a pagination feature,
# which can be implemented by clients if they wish.
per_page = 6000
# getting the list of collections that the user has access to:
collections_response = requests.get(f'https://rise.mpiwg-berlin.mpg.de/api/collections?per_page={per_page}')
collections = collections_response.json()
# each accessible collections has a name, a uuid, and a number of resources.
# print(collections)
idx = 1
for collection in collections:
print(f'collection at index: {idx}')
idx += 1
print(collection)
# picking a collection by its index
# collection_index = 1
# collection = collections[collection_index]
results = list(filter(lambda collection: collection['name'] == 'MPIWG - ', collections))
collection = results[0]
print(collection['uuid'])
collection_uuid = collection['uuid']
# we grab all resources for this collection
resources_response = requests.get(f'https://rise.mpiwg-berlin.mpg.de/api/collections/{collection_uuid}/resources?per_page={per_page}')
corpus_path = './corpus'
if not os.path.exists(corpus_path):
os.makedirs(corpus_path)
for resource in resources_response.json():
uuid = resource['uuid']
resource_name = resource['name']
print(resource_name)
if not os.path.exists(corpus_path + "/" + resource_name):
os.makedirs(corpus_path + "/" + resource_name)
sections = requests.get("https://rise.mpiwg-berlin.mpg.de/api/resources/"+ resource['uuid'] +"/sections")
for section in sections.json():
print(section)
print(section['uuid'])
section_name = section['name']
section_path = corpus_path + "/" + resource_name + "/" + section_name
file = open(section_path +".txt", "w")
content_units = requests.get("https://rise.mpiwg-berlin.mpg.de/api/sections/"+ section['uuid'] +"/content_units?per_page=6000")
for content_unit in content_units.json():
print(content_unit)
file.write(content_unit['content'])
    file.close()
| 39.803571 | 139 | 0.682369 |
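The comment at the top of this script notes that RISE also offers pagination instead of one huge `per_page` request. A sketch of how a client might page through the collections endpoint; the `page` query parameter is an assumption, not something this snippet confirms:

def fetch_all_collections(per_page=100):
    # Hypothetical pagination loop: keep requesting pages until an empty
    # batch comes back. Assumes the API accepts a `page` parameter.
    collections = []
    page = 1
    while True:
        response = requests.get(
            f'https://rise.mpiwg-berlin.mpg.de/api/collections'
            f'?per_page={per_page}&page={page}')
        batch = response.json()
        if not batch:
            return collections
        collections.extend(batch)
        page += 1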
f2c28b1aa80b1c46d32c2a27e47aa2e4dc3f68c8 | 612 | py | Python | ipfinder.py | robertas64/realGrowIp | bc1f1f4cf30eaa4091a6f81907a39eb8d3b66990 | ["MIT"] | 1 | 2022-03-09T23:21:18.000Z | 2022-03-09T23:21:18.000Z | ipfinder.py | njartemis/realGrowIp | bc1f1f4cf30eaa4091a6f81907a39eb8d3b66990 | ["MIT"] | null | null | null | ipfinder.py | njartemis/realGrowIp | bc1f1f4cf30eaa4091a6f81907a39eb8d3b66990 | ["MIT"] | 1 | 2021-04-16T16:11:24.000Z | 2021-04-16T16:11:24.000Z |
# ___ ___ ___ _ _
# |_ _| _ \___| __(_)_ _ __| |___ _ _
# | || _/___| _|| | ' \/ _` / -_) '_|
# |___|_| |_| |_|_||_\__,_\___|_|
# Made by Robertas64
#Importing the module
import os
from time import *
banner = """
___ ___ ___ _ _
|_ _| _ \___| __(_)_ _ __| |___ _ _
| || _/___| _|| | ' \/ _` / -_) '_|
|___|_| |_| |_|_||_\__,_\___|_|
Find GrowtopiaServer Real IP
Author : Robertas64
Make sure you're connected
To GrowtopiaServer hosts
"""
#Main
print(banner)
os.system("ping growtopia1.com")
| 22.666667 | 40 | 0.47549 |
f2c4872a061796a24a75f519586680551cd85468 | 348 | py | Python | data.py | alantess/DDQN-BTC | 0fff185200dd1c16088dc322cbb7790b848c1e6d | ["MIT"] | 2 | 2021-01-12T08:59:54.000Z | 2022-02-07T23:41:49.000Z | data.py | alantess/DDQN-BTC | 0fff185200dd1c16088dc322cbb7790b848c1e6d | ["MIT"] | null | null | null | data.py | alantess/DDQN-BTC | 0fff185200dd1c16088dc322cbb7790b848c1e6d | ["MIT"] | null | null | null |
import pandas as pd
import matplotlib.pyplot as plt
| 23.2 | 55 | 0.666667 |
f2c664d27fab22d77e93ebebd90d26fccfda0d77 | 4,715 | py | Python | main.py | lucaswerner90/upc_dl_project_2021 | c02061da0e25a0b24a9b742074b87ac30f36586d | ["MIT"] | 2 | 2021-07-15T12:30:43.000Z | 2021-11-04T07:50:16.000Z | main.py | lucaswerner90/upc_dl_project_2021 | c02061da0e25a0b24a9b742074b87ac30f36586d | ["MIT"] | 30 | 2021-05-03T07:37:37.000Z | 2021-07-01T18:53:23.000Z | main.py | lucaswerner90/upc_dl_project_2021 | c02061da0e25a0b24a9b742074b87ac30f36586d | ["MIT"] | 1 | 2021-06-21T11:12:32.000Z | 2021-06-21T11:12:32.000Z |
import argparse
import os
import torch
import torch.nn as nn
import torch.optim as optim
import argparse
from torch.utils.data import DataLoader
from torchvision import transforms
from dataset.main import Flickr8kDataset
from dataset.caps_collate import CapsCollate
from dataset.download import DownloadDataset
from model.main import ImageCaptioningModel,ViTImageCaptioningModel
from train import train, split_subsets
from transformers import ViTFeatureExtractor
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
use_ViT_Enc = True
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Image captioning model setup')
parser.add_argument('-bsz','--batch-size',type=int, required=False, choices=[4,8,16,32,64], default=64, help='Number of images to process on each batch')
parser.add_argument('-vocab','--vocabulary-size',type=int, required=False, default=5000, help='Number of words that our model will use to generate the captions of the images')
parser.add_argument('-image-feature','--image-features-dimension',type=int, choices=[256,512,1024], required=False, default=512, help='Number of features that the model will take for each image')
parser.add_argument('-attn-dim','--attention-dimension',type=int, choices=[256,512,1024], required=False, default=256, help='Dimension of the attention tensor')
parser.add_argument('-embed-dim','--embedding-dimension',type=int, choices=[256,512,1024], required=False, default=256, help='Dimension of the word embedding tensor')
parser.add_argument('-epochs','--epochs',type=int, required=False, default=100, help='Number of epochs that our model will run')
parser.add_argument('-captions-length','--captions-max-length',type=int, required=False, default=28, help='Max size of the predicted captions')
parser.add_argument('-lr','--learning-rate',type=float, required=False, choices=[1e-1,1e-2,1e-3,1e-4],default=1e-3, help='Max size of the predicted captions')
parser.add_argument('-img-size','--image-size',type=int, required=False, choices=[224,256,320], default=224, help='Size of the input image that our model will process')
parser.add_argument('-log','--log-interval',type=int, required=False, default=5, help='During training, every X epochs, we log the results')
args = parser.parse_args()
variables = vars(args)
if not os.path.exists('data'):
print('Downloading Flickr8k dataset...')
filepath = os.path.join(os.getcwd(),'data')
DownloadDataset.download(filepath)
main(variables)
| 45.336538 | 196 | 0.759915 |
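`main(variables)` is invoked with `vars(args)`, i.e. a plain dict of the parsed CLI options, but the function body is not part of this excerpt. A minimal, hypothetical stand-in that only unpacks options defined by the argparse setup above; it is not the project's actual training loop:

def main(variables):
    # Hypothetical entry point: the keys below are the argparse `dest`
    # names declared above.
    batch_size = variables['batch_size']
    epochs = variables['epochs']
    learning_rate = variables['learning_rate']
    print(f'Training for {epochs} epochs, batch size {batch_size}, '
          f'learning rate {learning_rate}')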
f2c78a6895c6f2bb08f5bc34684b1ca6a132fd79 | 2,050 | py | Python | tests/FasterSubsetSumTests/test_randomizedBase.py | joakiti/Benchmark-SubsetSums | a875b5adf7f800d26b73516452904031c73ec29d | ["MIT"] | null | null | null | tests/FasterSubsetSumTests/test_randomizedBase.py | joakiti/Benchmark-SubsetSums | a875b5adf7f800d26b73516452904031c73ec29d | ["MIT"] | null | null | null | tests/FasterSubsetSumTests/test_randomizedBase.py | joakiti/Benchmark-SubsetSums | a875b5adf7f800d26b73516452904031c73ec29d | ["MIT"] | null | null | null |
import unittest
from unittest import TestCase
from Implementations.FastIntegersFromGit import FastIntegersFromGit
from Implementations.helpers.Helper import ListToPolynomial, toNumbers
from Implementations.FasterSubsetSum.RandomizedBase import NearLinearBase
from benchmarks.test_distributions import Distributions as dist
| 38.679245 | 112 | 0.661463 |
f2ca33e35faaa3a6ab066c758e3c492f242feea7 | 633 | py | Python | lesson_3_set.py | pis2pis2/pis2pis2 | a8ab83d89bbeaa2b4a6a2be684ae5b7513472a7f | ["MIT"] | null | null | null | lesson_3_set.py | pis2pis2/pis2pis2 | a8ab83d89bbeaa2b4a6a2be684ae5b7513472a7f | ["MIT"] | null | null | null | lesson_3_set.py | pis2pis2/pis2pis2 | a8ab83d89bbeaa2b4a6a2be684ae5b7513472a7f | ["MIT"] | 4 | 2019-11-12T06:59:35.000Z | 2021-01-29T21:34:15.000Z |
# (set)------------------------
#------------------------------------------
# A set literal:
temp_set = {1, 2, 3}
print(type(temp_set), temp_set)
# Converting a list to a set removes duplicate elements:
temp_list = [1, 2, 1, 2, 2, 3, 4, 12, 32]
temp_set = set(temp_list)
print(type(temp_set), temp_set)
# Membership test and iteration:
print(100 in temp_set)
for element in temp_set:
    print(element)
#----------
# Set operations: union and difference.
my_set_1 = set([1, 2, 3, 4, 5])
my_set_2 = set([5, 6, 7, 8, 9])
my_set_3 = my_set_1.union(my_set_2)
print(my_set_3)
my_set_4 = my_set_1.difference(my_set_2)
print(my_set_4)
| 20.419355 | 52 | 0.598736 |
f2ca9fdc60f3ee0343b7c18df16ab40ecebc987e | 4,744 | py | Python | web/fabric_utils/deploy.py | kbarnes3/guidcoin | c9011a00f18bbd181a538a553950dbc0e8c1a05e | ["BSD-2-Clause"] | null | null | null | web/fabric_utils/deploy.py | kbarnes3/guidcoin | c9011a00f18bbd181a538a553950dbc0e8c1a05e | ["BSD-2-Clause"] | null | null | null | web/fabric_utils/deploy.py | kbarnes3/guidcoin | c9011a00f18bbd181a538a553950dbc0e8c1a05e | ["BSD-2-Clause"] | null | null | null |
from fabric.api import cd, run, settings, sudo
configurations = {
'daily': {
'branch': 'master',
'ssl': False,
},
'dev': {
'branch': 'master',
'ssl': False,
},
'prod': {
'branch': 'prod',
'ssl': False,
},
'staging': {
'branch': 'prod',
'ssl': False,
},
}
| 35.669173 | 92 | 0.620784 |
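Only the `configurations` map survives in this excerpt; the task functions are missing. A sketch of a Fabric task that could consume it with the imported `cd`/`run` helpers; the remote path and git commands are assumptions:

def deploy(config_name):
    # Hypothetical deploy task keyed on the configurations dict above.
    config = configurations[config_name]
    branch = config['branch']
    with cd('/var/www/{0}'.format(config_name)):  # remote path assumed
        run('git fetch origin')
        run('git checkout {0}'.format(branch))
        run('git pull origin {0}'.format(branch))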
f2cace32420ebacd10ddd9012cee72a53278a13e | 1,863 | py | Python | sorts/4.Tree_sort.py | 18-2-SKKU-OSS/2018-2-OSS-E5-- | 8bb7e4c239f5bd95f4635b442bb8b2838e76fb36 | ["MIT"] | 4 | 2018-12-02T14:21:02.000Z | 2019-02-28T04:15:42.000Z | sorts/4.Tree_sort.py | 18-2-SKKU-OSS/2018-2-OSS-E5 | 8bb7e4c239f5bd95f4635b442bb8b2838e76fb36 | ["MIT"] | 25 | 2018-11-27T10:00:05.000Z | 2018-12-11T01:58:46.000Z | sorts/4.Tree_sort.py | 18-2-SKKU-OSS/2018-2-OSS-E5-- | 8bb7e4c239f5bd95f4635b442bb8b2838e76fb36 | ["MIT"] | null | null | null |
"""
Tree sort.

Build a binary search tree from the input values; an in-order traversal
of a binary search tree visits the nodes in ascending order (left
subtree, root, right subtree), so collecting the values during the
traversal yields the array in sorted order.
"""
from __future__ import print_function
if __name__ == '__main__':
try:
raw_input # Python 2
except NameError:
raw_input = input # Python 3
for i in range(3):
user_input = raw_input('Enter numbers separated by a comma:\n').strip()
unsorted = [int(item) for item in user_input.split(',')]
print(treesort(unsorted))
| 28.661538 | 79 | 0.570585 |
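`treesort` is called above but its definition did not survive in this excerpt. A sketch of the standard approach the docstring describes (insert every value into a binary search tree, then collect them with an in-order traversal); the class and helper names are chosen here, not taken from the source:

class Node:
    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None

    def insert(self, val):
        # Smaller values go to the left subtree, others to the right.
        if val < self.val:
            if self.left is None:
                self.left = Node(val)
            else:
                self.left.insert(val)
        elif self.right is None:
            self.right = Node(val)
        else:
            self.right.insert(val)


def inorder(root, res):
    # In-order traversal of a BST appends values in ascending order.
    if root is not None:
        inorder(root.left, res)
        res.append(root.val)
        inorder(root.right, res)


def treesort(arr):
    if not arr:
        return []
    root = Node(arr[0])
    for item in arr[1:]:
        root.insert(item)
    res = []
    inorder(root, res)
    return res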
f2cb9232d1beaf4ae9243ec51c0966d350c75625 | 446 | py | Python | rules/helpers.py | prokoptsev/rules | 436348004aa34c2e50d71960dad2076719fc433b | ["MIT"] | null | null | null | rules/helpers.py | prokoptsev/rules | 436348004aa34c2e50d71960dad2076719fc433b | ["MIT"] | 1 | 2017-02-01T08:56:08.000Z | 2017-02-01T08:56:08.000Z | rules/helpers.py | prokoptsev/rules | 436348004aa34c2e50d71960dad2076719fc433b | ["MIT"] | 1 | 2019-11-08T10:44:43.000Z | 2019-11-08T10:44:43.000Z |
# coding: utf-8
from __future__ import unicode_literals, absolute_import
_NOTSET = type(
b"NotSet",
(object,),
{"__repr__": lambda self: "<ValueNotSet>"}
)()
| 24.777778 | 56 | 0.632287 |
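For context, a short usage sketch of the `_NOTSET` sentinel defined above: it lets a caller distinguish "argument omitted" from "argument explicitly set to None". The `get_option` function is illustrative only:

def get_option(options, key, default=_NOTSET):
    # Hypothetical consumer: fall back only when `default` was really
    # omitted, even if the caller passed default=None.
    if key in options:
        return options[key]
    if default is _NOTSET:
        raise KeyError(key)
    return default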
4b2bca30173574ead32b90a8d29f7a356f54d612 | 3,030 | py | Python | e2e/Vectors/Generation/Consensus/Beaten.py | kayabaNerve/Currency | 260ebc20f1704f42ad6183fee39ad58ec6d07961 | ["CC0-1.0"] | 66 | 2019-01-14T08:39:52.000Z | 2022-01-06T11:39:15.000Z | e2e/Vectors/Generation/Consensus/Beaten.py | kayabaNerve/Currency | 260ebc20f1704f42ad6183fee39ad58ec6d07961 | ["CC0-1.0"] | 228 | 2019-01-16T15:42:44.000Z | 2022-02-05T07:48:07.000Z | e2e/Vectors/Generation/Consensus/Beaten.py | kayabaNerve/Currency | 260ebc20f1704f42ad6183fee39ad58ec6d07961 | ["CC0-1.0"] | 19 | 2019-01-14T08:53:04.000Z | 2021-11-03T20:19:28.000Z |
from typing import List
import json
import e2e.Libs.Ristretto.Ristretto as Ristretto
from e2e.Libs.BLS import PrivateKey
from e2e.Classes.Transactions.Transactions import Claim, Send, Transactions
from e2e.Classes.Consensus.Verification import SignedVerification
from e2e.Classes.Consensus.VerificationPacket import VerificationPacket
from e2e.Classes.Consensus.SpamFilter import SpamFilter
from e2e.Classes.Merit.Merit import Block, Merit
from e2e.Vectors.Generation.PrototypeChain import PrototypeBlock, PrototypeChain
edPrivKey: Ristretto.SigningKey = Ristretto.SigningKey(b'\0' * 32)
edPubKey: bytes = edPrivKey.get_verifying_key()
transactions: Transactions = Transactions()
sendFilter: SpamFilter = SpamFilter(3)
proto: PrototypeChain = PrototypeChain(40, keepUnlocked=True)
proto.add(1)
merit: Merit = Merit.fromJSON(proto.toJSON())
#Create a Claim.
claim: Claim = Claim([(merit.mints[-1], 0)], edPubKey)
claim.sign(PrivateKey(0))
transactions.add(claim)
merit.add(
PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[VerificationPacket(claim.hash, list(range(2)))]
).finish(0, merit)
)
sends: List[Send] = [
#Transaction which will win.
Send([(claim.hash, 0)], [(bytes(32), claim.amount)]),
#Transaction which will be beaten.
Send([(claim.hash, 0)], [(edPubKey, claim.amount // 2), (edPubKey, claim.amount // 2)])
]
#Children. One which will have a Verification, one which won't.
sends += [
Send([(sends[1].hash, 0)], [(edPubKey, claim.amount // 2)]),
Send([(sends[1].hash, 1)], [(edPubKey, claim.amount // 2)])
]
#Send which spend the remaining descendant of the beaten Transaction.
sends.append(Send([(sends[2].hash, 0)], [(bytes(32), claim.amount // 2)]))
for s in range(len(sends)):
sends[s].sign(edPrivKey)
sends[s].beat(sendFilter)
if s < 3:
transactions.add(sends[s])
verif: SignedVerification = SignedVerification(sends[2].hash, 1)
verif.sign(1, PrivateKey(1))
merit.add(
PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[
VerificationPacket(sends[0].hash, [0]),
VerificationPacket(sends[1].hash, [1])
]
).finish(0, merit)
)
merit.add(
PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[VerificationPacket(sends[2].hash, [0])]
).finish(0, merit)
)
for _ in range(4):
merit.add(
PrototypeBlock(merit.blockchain.blocks[-1].header.time + 1200).finish(0, merit)
)
blockWBeatenVerif: Block = PrototypeBlock(
merit.blockchain.blocks[-1].header.time + 1200,
packets=[VerificationPacket(sends[2].hash, [1])]
).finish(0, merit)
merit.add(
PrototypeBlock(merit.blockchain.blocks[-1].header.time + 1200).finish(0, merit)
)
with open("e2e/Vectors/Consensus/Beaten.json", "w") as vectors:
vectors.write(json.dumps({
"blockchain": merit.toJSON(),
"transactions": transactions.toJSON(),
"sends": [send.toJSON() for send in sends],
"verification": verif.toSignedJSON(),
"blockWithBeatenVerification": blockWBeatenVerif.toJSON()
}))
| 29.705882 | 89 | 0.718482 |
4b2c8fbe001a03db6be5e0e2f8295d8600500dd8 | 5,105 | py | Python | main/pythonDev/TestModels/sphericalJointTest.py | eapcivil/EXUDYN | 52bddc8c258cda07e51373f68e1198b66c701d03 | ["BSD-3-Clause-Open-MPI"] | 1 | 2020-10-06T08:06:25.000Z | 2020-10-06T08:06:25.000Z | main/pythonDev/TestModels/sphericalJointTest.py | eapcivil/EXUDYN | 52bddc8c258cda07e51373f68e1198b66c701d03 | ["BSD-3-Clause-Open-MPI"] | null | null | null | main/pythonDev/TestModels/sphericalJointTest.py | eapcivil/EXUDYN | 52bddc8c258cda07e51373f68e1198b66c701d03 | ["BSD-3-Clause-Open-MPI"] | null | null | null |
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# This is an EXUDYN example
#
# Details: Simulate Chain with 3D rigid bodies and SphericalJoint;
# Also test MarkerNodePosition
#
# Author: Johannes Gerstmayr
# Date: 2020-04-09
#
# Copyright:This file is part of Exudyn. Exudyn is free software. You can redistribute it and/or modify it under the terms of the Exudyn license. See 'LICENSE.txt' for more details.
#
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import sys
sys.path.append('../TestModels') #for modelUnitTest as this example may be used also as a unit test
import exudyn as exu
from exudyn.itemInterface import *
from exudyn.utilities import *
from exudyn.graphicsDataUtilities import *
from modelUnitTests import ExudynTestStructure, exudynTestGlobals
SC = exu.SystemContainer()
mbs = SC.AddSystem()
nBodies = 4
color = [0.1,0.1,0.8,1]
s = 0.1 #width of cube
sx = 3*s #length of cube/body
cPosZ = 0.1 #offset of constraint in z-direction
zz = sx * (nBodies+1)*2 #max size of background
background0 = GraphicsDataRectangle(-zz,-zz,zz,sx,color)
oGround=mbs.AddObject(ObjectGround(referencePosition= [0,0,0],
visualization=VObjectGround(graphicsData= [background0])))
mPosLast = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oGround,
localPosition=[-sx,0,cPosZ*0]))
#create a chain of bodies:
for i in range(nBodies):
f = 0 #factor for initial velocities
omega0 = [0,50.*f,20*f] #arbitrary initial angular velocity
ep0 = eulerParameters0 #no rotation
ep_t0 = AngularVelocity2EulerParameters_t(omega0, ep0)
p0 = [-sx+i*2*sx,0.,0] #reference position
v0 = [0.2*f,0.,0.] #initial translational velocity
nRB = mbs.AddNode(NodeRigidBodyEP(referenceCoordinates=p0+ep0,
initialVelocities=v0+list(ep_t0)))
#nRB = mbs.AddNode(NodeRigidBodyEP(referenceCoordinates=[0,0,0,1,0,0,0], initialVelocities=[0,0,0,0,0,0,0]))
oGraphics = GraphicsDataOrthoCubeLines(-sx,-s,-s, sx,s,s, [0.8,0.1,0.1,1])
oRB = mbs.AddObject(ObjectRigidBody(physicsMass=2,
physicsInertia=[6,1,6,0,0,0],
nodeNumber=nRB,
visualization=VObjectRigidBody(graphicsData=[oGraphics])))
mMassRB = mbs.AddMarker(MarkerBodyMass(bodyNumber = oRB))
mbs.AddLoad(Gravity(markerNumber = mMassRB, loadVector=[0.,-9.81,0.])) #gravity in negative z-direction
if i==0:
#mPos = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oRB, localPosition = [-sx*0,0.,cPosZ*0]))
mPos = mbs.AddMarker(MarkerNodePosition(nodeNumber=nRB))
else:
mPos = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oRB, localPosition = [-sx,0.,cPosZ]))
#alternative with spring-damper:
#mbs.AddObject(ObjectConnectorCartesianSpringDamper(markerNumbers = [mPosLast, mPos],
# stiffness=[k,k,k], damping=[d,d,d])) #gravity in negative z-direction
axes = [1,1,1]
if (i==0):
axes = [0,1,1]
mbs.AddObject(SphericalJoint(markerNumbers = [mPosLast, mPos], constrainedAxes=axes))
#marker for next chain body
mPosLast = mbs.AddMarker(MarkerBodyPosition(bodyNumber = oRB, localPosition = [sx,0.,cPosZ]))
mbs.Assemble()
#exu.Print(mbs)
simulationSettings = exu.SimulationSettings() #takes currently set values or default values
fact = 1000
simulationSettings.timeIntegration.numberOfSteps = 1*fact
simulationSettings.timeIntegration.endTime = 0.001*fact
simulationSettings.solutionSettings.solutionWritePeriod = simulationSettings.timeIntegration.endTime/fact*10
simulationSettings.timeIntegration.verboseMode = 1
simulationSettings.timeIntegration.newton.useModifiedNewton = True
simulationSettings.timeIntegration.generalizedAlpha.useIndex2Constraints = False
simulationSettings.timeIntegration.generalizedAlpha.useNewmark = False
simulationSettings.timeIntegration.generalizedAlpha.spectralRadius = 0.6 #0.6 works well
simulationSettings.solutionSettings.solutionInformation = "rigid body tests"
SC.visualizationSettings.nodes.defaultSize = 0.05
#simulationSettings.displayComputationTime = True
#simulationSettings.displayStatistics = True
if exudynTestGlobals.useGraphics:
exu.StartRenderer()
mbs.WaitForUserToContinue()
SC.TimeIntegrationSolve(mbs, 'GeneralizedAlpha', simulationSettings)
#+++++++++++++++++++++++++++++++++++++++++++++
sol = mbs.systemData.GetODE2Coordinates();
solref = mbs.systemData.GetODE2Coordinates(configuration=exu.ConfigurationType.Reference);
#exu.Print('sol=',sol)
u = 0
for i in range(14): #take coordinates of first two bodies
u += abs(sol[i]+solref[i])
exu.Print('solution of sphericalJointTest=',u)
exudynTestGlobals.testError = u - (4.409004179180698) #2020-04-04: 4.409004179180698
if exudynTestGlobals.useGraphics:
#SC.WaitForRenderEngineStopFlag()
exu.StopRenderer() #safely close rendering window!
| 40.84 | 181 | 0.681097 |
4b315d99b885f67bca9bd8f9e32645470a5d8448 | 1,915 | py | Python | inference/online_inference/src/app.py | made-ml-in-prod-2021/marina-zav | 7b4b6e5f333707001e36dfb014dcd36bf975d969 | ["FTL"] | null | null | null | inference/online_inference/src/app.py | made-ml-in-prod-2021/marina-zav | 7b4b6e5f333707001e36dfb014dcd36bf975d969 | ["FTL"] | null | null | null | inference/online_inference/src/app.py | made-ml-in-prod-2021/marina-zav | 7b4b6e5f333707001e36dfb014dcd36bf975d969 | ["FTL"] | null | null | null |
import logging
import sys
import time
from typing import List, Optional
import uvicorn
from fastapi import FastAPI
from fastapi.exceptions import RequestValidationError
from fastapi.responses import PlainTextResponse
from sklearn.pipeline import Pipeline
from src.entities import (
read_app_params,
HeartDiseaseModelRequest,
HeartDiseaseModelResponse,
)
from src.models import make_predict, load_model
logger = logging.getLogger(__name__)
handler = logging.StreamHandler(sys.stdout)
logger.setLevel(logging.INFO)
logger.addHandler(handler)
DEFAULT_CONFIG_PATH = "configs/app_config.yaml"
model: Optional[Pipeline] = None
app = FastAPI()
def setup_app():
app_params = read_app_params(DEFAULT_CONFIG_PATH)
logger.info(f"Running app on {app_params.host} with port {app_params.port}")
uvicorn.run(app, host=app_params.host, port=app_params.port)
if __name__ == "__main__":
setup_app()
| 26.232877 | 80 | 0.769191 |
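The route handlers were stripped from this excerpt; the imports (`make_predict`, `load_model`, the request/response entities) suggest a typical prediction service. A hedged sketch; the route paths, the startup hook, the `model_path` attribute and `make_predict`'s signature are all assumptions:

@app.on_event("startup")
def load():
    # Hypothetical startup hook populating the module-level `model`.
    global model
    model = load_model(read_app_params(DEFAULT_CONFIG_PATH).model_path)


@app.get("/health")
def health() -> bool:
    return model is not None


@app.post("/predict", response_model=List[HeartDiseaseModelResponse])
def predict(request: HeartDiseaseModelRequest):
    return make_predict(request, model)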
4b34659c04f2dfee8c71b653e9b765ff930cf91e | 8,040 | py | Python | serverCollector.py | VertexC/pipot-server | 0e2c9b0e34a589d9813301765ef8d2433ef67869 | ["ISC"] | 4 | 2019-02-11T12:43:08.000Z | 2019-03-23T06:59:38.000Z | serverCollector.py | VertexC/pipot-server | 0e2c9b0e34a589d9813301765ef8d2433ef67869 | ["ISC"] | 25 | 2019-02-26T17:16:58.000Z | 2019-08-19T03:36:56.000Z | serverCollector.py | VertexC/pipot-server | 0e2c9b0e34a589d9813301765ef8d2433ef67869 | ["ISC"] | 5 | 2019-01-15T06:32:21.000Z | 2020-01-10T11:58:43.000Z |
import hashlib
import hmac
import json
import datetime
from abc import ABCMeta, abstractmethod
from twisted.internet import protocol
from mod_config.models import Rule, Actions
from mod_honeypot.models import PiPotReport, Deployment
from pipot.encryption import Encryption
from pipot.notifications import NotificationLoader
from pipot.services import ServiceLoader
| 37.924528 | 78 | 0.486318 |
4b3702971613873f8e0d3ea487888d2084d6acd1 | 4,852 | py | Python | pyhttptest/decorators.py | NickMitin/pyhttptest | 5116caf3962dab63d62bffe94b0659f435b3e2d3 | ["BSD-3-Clause"] | 142 | 2019-10-22T11:19:44.000Z | 2021-11-09T11:05:27.000Z | pyhttptest/decorators.py | NickMitin/pyhttptest | 5116caf3962dab63d62bffe94b0659f435b3e2d3 | ["BSD-3-Clause"] | 5 | 2019-10-22T14:43:39.000Z | 2020-10-09T13:25:24.000Z | pyhttptest/decorators.py | NickMitin/pyhttptest | 5116caf3962dab63d62bffe94b0659f435b3e2d3 | ["BSD-3-Clause"] | 14 | 2019-10-23T18:27:58.000Z | 2020-09-22T01:07:39.000Z |
from sys import modules
from functools import wraps
from jsonschema import validate
from pyhttptest.constants import (
HTTP_METHOD_NAMES,
JSON_FILE_EXTENSION,
)
from pyhttptest.exceptions import (
FileExtensionError,
HTTPMethodNotSupportedError
)
from pyhttptest.http_schemas import ( # noqa
get_schema,
post_schema,
put_schema,
delete_schema
)
def check_file_extension(func):
    """A decorator responsible for checking whether
    the file extension is supported.
    An inner :func:`_decorator` slices the last five
    characters of the passed ``file_path`` parameter and
    checks whether they equal the JSON file extension ('.json').
    If they are equal, the decorated function's business logic is
    performed; otherwise an exception for the unsupported file
    extension is raised.
    Usage:
    .. code-block:: python
        @check_file_extension
        def load_content_from_json_file(file_path):
            ...
    :raises FileExtensionError: If the file extension is not '.json'.
    """
    @wraps(func)
    def _decorator(file_path, *args, **kwargs):
        # Body reconstructed from the docstring above.
        if file_path[-5:] != JSON_FILE_EXTENSION:
            raise FileExtensionError(file_path)
        return func(file_path, *args, **kwargs)
    return _decorator


def validate_extract_json_properties_func_args(func):
    """A validation decorator ensuring that the arguments
    passed to the decorated function have the proper types.
    An inner :func:`_decorator` checks the types of the arguments.
    If they differ from the allowed ones, an exception is raised;
    otherwise the decorated function is processed.
    Usage:
    .. code-block:: python
        @validate_extract_json_properties_func_args
        def extract_properties_values_from_json(data, keys):
            ...
    :raises TypeError: If ``data`` is not a `dict`.
    :raises TypeError: If ``keys`` is not one of `tuple`, `list`, `set`.
    """
    @wraps(func)
    def _decorator(data, keys, *args, **kwargs):
        # Body reconstructed from the docstring above.
        if not isinstance(data, dict):
            raise TypeError("The 'data' argument must be of type 'dict'.")
        if not isinstance(keys, (tuple, list, set)):
            raise TypeError(
                "The 'keys' argument must be a 'tuple', 'list' or 'set'."
            )
        return func(data, keys, *args, **kwargs)
    return _decorator


def validate_data_against_json_schema(func):
    """A validation decorator ensuring that data
    meets the JSON Schema requirements.
    An inner :func:`_decorator` checks the data type and HTTP method
    support, and looks up the matching JSON Schema. If any check fails,
    an exception is raised; otherwise the data is validated against the
    JSON Schema and the decorated function is processed.
    Usage:
    .. code-block:: python
        @validate_data_against_json_schema
        def extract_json_data(data):
            ...
    :raises TypeError: If the data is not a `dict`.
    :raises HTTPMethodNotSupportedError: If an HTTP Method is not supported.
    :raises TypeError: If there is no appropriate JSON Schema to validate data.
    """
    @wraps(func)
    def _decorator(data, *args, **kwargs):
        # Body reconstructed from the docstring above. The 'verb' key and
        # the '<method>_schema' module naming are assumptions inferred
        # from the ``http_schemas`` imports at the top of this module.
        if not isinstance(data, dict):
            raise TypeError("The 'data' argument must be of type 'dict'.")
        http_method = data.get('verb', '').lower()
        if http_method not in HTTP_METHOD_NAMES:
            raise HTTPMethodNotSupportedError(http_method)
        schema_name = '{method}_schema'.format(method=http_method)
        schema_path = 'pyhttptest.http_schemas.{name}'.format(name=schema_name)
        if schema_path not in modules:
            raise TypeError('There is no JSON Schema to validate the data.')
        schema_instance = getattr(modules[schema_path], schema_name)
        validate(instance=data, schema=schema_instance)
        return func(data, *args, **kwargs)
    return _decorator
| 31.303226 | 82 | 0.61789 |
4b37117f289d4054432d5850f0a931ebb4548e7d | 4,523 | py | Python | resources/tests/test_perf.py | HotStew/respa | 04f39efb15b4f4206a122e665f8377c7198e1f25 | ["MIT"] | 49 | 2015-10-21T06:25:31.000Z | 2022-03-20T07:24:20.000Z | resources/tests/test_perf.py | HotStew/respa | 04f39efb15b4f4206a122e665f8377c7198e1f25 | ["MIT"] | 728 | 2015-06-24T13:26:54.000Z | 2022-03-24T12:18:41.000Z | resources/tests/test_perf.py | digipointtku/respa | a529e0df4d3f072df7801adb5bf97a5f4abd1243 | ["MIT"] | 46 | 2015-06-26T10:52:57.000Z | 2021-12-17T09:38:25.000Z |
from datetime import datetime
import arrow
import pytest
from django.conf import settings
from resources.models import Day, Period, Reservation, Resource, ResourceType, Unit
TEST_PERFORMANCE = bool(getattr(settings, "TEST_PERFORMANCE", False))
| 50.255556 | 118 | 0.676984 |
4b37de6730f4bf33d3ca155f712cb0a661d6d553 | 951 | py | Python | docker-app/qfieldcloud/core/migrations/0045_auto_20211012_2234.py | livelihoods-and-landscapes/qfieldcloud-tcs | 3075e19d89caa3090a0d2027a376336526572764 | ["MIT"] | 34 | 2021-06-08T12:06:24.000Z | 2022-03-07T11:45:10.000Z | docker-app/qfieldcloud/core/migrations/0045_auto_20211012_2234.py | livelihoods-and-landscapes/qfieldcloud-tcs | 3075e19d89caa3090a0d2027a376336526572764 | ["MIT"] | 139 | 2021-06-08T00:24:51.000Z | 2022-03-28T09:59:54.000Z | docker-app/qfieldcloud/core/migrations/0045_auto_20211012_2234.py | livelihoods-and-landscapes/qfieldcloud-tcs | 3075e19d89caa3090a0d2027a376336526572764 | ["MIT"] | 8 | 2021-06-11T04:18:36.000Z | 2022-02-15T20:52:58.000Z |
# Generated by Django 3.2.8 on 2021-10-12 22:34
from django.db import migrations, models
| 28.818182 | 81 | 0.64143 |
4b385a13715b06d087f942d7458bcb33eb5bba5d | 74 | py | Python | src/wagtail_tag_manager/__init__.py | Tehnode/wagtail-tag-manager | 048a03fe61b57ddd1eea0377ab26cf96527f5457 | ["BSD-3-Clause"] | 59 | 2018-06-13T07:30:42.000Z | 2022-03-22T02:14:34.000Z | src/wagtail_tag_manager/__init__.py | Tehnode/wagtail-tag-manager | 048a03fe61b57ddd1eea0377ab26cf96527f5457 | ["BSD-3-Clause"] | 74 | 2018-08-09T20:52:56.000Z | 2022-03-02T08:39:30.000Z | src/wagtail_tag_manager/__init__.py | Tehnode/wagtail-tag-manager | 048a03fe61b57ddd1eea0377ab26cf96527f5457 | ["BSD-3-Clause"] | 23 | 2018-10-10T05:29:38.000Z | 2022-01-19T15:09:51.000Z |
default_app_config = "wagtail_tag_manager.config.WagtailTagManagerConfig"
| 37 | 73 | 0.891892 |
4b3983c191ae8db18994072c0ce7b31ca01543db | 12,081 | py | Python | python/test/lib/zk/cache_test.py | cschutijser/scion | 054cef53b31a577ed224a090d6a4fd3883fd520b | ["Apache-2.0"] | 1 | 2018-03-18T14:46:34.000Z | 2018-03-18T14:46:34.000Z | python/test/lib/zk/cache_test.py | cschutijser/scion | 054cef53b31a577ed224a090d6a4fd3883fd520b | ["Apache-2.0"] | 1 | 2020-03-20T01:28:56.000Z | 2020-03-20T01:28:56.000Z | python/test/lib/zk/cache_test.py | cschutijser/scion | 054cef53b31a577ed224a090d6a4fd3883fd520b | ["Apache-2.0"] | 2 | 2020-03-14T16:03:27.000Z | 2020-03-18T08:13:19.000Z |
# Copyright 2015 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`cache_test` --- lib.zk.cache unit tests
======================================================
"""
# Stdlib
from unittest.mock import call, patch
# External packages
import nose
import nose.tools as ntools
from kazoo.exceptions import (
ConnectionLoss,
NoNodeError,
NodeExistsError,
SessionExpiredError,
)
# SCION
from lib.zk.errors import ZkNoConnection, ZkNoNodeError
from lib.zk.cache import ZkSharedCache
from test.testcommon import assert_these_calls, create_mock
def test_create_conn_loss(self):
for excp in ConnectionLoss, SessionExpiredError:
yield self._check_create_conn_loss, excp
class TestZkSharedCacheProcess(object):
"""
Unit tests for lib.zk.cache.ZkSharedCache.process
"""
class TestZkSharedCacheListEntries(object):
"""
Unit tests for lib.zk.cache.ZkSharedCache._list_entries
"""
def test_children_exceptions(self):
for excp, expected in (
(ConnectionLoss, ZkNoConnection),
(SessionExpiredError, ZkNoConnection),
):
yield self._check_children_exception, excp, expected
class TestZkSharedCacheHandleEntries(object):
"""
Unit test for lib.zk.cache.ZkSharedCache._handle_entries
"""
if __name__ == "__main__":
nose.run(defaultTest=__name__)
| 36.279279 | 77 | 0.631653 |
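The `_check_create_conn_loss` and `_check_children_exception` helpers yielded by the generators above are missing from this excerpt. A generic sketch of the shape such a mock-based helper usually takes; the attribute names and the constructor bypass are assumptions:

def _check_children_exception(self, excp, expected):
    # Hypothetical helper: build the object without running __init__,
    # make the mocked kazoo client raise `excp`, and assert the wrapper
    # translates it to `expected`.
    inst = ZkSharedCache.__new__(ZkSharedCache)
    inst._kazoo = create_mock(["get_children"])
    inst._kazoo.get_children.side_effect = excp
    inst._path = "/path"
    ntools.assert_raises(expected, inst._list_entries)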
4b3b08e4408a36e23ecf7b49e3efe15dedf8336d | 2,347 | py | Python | scripts/macro-f1-tag.py | shuoyangd/stenella | a677c67c602f2229e4452ed7f38b778897df51c0 | ["MIT"] | 1 | 2021-11-09T04:57:24.000Z | 2021-11-09T04:57:24.000Z | scripts/macro-f1-tag.py | shuoyangd/stenella | a677c67c602f2229e4452ed7f38b778897df51c0 | ["MIT"] | null | null | null | scripts/macro-f1-tag.py | shuoyangd/stenella | a677c67c602f2229e4452ed7f38b778897df51c0 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2021 Shuoyang Ding <shuoyangd@gmail.com>
# Created on 2021-02-11
#
# Distributed under terms of the MIT license.
import argparse
import logging
import math
import sys
logging.basicConfig(
format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
opt_parser = argparse.ArgumentParser(description="")
opt_parser.add_argument("--tag-file", "-tf", required=True, help="file that holds system predictions, one label per line")
opt_parser.add_argument("--ref-file", "-rf", required=True, help="file that holds reference ok/bad labels, one label per line")
if __name__ == "__main__":
ret = opt_parser.parse_known_args()
options = ret[0]
if ret[1]:
logging.warning(
"unknown arguments: {0}".format(
opt_parser.parse_known_args()[1]))
main(options)
| 27.611765 | 127 | 0.647635 |
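`main(options)` is called above but never defined in this excerpt. A sketch consistent with the script's name and its `--tag-file`/`--ref-file` help text (one label per line): compute per-label F1 and macro-average it. The exact label inventory is an assumption:

def main(options):
    # Hypothetical reconstruction of the stripped entry point.
    with open(options.tag_file) as tag_f, open(options.ref_file) as ref_f:
        preds = [line.strip() for line in tag_f]
        refs = [line.strip() for line in ref_f]
    f1_scores = []
    for label in sorted(set(refs)):
        tp = sum(1 for p, r in zip(preds, refs) if p == r == label)
        fp = sum(1 for p, r in zip(preds, refs) if p == label != r)
        fn = sum(1 for p, r in zip(preds, refs) if r == label != p)
        precision = tp / (tp + fp) if tp + fp else 0.0
        recall = tp / (tp + fn) if tp + fn else 0.0
        f1 = 2 * precision * recall / (precision + recall) if precision + recall else 0.0
        f1_scores.append(f1)
    macro_f1 = sum(f1_scores) / len(f1_scores) if f1_scores else 0.0
    logging.info("macro-F1: {0}".format(macro_f1))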
4b3bc0d2b44d013537c672eed0453f853feeca74 | 8,865 | py | Python | Stack/Solutions_Two.py | daniel-zeiler/potential-happiness | 1c9d503a52c35dab8b031f72e63725578735ac73 | ["MIT"] | null | null | null | Stack/Solutions_Two.py | daniel-zeiler/potential-happiness | 1c9d503a52c35dab8b031f72e63725578735ac73 | ["MIT"] | null | null | null | Stack/Solutions_Two.py | daniel-zeiler/potential-happiness | 1c9d503a52c35dab8b031f72e63725578735ac73 | ["MIT"] | null | null | null |
import collections
from typing import List
input = [1, 4, 2, 5, 3]
print(validSubarrays(input))
| 24.901685 | 77 | 0.502538 |
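`validSubarrays` is called above without a definition. Since the file lives under `Stack/`, a monotonic-stack sketch of the usual "number of valid subarrays" solution (count subarrays whose leftmost element is no larger than every other element); this is the standard technique, not necessarily the author's exact code:

def validSubarrays(nums: List[int]) -> int:
    # Keep a non-decreasing stack; every element still on the stack can
    # start a valid subarray ending at the current position.
    result = 0
    stack = []
    for num in nums:
        while stack and stack[-1] > num:
            stack.pop()
        stack.append(num)
        result += len(stack)
    return result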
4b3c016c7ef444898f5da6f026c91b333cec123a | 4,684 | py | Python | scripts/pklhisto2root.py | umd-lhcb/lhcb-ntuples-gen | d306895a0dc6bad2def19ca3d7d1304a5a9be239 | ["BSD-2-Clause"] | null | null | null | scripts/pklhisto2root.py | umd-lhcb/lhcb-ntuples-gen | d306895a0dc6bad2def19ca3d7d1304a5a9be239 | ["BSD-2-Clause"] | 105 | 2018-12-20T19:09:19.000Z | 2022-03-19T09:53:06.000Z | scripts/pklhisto2root.py | umd-lhcb/lhcb-ntuples-gen | d306895a0dc6bad2def19ca3d7d1304a5a9be239 | ["BSD-2-Clause"] | null | null | null |
#!/usr/bin/env python3
#
# Stolen almost verbatim from:
# https://gitlab.cern.ch/lhcb-rta/pidcalib2/-/blob/master/src/pidcalib2/pklhisto2root.py
###############################################################################
# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration #
# #
# This software is distributed under the terms of the GNU General Public #
# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
# #
# In applying this licence, CERN does not waive the privileges and immunities #
# granted to it by virtue of its status as an Intergovernmental Organization #
# or submit itself to any jurisdiction. #
###############################################################################
"""Convert pickled PIDCalib2 histograms to TH*D & save them in a ROOT file.
Only 1D, 2D, and 3D histograms are supported by ROOT. Attempting to convert
higher-dimensional histograms will result in an exception.
"""
import itertools
import math
import pathlib
import pickle
import sys
import boost_histogram as bh
import ROOT
def convert_to_root_histo(name: str, bh_histo: bh.Histogram):
"""Convert boost_histogram histogram to a ROOT histogram.
Only 1D, 2D, and 3D histograms are supported by ROOT. Attempting to convert
higher-dimensional histograms will result in an exception.
Furthermore, the boost histogram must have a storage type that stores
variance, e.g., Weight.
Args:
name: Name of the new ROOT histogram.
bh_histo: The histogram to convert.
Returns:
The converted ROOT histogram. Type depends on dimensionality.
"""
histo = None
if len(bh_histo.axes) == 1:
histo = ROOT.TH1D(name, name, 3, 0, 1)
histo.SetBins(bh_histo.axes[0].size, bh_histo.axes[0].edges)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
elif len(bh_histo.axes) == 2:
histo = ROOT.TH2D(name, name, 3, 0, 1, 3, 0, 1)
histo.SetBins(
bh_histo.axes[0].size,
bh_histo.axes[0].edges,
bh_histo.axes[1].size,
bh_histo.axes[1].edges,
)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
histo.GetYaxis().SetTitle(bh_histo.axes[1].metadata["name"])
elif len(bh_histo.axes) == 3:
histo = ROOT.TH3D(name, name, 3, 0, 1, 3, 0, 1, 3, 0, 1)
histo.SetBins(
bh_histo.axes[0].size,
bh_histo.axes[0].edges,
bh_histo.axes[1].size,
bh_histo.axes[1].edges,
bh_histo.axes[2].size,
bh_histo.axes[2].edges,
)
histo.GetXaxis().SetTitle(bh_histo.axes[0].metadata["name"])
histo.GetYaxis().SetTitle(bh_histo.axes[1].metadata["name"])
histo.GetZaxis().SetTitle(bh_histo.axes[2].metadata["name"])
else:
raise Exception(f"{len(bh_histo.axes)}D histograms not supported by ROOT")
indices_ranges = [list(range(n)) for n in bh_histo.axes.size]
for indices_tuple in itertools.product(*indices_ranges):
root_indices = [index + 1 for index in indices_tuple]
histo.SetBinContent(
histo.GetBin(*root_indices), bh_histo[indices_tuple].value # type: ignore
)
histo.SetBinError(
histo.GetBin(*root_indices), math.sqrt(bh_histo[indices_tuple].variance) # type: ignore # noqa
)
return histo
if __name__ == "__main__":
main()
| 35.755725 | 107 | 0.604825 |
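The module docstring describes a pickle-to-ROOT converter, but no entry point survives in this excerpt. A minimal sketch, assuming the pickled histogram path arrives as the first CLI argument:

def main():
    # Hypothetical entry point: unpickle, convert, and save next to the
    # input file with a .root suffix.
    pkl_path = pathlib.Path(sys.argv[1])
    with open(pkl_path, "rb") as pkl_file:
        bh_histo = pickle.load(pkl_file)
    root_file = ROOT.TFile(str(pkl_path.with_suffix(".root")), "RECREATE")
    histo = convert_to_root_histo(pkl_path.stem, bh_histo)
    histo.Write()
    root_file.Close()


if __name__ == "__main__":
    main()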
4b3d1227daae503aacdc59d3f5aff14a4ad6eda7 | 112 | py | Python | week3DataStructuren/Dictionaries/dictionary.py | hanbioinformatica/owe2a | f572866ef3bc75689d2d571cb393c6d60480655b | ["Apache-2.0"] | null | null | null | week3DataStructuren/Dictionaries/dictionary.py | hanbioinformatica/owe2a | f572866ef3bc75689d2d571cb393c6d60480655b | ["Apache-2.0"] | null | null | null | week3DataStructuren/Dictionaries/dictionary.py | hanbioinformatica/owe2a | f572866ef3bc75689d2d571cb393c6d60480655b | ["Apache-2.0"] | 1 | 2018-12-04T15:23:47.000Z | 2018-12-04T15:23:47.000Z |
d1 = {"koe":4,"slang":0,"konijn":4,"zebra":4}
d1["koe"]
d2 = {"vis":0,"beer":4,"kip":2}
d1.update(d2)
print(d1)
| 22.4 | 45 | 0.553571 |
4b3d9db6b58f7211471a5f7c96ec4eb5f14b1e04 | 958 | py | Python | backend/app/exceptions/exceptions.py | Michal-Miko/competitive-teams | 6bb55542e06121f413248ddf0b75285296b610bb | ["MIT"] | null | null | null | backend/app/exceptions/exceptions.py | Michal-Miko/competitive-teams | 6bb55542e06121f413248ddf0b75285296b610bb | ["MIT"] | null | null | null | backend/app/exceptions/exceptions.py | Michal-Miko/competitive-teams | 6bb55542e06121f413248ddf0b75285296b610bb | ["MIT"] | null | null | null |
from app.database import crud
from fastapi import HTTPException, status
| 41.652174 | 117 | 0.775574 |
4b42581465b8edd2e244428913cc73b52bb89dd0 | 1,964 | py | Python | ASC/Teme/tema1/consumer.py | mihai-constantin/ACS | 098c99d82dad8fb5d0e909da930c72f1185a99e2 | ["Apache-2.0"] | null | null | null | ASC/Teme/tema1/consumer.py | mihai-constantin/ACS | 098c99d82dad8fb5d0e909da930c72f1185a99e2 | ["Apache-2.0"] | null | null | null | ASC/Teme/tema1/consumer.py | mihai-constantin/ACS | 098c99d82dad8fb5d0e909da930c72f1185a99e2 | ["Apache-2.0"] | null | null | null |
"""
This module represents the Consumer.
Computer Systems Architecture Course
Assignment 1
March 2020
"""
from threading import Thread
from time import sleep
| 30.215385 | 85 | 0.563646 |
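Only the module docstring and imports survive; the `Consumer` thread itself is missing. A sketch of the usual shape for this assignment (a thread replaying cart operations against a marketplace); the marketplace method names and the cart-operation schema are assumptions:

class Consumer(Thread):
    def __init__(self, carts, marketplace, retry_wait_time, **kwargs):
        super().__init__(**kwargs)
        self.carts = carts
        self.marketplace = marketplace
        self.retry_wait_time = retry_wait_time

    def run(self):
        for cart in self.carts:
            cart_id = self.marketplace.new_cart()
            for op in cart:
                for _ in range(op["quantity"]):
                    if op["type"] == "add":
                        # Retry until the product becomes available.
                        while not self.marketplace.add_to_cart(cart_id, op["product"]):
                            sleep(self.retry_wait_time)
                    else:
                        self.marketplace.remove_from_cart(cart_id, op["product"])
            self.marketplace.place_order(cart_id)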
4b42f8a4c4ed9dadeb6bc01da50d750be154d614 | 978 | py | Python | rnn_based/model2.py | gunkaynar/heart_anomaly | 94ea2700e2c4d79028e0448022f6857df3c35e04 | ["MIT"] | null | null | null | rnn_based/model2.py | gunkaynar/heart_anomaly | 94ea2700e2c4d79028e0448022f6857df3c35e04 | ["MIT"] | null | null | null | rnn_based/model2.py | gunkaynar/heart_anomaly | 94ea2700e2c4d79028e0448022f6857df3c35e04 | ["MIT"] | null | null | null |
import torch.nn as nn
import torch
import numpy as np
from torch.autograd import Variable
| 31.548387 | 98 | 0.649284 |
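Only the imports survive here. For illustration, a small recurrent classifier of the kind these imports support; the architecture and sizes below are a generic sketch, not the repository's actual model:

class RNNModel(nn.Module):
    def __init__(self, input_size=1, hidden_size=64, num_layers=2, num_classes=2):
        super().__init__()
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers, batch_first=True)
        self.fc = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        # x: (batch, seq_len, input_size); classify from the last step.
        out, _ = self.lstm(x)
        return self.fc(out[:, -1, :])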
4b442e7411a54e834374d8e754640fa1c026849b | 791 | py | Python | src/linuxforhealth/x12/v5010/x12_834_005010X220A1/transaction_set.py | joewright/x12 | 3734589b5ffa554388174234aa7dc37c2543f46a | ["Apache-2.0"] | 4 | 2021-12-11T15:38:03.000Z | 2021-12-22T13:18:31.000Z | src/linuxforhealth/x12/v5010/x12_834_005010X220A1/transaction_set.py | joewright/x12 | 3734589b5ffa554388174234aa7dc37c2543f46a | ["Apache-2.0"] | 55 | 2021-06-12T01:11:15.000Z | 2022-02-03T19:28:32.000Z | src/linuxforhealth/x12/v5010/x12_834_005010X220A1/transaction_set.py | joewright/x12 | 3734589b5ffa554388174234aa7dc37c2543f46a | ["Apache-2.0"] | 3 | 2021-06-11T19:33:19.000Z | 2021-11-19T23:33:58.000Z |
"""
transaction_set.py
Defines the Enrollment 834 005010X220A1 transaction set model.
"""
from typing import List, Optional
from linuxforhealth.x12.models import X12SegmentGroup
from .loops import Footer, Header, Loop1000A, Loop1000B, Loop1000C, Loop2000
from pydantic import root_validator, Field
from linuxforhealth.x12.validators import validate_segment_count
| 27.275862 | 88 | 0.78129 |
4b44ab85799151e1020d7bb62b4190682ce5fa39 | 1,974 | py | Python | src/entities/report/actions/consult.py | LuisArmando-TestCoder/ShareGraph | fa89d37c8fe522c526b903fe25bd1e22fd769425 | ["MIT"] | null | null | null | src/entities/report/actions/consult.py | LuisArmando-TestCoder/ShareGraph | fa89d37c8fe522c526b903fe25bd1e22fd769425 | ["MIT"] | null | null | null | src/entities/report/actions/consult.py | LuisArmando-TestCoder/ShareGraph | fa89d37c8fe522c526b903fe25bd1e22fd769425 | ["MIT"] | null | null | null |
from utilities.getStore import getStore
filePath = "./entities/bill/store.json"
productFilePath = "./entities/product/store.json"
| 28.608696 | 103 | 0.644377 |
4b44b5778d0cc6b0adc1458cef3d5591585dd53d | 1,826 | py | Python | discordbot.py | kinotan/discordpy-startup | 1505c4f78deff7f793de75985e669ee84a78a3f2 | ["MIT"] | null | null | null | discordbot.py | kinotan/discordpy-startup | 1505c4f78deff7f793de75985e669ee84a78a3f2 | ["MIT"] | null | null | null | discordbot.py | kinotan/discordpy-startup | 1505c4f78deff7f793de75985e669ee84a78a3f2 | ["MIT"] | null | null | null |
#discord.py
from asyncio import sleep
import discord
client = discord.Client()
# Run the bot with its Discord token (redacted as "***" in this snippet).
client.run("***")
| 37.265306 | 107 | 0.545455 |
4b453f03ad86e35ca9832f88f32cb3f426b3e7ef | 1,191 | py | Python | api/migrations/0002_element_pokemon.py | danielchikara/pokemon_in_hom | 5da9baa3f87e012ae0d4278668409e1668bf87a6 | ["MIT"] | null | null | null | api/migrations/0002_element_pokemon.py | danielchikara/pokemon_in_hom | 5da9baa3f87e012ae0d4278668409e1668bf87a6 | ["MIT"] | null | null | null | api/migrations/0002_element_pokemon.py | danielchikara/pokemon_in_hom | 5da9baa3f87e012ae0d4278668409e1668bf87a6 | ["MIT"] | null | null | null |
# Generated by Django 3.2.11 on 2022-01-08 03:50
from django.db import migrations, models
import django.db.models.deletion
| 35.029412 | 146 | 0.572628 |
4b45b1a0f99a331cd3db5bcbd3c80d4d359c59e4 | 5,468 | py | Python | src/CLI/actioner/sonic-cli-ip-prefix.py | project-arlo/sonic-mgmt-framework | 562cd84ff3fec9ca705c7df621742f2daa61ce71 | ["Apache-2.0"] | 7 | 2019-10-17T06:12:02.000Z | 2021-09-08T11:16:19.000Z | src/CLI/actioner/sonic-cli-ip-prefix.py | noolex/sonic-mgmt-framework | 5493889adc47fc584b04dca1a0cc0a2007211df4 | ["Apache-2.0"] | 207 | 2019-06-24T04:48:11.000Z | 2020-05-06T05:51:37.000Z | src/CLI/actioner/sonic-cli-ip-prefix.py | noolex/sonic-mgmt-framework | 5493889adc47fc584b04dca1a0cc0a2007211df4 | ["Apache-2.0"] | 20 | 2019-06-27T19:24:45.000Z | 2021-07-15T21:12:30.000Z |
#!/usr/bin/python
###########################################################################
#
# Copyright 2019 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
import sys
import time
import json
import ast
import cli_client as cc
from rpipe_utils import pipestr
from scripts.render_cli import show_cli_output
import urllib3
urllib3.disable_warnings()
if __name__ == '__main__':
pipestr().write(sys.argv)
run(sys.argv[1], sys.argv[2:])
| 34.389937 | 285 | 0.599854 |
4b465725a6717037599028f9aa649996198118b6 | 268 | py | Python | exec-shell.py | zqsheng/snippet | cb14300fc62c616d48e6552ad93c6d33b5e8c9a1 | ["Apache-2.0"] | 1 | 2018-09-10T11:31:33.000Z | 2018-09-10T11:31:33.000Z | exec-shell.py | zqsheng/snippet | cb14300fc62c616d48e6552ad93c6d33b5e8c9a1 | ["Apache-2.0"] | null | null | null | exec-shell.py | zqsheng/snippet | cb14300fc62c616d48e6552ad93c6d33b5e8c9a1 | ["Apache-2.0"] | null | null | null |
import os
import time
exec_count = 100
cmds = []
cmds.append("tar -cPzf /opt/web.tar.gz /opt/web/ /opt/soft")
cmds.append("rm -f /opt/web.tar.gz")
for i in range(exec_count):
for cmd in cmds:
if os.system(cmd) != 0:
break
    time.sleep(1)
| 24.363636 | 60 | 0.604478 |
4b489a2a948e0d5e9116987af060d05a349b77d2 | 123 | py | Python | handlers/__init__.py | IronWolf-K/nonebot_plugin_fr24 | f3752882598de54f41cd9b27456dd3b6f88971e2 | ["MIT"] | null | null | null | handlers/__init__.py | IronWolf-K/nonebot_plugin_fr24 | f3752882598de54f41cd9b27456dd3b6f88971e2 | ["MIT"] | null | null | null | handlers/__init__.py | IronWolf-K/nonebot_plugin_fr24 | f3752882598de54f41cd9b27456dd3b6f88971e2 | ["MIT"] | null | null | null |
from .help import help_handler
from .pre import pre_handler
from .now import now_handler
from .filter import filter_handler
| 30.75 | 34 | 0.845528 |
4b48a1ce648ccd7eddf1077ee9304a100d815be4 | 581 | py | Python | day6_10/day6b.py | invincible-akshay/advent_of_code2020 | 81f207c6f7218ff235c31d67e1b4659cc482297c | ["MIT"] | null | null | null | day6_10/day6b.py | invincible-akshay/advent_of_code2020 | 81f207c6f7218ff235c31d67e1b4659cc482297c | ["MIT"] | null | null | null | day6_10/day6b.py | invincible-akshay/advent_of_code2020 | 81f207c6f7218ff235c31d67e1b4659cc482297c | ["MIT"] | null | null | null |
import utils.fileutils as futils
inp = futils.read_list("../data/day6.txt")
nums_dict = dict()
group_size, res_count = 0, 0
for line in inp:
if line == "":
# res_count += len(nums_set)
for k, v in nums_dict.items():
if v == group_size:
res_count += 1
nums_dict = dict()
group_size = 0
continue
group_size += 1
for ch in line:
nums_dict[ch] = 1 + nums_dict.get(ch, 0)
for k, v in nums_dict.items():
if v == group_size:
res_count += 1
print("Sum of counts: {0}".format(res_count))
| 25.26087 | 48 | 0.567986 |
4b48d527c36dcd783a13d6a5609545147bc8c89c | 45,165 | py | Python | platform/hwconf_data/zgm13/PythonSnippet/ExporterModel.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | ["Zlib"] | null | null | null | platform/hwconf_data/zgm13/PythonSnippet/ExporterModel.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | ["Zlib"] | 1 | 2020-08-25T02:36:22.000Z | 2020-08-25T02:36:22.000Z | platform/hwconf_data/zgm13/PythonSnippet/ExporterModel.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | ["Zlib"] | 1 | 2020-08-25T01:56:04.000Z | 2020-08-25T01:56:04.000Z |
from . import types
from . import dep
from . import RuntimeModel
from . import Metadata
def get_property(mod, property_name):
"""
Get a property model object by searching for property name
:param mod: module on which to look for the property
:param property_name: name of the property
:return: ExporterModel.Property (or superclass) if found, None else.
"""
if mod is None:
return None
prop = mod.get_property(property_name)
return prop
def override_module(module_list, old, new):
"""
Override a module in the module_list with another instance
:param old:
:param new:
:return:
"""
if old.name != new.name:
print("ERROR: Not replacing module with same module")
return
for k,v in enumerate(module_list):
if v == old:
module_list[k] = new
| 44.497537 | 179 | 0.613949 |
4b4b052054557003ef2b409b1b9f8cb5ed96012e | 48,837 | py | Python | tests/schematics_proto3_tests_pb2.py | mlga/schematics-proto3 | 588fe5bc212e203688166638a1c52dfeda931403 | ["MIT"] | null | null | null | tests/schematics_proto3_tests_pb2.py | mlga/schematics-proto3 | 588fe5bc212e203688166638a1c52dfeda931403 | ["MIT"] | 11 | 2020-04-09T13:33:54.000Z | 2020-08-19T17:38:26.000Z | tests/schematics_proto3_tests_pb2.py | mlga/schematics-proto3 | 588fe5bc212e203688166638a1c52dfeda931403 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tests/schematics_proto3_tests.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tests/schematics_proto3_tests.proto',
package='schematics_proto3.tests',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n#tests/schematics_proto3_tests.proto\x12\x17schematics_proto3.tests\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"e\n\x06Nested\x12\x34\n\x05inner\x18\x01 \x01(\x0b\x32%.schematics_proto3.tests.Nested.Inner\x12\r\n\x05other\x18\x02 \x01(\t\x1a\x16\n\x05Inner\x12\r\n\x05value\x18\x01 \x01(\t\">\n\rWrappedDouble\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\"<\n\x0cWrappedFloat\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\"<\n\x0cWrappedInt64\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\">\n\rWrappedUInt64\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\"<\n\x0cWrappedInt32\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\">\n\rWrappedUInt32\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\":\n\x0bWrappedBool\x12+\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\">\n\rWrappedString\x12-\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"<\n\x0cWrappedBytes\x12,\n\x07wrapped\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.BytesValue\"6\n\tTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\">\n\x11RepeatedTimestamp\x12)\n\x05value\x18\x01 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"w\n\x0eOneOfTimestamp\x12.\n\x06value1\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x12,\n\x06value2\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x07\n\x05inner\"\x17\n\x06\x44ouble\x12\r\n\x05value\x18\x01 \x01(\x01\"\x16\n\x05\x46loat\x12\r\n\x05value\x18\x01 \x01(\x02\"\x16\n\x05Int64\x12\r\n\x05value\x18\x01 \x01(\x03\"\x17\n\x06UInt64\x12\r\n\x05value\x18\x01 \x01(\x04\"\x16\n\x05Int32\x12\r\n\x05value\x18\x01 \x01(\x05\"\x17\n\x06UInt32\x12\r\n\x05value\x18\x01 \x01(\r\"\x15\n\x04\x42ool\x12\r\n\x05value\x18\x01 \x01(\x08\"\x17\n\x06String\x12\r\n\x05value\x18\x01 \x01(\t\"\x16\n\x05\x42ytes\x12\r\n\x05value\x18\x01 \x01(\x0c\"\"\n\x11RepeatedPrimitive\x12\r\n\x05value\x18\x01 \x03(\t\"f\n\x0eRepeatedNested\x12<\n\x05inner\x18\x01 \x03(\x0b\x32-.schematics_proto3.tests.RepeatedNested.Inner\x1a\x16\n\x05Inner\x12\r\n\x05value\x18\x01 \x01(\t\"=\n\x0fRepeatedWrapped\x12*\n\x05value\x18\x01 \x03(\x0b\x32\x1b.google.protobuf.Int32Value\"=\n\x0eOneOfPrimitive\x12\x10\n\x06value1\x18\x01 \x01(\x04H\x00\x12\x10\n\x06value2\x18\x02 \x01(\tH\x00\x42\x07\n\x05inner\"\x9c\x01\n\x0bOneOfNested\x12<\n\x06value1\x18\x01 \x01(\x0b\x32*.schematics_proto3.tests.OneOfNested.InnerH\x00\x12.\n\x06value2\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x1a\x16\n\x05Inner\x12\r\n\x05value\x18\x01 \x01(\tB\x07\n\x05inner\":\n\nSimpleEnum\x12,\n\x05value\x18\x01 \x01(\x0e\x32\x1d.schematics_proto3.tests.Enum\"<\n\x0cRepeatedEnum\x12,\n\x05value\x18\x01 \x03(\x0e\x32\x1d.schematics_proto3.tests.Enum\"u\n\tOneOfEnum\x12.\n\x06value1\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x12/\n\x06value2\x18\x02 \x01(\x0e\x32\x1d.schematics_proto3.tests.EnumH\x00\x42\x07\n\x05inner**\n\x04\x45num\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x62\x06proto3')
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
_ENUM = _descriptor.EnumDescriptor(
name='Enum',
full_name='schematics_proto3.tests.Enum',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FIRST', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SECOND', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1922,
serialized_end=1964,
)
_sym_db.RegisterEnumDescriptor(_ENUM)
Enum = enum_type_wrapper.EnumTypeWrapper(_ENUM)
UNKNOWN = 0
FIRST = 1
SECOND = 2
_NESTED_INNER = _descriptor.Descriptor(
name='Inner',
full_name='schematics_proto3.tests.Nested.Inner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Nested.Inner.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=208,
serialized_end=230,
)
_NESTED = _descriptor.Descriptor(
name='Nested',
full_name='schematics_proto3.tests.Nested',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='inner', full_name='schematics_proto3.tests.Nested.inner', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='other', full_name='schematics_proto3.tests.Nested.other', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_NESTED_INNER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=129,
serialized_end=230,
)
_WRAPPEDDOUBLE = _descriptor.Descriptor(
name='WrappedDouble',
full_name='schematics_proto3.tests.WrappedDouble',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedDouble.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=232,
serialized_end=294,
)
_WRAPPEDFLOAT = _descriptor.Descriptor(
name='WrappedFloat',
full_name='schematics_proto3.tests.WrappedFloat',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedFloat.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=296,
serialized_end=356,
)
_WRAPPEDINT64 = _descriptor.Descriptor(
name='WrappedInt64',
full_name='schematics_proto3.tests.WrappedInt64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedInt64.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=358,
serialized_end=418,
)
_WRAPPEDUINT64 = _descriptor.Descriptor(
name='WrappedUInt64',
full_name='schematics_proto3.tests.WrappedUInt64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedUInt64.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=420,
serialized_end=482,
)
_WRAPPEDINT32 = _descriptor.Descriptor(
name='WrappedInt32',
full_name='schematics_proto3.tests.WrappedInt32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedInt32.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=484,
serialized_end=544,
)
_WRAPPEDUINT32 = _descriptor.Descriptor(
name='WrappedUInt32',
full_name='schematics_proto3.tests.WrappedUInt32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedUInt32.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=546,
serialized_end=608,
)
_WRAPPEDBOOL = _descriptor.Descriptor(
name='WrappedBool',
full_name='schematics_proto3.tests.WrappedBool',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedBool.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=610,
serialized_end=668,
)
_WRAPPEDSTRING = _descriptor.Descriptor(
name='WrappedString',
full_name='schematics_proto3.tests.WrappedString',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedString.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=670,
serialized_end=732,
)
_WRAPPEDBYTES = _descriptor.Descriptor(
name='WrappedBytes',
full_name='schematics_proto3.tests.WrappedBytes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wrapped', full_name='schematics_proto3.tests.WrappedBytes.wrapped', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=734,
serialized_end=794,
)
_TIMESTAMP = _descriptor.Descriptor(
name='Timestamp',
full_name='schematics_proto3.tests.Timestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Timestamp.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=796,
serialized_end=850,
)
_REPEATEDTIMESTAMP = _descriptor.Descriptor(
name='RepeatedTimestamp',
full_name='schematics_proto3.tests.RepeatedTimestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedTimestamp.value', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=852,
serialized_end=914,
)
_ONEOFTIMESTAMP = _descriptor.Descriptor(
name='OneOfTimestamp',
full_name='schematics_proto3.tests.OneOfTimestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfTimestamp.value1', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfTimestamp.value2', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfTimestamp.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=916,
serialized_end=1035,
)
_DOUBLE = _descriptor.Descriptor(
name='Double',
full_name='schematics_proto3.tests.Double',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Double.value', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1037,
serialized_end=1060,
)
_FLOAT = _descriptor.Descriptor(
name='Float',
full_name='schematics_proto3.tests.Float',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Float.value', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1062,
serialized_end=1084,
)
_INT64 = _descriptor.Descriptor(
name='Int64',
full_name='schematics_proto3.tests.Int64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Int64.value', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1086,
serialized_end=1108,
)
_UINT64 = _descriptor.Descriptor(
name='UInt64',
full_name='schematics_proto3.tests.UInt64',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.UInt64.value', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1110,
serialized_end=1133,
)
_INT32 = _descriptor.Descriptor(
name='Int32',
full_name='schematics_proto3.tests.Int32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Int32.value', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1135,
serialized_end=1157,
)
_UINT32 = _descriptor.Descriptor(
name='UInt32',
full_name='schematics_proto3.tests.UInt32',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.UInt32.value', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1159,
serialized_end=1182,
)
_BOOL = _descriptor.Descriptor(
name='Bool',
full_name='schematics_proto3.tests.Bool',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Bool.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1184,
serialized_end=1205,
)
_STRING = _descriptor.Descriptor(
name='String',
full_name='schematics_proto3.tests.String',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.String.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1207,
serialized_end=1230,
)
_BYTES = _descriptor.Descriptor(
name='Bytes',
full_name='schematics_proto3.tests.Bytes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.Bytes.value', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1232,
serialized_end=1254,
)
_REPEATEDPRIMITIVE = _descriptor.Descriptor(
name='RepeatedPrimitive',
full_name='schematics_proto3.tests.RepeatedPrimitive',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedPrimitive.value', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1256,
serialized_end=1290,
)
_REPEATEDNESTED_INNER = _descriptor.Descriptor(
name='Inner',
full_name='schematics_proto3.tests.RepeatedNested.Inner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedNested.Inner.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=208,
serialized_end=230,
)
_REPEATEDNESTED = _descriptor.Descriptor(
name='RepeatedNested',
full_name='schematics_proto3.tests.RepeatedNested',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='inner', full_name='schematics_proto3.tests.RepeatedNested.inner', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_REPEATEDNESTED_INNER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1292,
serialized_end=1394,
)
_REPEATEDWRAPPED = _descriptor.Descriptor(
name='RepeatedWrapped',
full_name='schematics_proto3.tests.RepeatedWrapped',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedWrapped.value', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1396,
serialized_end=1457,
)
_ONEOFPRIMITIVE = _descriptor.Descriptor(
name='OneOfPrimitive',
full_name='schematics_proto3.tests.OneOfPrimitive',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfPrimitive.value1', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfPrimitive.value2', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfPrimitive.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=1459,
serialized_end=1520,
)
_ONEOFNESTED_INNER = _descriptor.Descriptor(
name='Inner',
full_name='schematics_proto3.tests.OneOfNested.Inner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.OneOfNested.Inner.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=208,
serialized_end=230,
)
_ONEOFNESTED = _descriptor.Descriptor(
name='OneOfNested',
full_name='schematics_proto3.tests.OneOfNested',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfNested.value1', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfNested.value2', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ONEOFNESTED_INNER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfNested.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=1523,
serialized_end=1679,
)
_SIMPLEENUM = _descriptor.Descriptor(
name='SimpleEnum',
full_name='schematics_proto3.tests.SimpleEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.SimpleEnum.value', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1681,
serialized_end=1739,
)
_REPEATEDENUM = _descriptor.Descriptor(
name='RepeatedEnum',
full_name='schematics_proto3.tests.RepeatedEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='schematics_proto3.tests.RepeatedEnum.value', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1741,
serialized_end=1801,
)
_ONEOFENUM = _descriptor.Descriptor(
name='OneOfEnum',
full_name='schematics_proto3.tests.OneOfEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value1', full_name='schematics_proto3.tests.OneOfEnum.value1', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value2', full_name='schematics_proto3.tests.OneOfEnum.value2', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='inner', full_name='schematics_proto3.tests.OneOfEnum.inner',
index=0, containing_type=None, fields=[]),
],
serialized_start=1803,
serialized_end=1920,
)
_NESTED_INNER.containing_type = _NESTED
_NESTED.fields_by_name['inner'].message_type = _NESTED_INNER
_WRAPPEDDOUBLE.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_WRAPPEDFLOAT.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._FLOATVALUE
_WRAPPEDINT64.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_WRAPPEDUINT64.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT64VALUE
_WRAPPEDINT32.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_WRAPPEDUINT32.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE
_WRAPPEDBOOL.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_WRAPPEDSTRING.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_WRAPPEDBYTES.fields_by_name['wrapped'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE
_TIMESTAMP.fields_by_name['value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_REPEATEDTIMESTAMP.fields_by_name['value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ONEOFTIMESTAMP.fields_by_name['value1'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ONEOFTIMESTAMP.fields_by_name['value2'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ONEOFTIMESTAMP.oneofs_by_name['inner'].fields.append(
_ONEOFTIMESTAMP.fields_by_name['value1'])
_ONEOFTIMESTAMP.fields_by_name['value1'].containing_oneof = _ONEOFTIMESTAMP.oneofs_by_name['inner']
_ONEOFTIMESTAMP.oneofs_by_name['inner'].fields.append(
_ONEOFTIMESTAMP.fields_by_name['value2'])
_ONEOFTIMESTAMP.fields_by_name['value2'].containing_oneof = _ONEOFTIMESTAMP.oneofs_by_name['inner']
_REPEATEDNESTED_INNER.containing_type = _REPEATEDNESTED
_REPEATEDNESTED.fields_by_name['inner'].message_type = _REPEATEDNESTED_INNER
_REPEATEDWRAPPED.fields_by_name['value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_ONEOFPRIMITIVE.oneofs_by_name['inner'].fields.append(
_ONEOFPRIMITIVE.fields_by_name['value1'])
_ONEOFPRIMITIVE.fields_by_name['value1'].containing_oneof = _ONEOFPRIMITIVE.oneofs_by_name['inner']
_ONEOFPRIMITIVE.oneofs_by_name['inner'].fields.append(
_ONEOFPRIMITIVE.fields_by_name['value2'])
_ONEOFPRIMITIVE.fields_by_name['value2'].containing_oneof = _ONEOFPRIMITIVE.oneofs_by_name['inner']
_ONEOFNESTED_INNER.containing_type = _ONEOFNESTED
_ONEOFNESTED.fields_by_name['value1'].message_type = _ONEOFNESTED_INNER
_ONEOFNESTED.fields_by_name['value2'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ONEOFNESTED.oneofs_by_name['inner'].fields.append(
_ONEOFNESTED.fields_by_name['value1'])
_ONEOFNESTED.fields_by_name['value1'].containing_oneof = _ONEOFNESTED.oneofs_by_name['inner']
_ONEOFNESTED.oneofs_by_name['inner'].fields.append(
_ONEOFNESTED.fields_by_name['value2'])
_ONEOFNESTED.fields_by_name['value2'].containing_oneof = _ONEOFNESTED.oneofs_by_name['inner']
_SIMPLEENUM.fields_by_name['value'].enum_type = _ENUM
_REPEATEDENUM.fields_by_name['value'].enum_type = _ENUM
_ONEOFENUM.fields_by_name['value1'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ONEOFENUM.fields_by_name['value2'].enum_type = _ENUM
_ONEOFENUM.oneofs_by_name['inner'].fields.append(
_ONEOFENUM.fields_by_name['value1'])
_ONEOFENUM.fields_by_name['value1'].containing_oneof = _ONEOFENUM.oneofs_by_name['inner']
_ONEOFENUM.oneofs_by_name['inner'].fields.append(
_ONEOFENUM.fields_by_name['value2'])
_ONEOFENUM.fields_by_name['value2'].containing_oneof = _ONEOFENUM.oneofs_by_name['inner']
DESCRIPTOR.message_types_by_name['Nested'] = _NESTED
DESCRIPTOR.message_types_by_name['WrappedDouble'] = _WRAPPEDDOUBLE
DESCRIPTOR.message_types_by_name['WrappedFloat'] = _WRAPPEDFLOAT
DESCRIPTOR.message_types_by_name['WrappedInt64'] = _WRAPPEDINT64
DESCRIPTOR.message_types_by_name['WrappedUInt64'] = _WRAPPEDUINT64
DESCRIPTOR.message_types_by_name['WrappedInt32'] = _WRAPPEDINT32
DESCRIPTOR.message_types_by_name['WrappedUInt32'] = _WRAPPEDUINT32
DESCRIPTOR.message_types_by_name['WrappedBool'] = _WRAPPEDBOOL
DESCRIPTOR.message_types_by_name['WrappedString'] = _WRAPPEDSTRING
DESCRIPTOR.message_types_by_name['WrappedBytes'] = _WRAPPEDBYTES
DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP
DESCRIPTOR.message_types_by_name['RepeatedTimestamp'] = _REPEATEDTIMESTAMP
DESCRIPTOR.message_types_by_name['OneOfTimestamp'] = _ONEOFTIMESTAMP
DESCRIPTOR.message_types_by_name['Double'] = _DOUBLE
DESCRIPTOR.message_types_by_name['Float'] = _FLOAT
DESCRIPTOR.message_types_by_name['Int64'] = _INT64
DESCRIPTOR.message_types_by_name['UInt64'] = _UINT64
DESCRIPTOR.message_types_by_name['Int32'] = _INT32
DESCRIPTOR.message_types_by_name['UInt32'] = _UINT32
DESCRIPTOR.message_types_by_name['Bool'] = _BOOL
DESCRIPTOR.message_types_by_name['String'] = _STRING
DESCRIPTOR.message_types_by_name['Bytes'] = _BYTES
DESCRIPTOR.message_types_by_name['RepeatedPrimitive'] = _REPEATEDPRIMITIVE
DESCRIPTOR.message_types_by_name['RepeatedNested'] = _REPEATEDNESTED
DESCRIPTOR.message_types_by_name['RepeatedWrapped'] = _REPEATEDWRAPPED
DESCRIPTOR.message_types_by_name['OneOfPrimitive'] = _ONEOFPRIMITIVE
DESCRIPTOR.message_types_by_name['OneOfNested'] = _ONEOFNESTED
DESCRIPTOR.message_types_by_name['SimpleEnum'] = _SIMPLEENUM
DESCRIPTOR.message_types_by_name['RepeatedEnum'] = _REPEATEDENUM
DESCRIPTOR.message_types_by_name['OneOfEnum'] = _ONEOFENUM
DESCRIPTOR.enum_types_by_name['Enum'] = _ENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Nested = _reflection.GeneratedProtocolMessageType('Nested', (_message.Message,), dict(
Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict(
DESCRIPTOR = _NESTED_INNER,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Nested.Inner)
))
,
DESCRIPTOR = _NESTED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Nested)
))
_sym_db.RegisterMessage(Nested)
_sym_db.RegisterMessage(Nested.Inner)
WrappedDouble = _reflection.GeneratedProtocolMessageType('WrappedDouble', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDDOUBLE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedDouble)
))
_sym_db.RegisterMessage(WrappedDouble)
WrappedFloat = _reflection.GeneratedProtocolMessageType('WrappedFloat', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDFLOAT,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedFloat)
))
_sym_db.RegisterMessage(WrappedFloat)
WrappedInt64 = _reflection.GeneratedProtocolMessageType('WrappedInt64', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDINT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedInt64)
))
_sym_db.RegisterMessage(WrappedInt64)
WrappedUInt64 = _reflection.GeneratedProtocolMessageType('WrappedUInt64', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDUINT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedUInt64)
))
_sym_db.RegisterMessage(WrappedUInt64)
WrappedInt32 = _reflection.GeneratedProtocolMessageType('WrappedInt32', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDINT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedInt32)
))
_sym_db.RegisterMessage(WrappedInt32)
WrappedUInt32 = _reflection.GeneratedProtocolMessageType('WrappedUInt32', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDUINT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedUInt32)
))
_sym_db.RegisterMessage(WrappedUInt32)
WrappedBool = _reflection.GeneratedProtocolMessageType('WrappedBool', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDBOOL,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedBool)
))
_sym_db.RegisterMessage(WrappedBool)
WrappedString = _reflection.GeneratedProtocolMessageType('WrappedString', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDSTRING,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedString)
))
_sym_db.RegisterMessage(WrappedString)
WrappedBytes = _reflection.GeneratedProtocolMessageType('WrappedBytes', (_message.Message,), dict(
DESCRIPTOR = _WRAPPEDBYTES,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.WrappedBytes)
))
_sym_db.RegisterMessage(WrappedBytes)
Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict(
DESCRIPTOR = _TIMESTAMP,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Timestamp)
))
_sym_db.RegisterMessage(Timestamp)
RepeatedTimestamp = _reflection.GeneratedProtocolMessageType('RepeatedTimestamp', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDTIMESTAMP,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedTimestamp)
))
_sym_db.RegisterMessage(RepeatedTimestamp)
OneOfTimestamp = _reflection.GeneratedProtocolMessageType('OneOfTimestamp', (_message.Message,), dict(
DESCRIPTOR = _ONEOFTIMESTAMP,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfTimestamp)
))
_sym_db.RegisterMessage(OneOfTimestamp)
Double = _reflection.GeneratedProtocolMessageType('Double', (_message.Message,), dict(
DESCRIPTOR = _DOUBLE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Double)
))
_sym_db.RegisterMessage(Double)
Float = _reflection.GeneratedProtocolMessageType('Float', (_message.Message,), dict(
DESCRIPTOR = _FLOAT,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Float)
))
_sym_db.RegisterMessage(Float)
Int64 = _reflection.GeneratedProtocolMessageType('Int64', (_message.Message,), dict(
DESCRIPTOR = _INT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Int64)
))
_sym_db.RegisterMessage(Int64)
UInt64 = _reflection.GeneratedProtocolMessageType('UInt64', (_message.Message,), dict(
DESCRIPTOR = _UINT64,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.UInt64)
))
_sym_db.RegisterMessage(UInt64)
Int32 = _reflection.GeneratedProtocolMessageType('Int32', (_message.Message,), dict(
DESCRIPTOR = _INT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Int32)
))
_sym_db.RegisterMessage(Int32)
UInt32 = _reflection.GeneratedProtocolMessageType('UInt32', (_message.Message,), dict(
DESCRIPTOR = _UINT32,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.UInt32)
))
_sym_db.RegisterMessage(UInt32)
Bool = _reflection.GeneratedProtocolMessageType('Bool', (_message.Message,), dict(
DESCRIPTOR = _BOOL,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Bool)
))
_sym_db.RegisterMessage(Bool)
String = _reflection.GeneratedProtocolMessageType('String', (_message.Message,), dict(
DESCRIPTOR = _STRING,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.String)
))
_sym_db.RegisterMessage(String)
Bytes = _reflection.GeneratedProtocolMessageType('Bytes', (_message.Message,), dict(
DESCRIPTOR = _BYTES,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.Bytes)
))
_sym_db.RegisterMessage(Bytes)
RepeatedPrimitive = _reflection.GeneratedProtocolMessageType('RepeatedPrimitive', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDPRIMITIVE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedPrimitive)
))
_sym_db.RegisterMessage(RepeatedPrimitive)
RepeatedNested = _reflection.GeneratedProtocolMessageType('RepeatedNested', (_message.Message,), dict(
Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDNESTED_INNER,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedNested.Inner)
))
,
DESCRIPTOR = _REPEATEDNESTED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedNested)
))
_sym_db.RegisterMessage(RepeatedNested)
_sym_db.RegisterMessage(RepeatedNested.Inner)
RepeatedWrapped = _reflection.GeneratedProtocolMessageType('RepeatedWrapped', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDWRAPPED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedWrapped)
))
_sym_db.RegisterMessage(RepeatedWrapped)
OneOfPrimitive = _reflection.GeneratedProtocolMessageType('OneOfPrimitive', (_message.Message,), dict(
DESCRIPTOR = _ONEOFPRIMITIVE,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfPrimitive)
))
_sym_db.RegisterMessage(OneOfPrimitive)
OneOfNested = _reflection.GeneratedProtocolMessageType('OneOfNested', (_message.Message,), dict(
Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict(
DESCRIPTOR = _ONEOFNESTED_INNER,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfNested.Inner)
))
,
DESCRIPTOR = _ONEOFNESTED,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfNested)
))
_sym_db.RegisterMessage(OneOfNested)
_sym_db.RegisterMessage(OneOfNested.Inner)
SimpleEnum = _reflection.GeneratedProtocolMessageType('SimpleEnum', (_message.Message,), dict(
DESCRIPTOR = _SIMPLEENUM,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.SimpleEnum)
))
_sym_db.RegisterMessage(SimpleEnum)
RepeatedEnum = _reflection.GeneratedProtocolMessageType('RepeatedEnum', (_message.Message,), dict(
DESCRIPTOR = _REPEATEDENUM,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.RepeatedEnum)
))
_sym_db.RegisterMessage(RepeatedEnum)
OneOfEnum = _reflection.GeneratedProtocolMessageType('OneOfEnum', (_message.Message,), dict(
DESCRIPTOR = _ONEOFENUM,
__module__ = 'tests.schematics_proto3_tests_pb2'
# @@protoc_insertion_point(class_scope:schematics_proto3.tests.OneOfEnum)
))
_sym_db.RegisterMessage(OneOfEnum)
# @@protoc_insertion_point(module_scope)
| 33.750518 | 3,198 | 0.757377 |
4b4dceb98d231438a803f497f8f31de32f299051 | 241 | py | Python | sps_demo/accounts/api/serializers.py | JuanDM93/sps_django | df47c7ee63a1e99468644a6f428a6cdabc7ac6ae | [
"MIT"
] | null | null | null | sps_demo/accounts/api/serializers.py | JuanDM93/sps_django | df47c7ee63a1e99468644a6f428a6cdabc7ac6ae | [
"MIT"
] | 1 | 2021-07-27T06:46:05.000Z | 2021-07-27T06:46:05.000Z | sps_demo/accounts/api/serializers.py | JuanDM93/sps_django | df47c7ee63a1e99468644a6f428a6cdabc7ac6ae | [
"MIT"
] | null | null | null | from rest_framework.serializers import ModelSerializer
from accounts.models import Account
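# Reconstruction sketch: the serializer body was stripped during
# extraction; only the imports above survived. A minimal ModelSerializer
# is assumed here -- the original field selection is unknown.
class AccountSerializer(ModelSerializer):

    class Meta:
        model = Account
        fields = "__all__"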
| 21.909091 | 54 | 0.6639 |
4b4ddf5eeb83ed879035c41d407475a7baf89592 | 6,320 | py | Python | benchmark/HIGGS/explore/contour_nll.py | victor-estrade/SystGradDescent | 822e7094290301ec47a99433381a8d6406798aff | [
"MIT"
] | 2 | 2019-03-20T09:05:02.000Z | 2019-03-20T15:23:44.000Z | benchmark/HIGGS/explore/contour_nll.py | victor-estrade/SystGradDescent | 822e7094290301ec47a99433381a8d6406798aff | [
"MIT"
] | null | null | null | benchmark/HIGGS/explore/contour_nll.py | victor-estrade/SystGradDescent | 822e7094290301ec47a99433381a8d6406798aff | [
"MIT"
] | null | null | null | # coding: utf-8
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import logging
import datetime
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from config import SAVING_DIR
from config import SEED
from visual import set_plot_config
set_plot_config()
from utils.log import set_logger
from utils.log import flush
from utils.log import print_line
from utils.evaluation import evaluate_minuit
from problem.higgs import HiggsConfigTesOnly as Config
from problem.higgs import get_minimizer
from problem.higgs import get_minimizer_no_nuisance
from problem.higgs import get_generators_torch
from problem.higgs import HiggsNLL as NLLComputer
from ..common import N_BINS
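# ---------------------------------------------------------------------
# Reconstruction sketch: the body of this exploration script was lost in
# extraction; only the imports above survived. The code below illustrates
# the intended idea -- scanning the Higgs NLL over a (mu, tes) grid and
# drawing a contour -- but the get_generators_torch return layout and the
# NLLComputer call signature are assumptions, not the authors' actual API.
# ---------------------------------------------------------------------
def main():
    set_logger()
    logger = logging.getLogger()
    logger.info(f"Contour exploration started at {datetime.datetime.now()}")
    config = Config()
    train_generator, valid_generator, test_generator = get_generators_torch(SEED)  # assumed layout
    compute_nll = NLLComputer(valid_generator, config, n_bins=N_BINS)  # assumed signature
    mu_scan = np.linspace(0.5, 1.5, 20)
    tes_scan = np.linspace(0.95, 1.05, 20)
    nll_grid = np.array([[compute_nll(mu, tes) for tes in tes_scan] for mu in mu_scan])
    plt.contourf(tes_scan, mu_scan, nll_grid, levels=20)
    plt.xlabel("tes")
    plt.ylabel("mu")
    plt.colorbar(label="NLL")
    plt.savefig(os.path.join(SAVING_DIR, "contour_nll.png"))
    plt.close()


if __name__ == '__main__':
    main()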
| 39.012346 | 125 | 0.708861 |
4b4e8e6fb685efd1c0bdeed695e0d638ddd30af1 | 1,376 | py | Python | backend/framework/qlf/dashboard/migrations/0012_auto_20180921_1717.py | desihub/qlf | a9c455f7aee41d7901c89ae90dd821c617340a86 | [
"BSD-3-Clause"
] | 8 | 2017-09-08T00:24:20.000Z | 2019-02-03T07:31:03.000Z | backend/framework/qlf/dashboard/migrations/0012_auto_20180921_1717.py | desihub/qlf | a9c455f7aee41d7901c89ae90dd821c617340a86 | [
"BSD-3-Clause"
] | 77 | 2017-06-15T21:39:09.000Z | 2019-07-13T19:41:27.000Z | backend/framework/qlf/dashboard/migrations/0012_auto_20180921_1717.py | desihub/qlf | a9c455f7aee41d7901c89ae90dd821c617340a86 | [
"BSD-3-Clause"
] | 5 | 2017-09-10T02:25:03.000Z | 2019-02-06T20:55:59.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-09-21 17:17
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
| 38.222222 | 152 | 0.635174 |
4b53328e075db009dbb8d21c3c121da0a2ce955a | 476 | py | Python | qcodes/instrument_drivers/rohde_schwarz/HMC8042.py | LGruenhaupt/Qcodes | ffb74dae53c13c4885e61b5a2df3f833d524de04 | [
"MIT"
] | 1 | 2019-12-07T01:25:49.000Z | 2019-12-07T01:25:49.000Z | qcodes/instrument_drivers/rohde_schwarz/HMC8042.py | Dominik-Vogel/Qcodes | b4cf7d58bc1bf3be97af6bf48f57cb6b87d588bb | [
"MIT"
] | 12 | 2020-10-13T16:53:37.000Z | 2020-10-14T17:16:22.000Z | qcodes/instrument_drivers/rohde_schwarz/HMC8042.py | Dominik-Vogel/Qcodes | b4cf7d58bc1bf3be97af6bf48f57cb6b87d588bb | [
"MIT"
] | 1 | 2020-05-03T22:47:40.000Z | 2020-05-03T22:47:40.000Z | from .private.HMC804x import _RohdeSchwarzHMC804x
from qcodes.utils.deprecate import deprecate_moved_to_qcd
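# Reconstruction sketch: the class body was stripped during extraction.
# num_channels=2 and the decorator arguments are assumptions (the HMC8042
# is the two-channel model of the HMC804x family, and this shim was
# deprecated in favour of qcodes_contrib_drivers).
class RohdeSchwarzHMC8042(_RohdeSchwarzHMC804x):
    """
    Deprecated stub for the Rohde & Schwarz HMC8042 power supply.
    """

    @deprecate_moved_to_qcd(
        alternative="qcodes_contrib_drivers.drivers.RohdeSchwarz.HMC8042.RohdeSchwarzHMC8042"
    )
    def __init__(self, name, address, **kwargs):
        super().__init__(name, address, num_channels=2, **kwargs)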
| 39.666667 | 110 | 0.781513 |
4b5611952b114a3d2cf44eadfe8d22e693d8c643 | 682 | py | Python | python_fundamentals/Multiple_Sum_Average/index.py | justnclrk/Python | 0922961cbd94694a69ae8132a5c33baf552d8d89 | [
"MIT"
] | null | null | null | python_fundamentals/Multiple_Sum_Average/index.py | justnclrk/Python | 0922961cbd94694a69ae8132a5c33baf552d8d89 | [
"MIT"
] | 8 | 2020-06-06T01:02:06.000Z | 2022-03-12T00:24:13.000Z | python_fundamentals/Multiple_Sum_Average/index.py | justnclrk/Python | 0922961cbd94694a69ae8132a5c33baf552d8d89 | [
"MIT"
] | null | null | null | # Multiples -- Part I - Write code that prints all the odd numbers from 1 to 1000. Use the for loop and don't use a list to do this exercise
for i in range(1, 1000, 2):
print(i)
# Multiples -- Part II - Create another program that prints all the multiples of 5 from 5 to 1,000,000
for m in range(5, 1000001, 5):  # range's stop is exclusive, so use 1000001 to include 1,000,000
print(m)
# Sum List -- Create a program that prints the sum of all the values in the list: a = [1, 2, 5, 10, 255, 3]
a = [1, 2, 5, 10, 255, 3]
b = sum(a)
print(b)
# Average List -- Create a program that prints the average of the values in the list: c = [1, 2, 5, 10, 255, 3]
c = [1, 2, 5, 10, 255, 3]
dSum = sum(c)
eLen = len(c)
fAvg = (dSum / eLen)
print(fAvg)
| 40.117647 | 140 | 0.64956 |
4b5646cef1fca290360a2f8a03244f3cf60a9b62 | 2,817 | py | Python | examples/gen9_valset_test.py | mgesteiro/pyubx2 | 02fd8fa2863b88ed2d746b5800717a1b6b213181 | [
"BSD-3-Clause"
] | null | null | null | examples/gen9_valset_test.py | mgesteiro/pyubx2 | 02fd8fa2863b88ed2d746b5800717a1b6b213181 | [
"BSD-3-Clause"
] | null | null | null | examples/gen9_valset_test.py | mgesteiro/pyubx2 | 02fd8fa2863b88ed2d746b5800717a1b6b213181 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
"""
Demo example to test CFG-VALSET ublox message - generation 9
@author: mgesteiro
"""
import sys
import time
from serial import Serial, SerialException, SerialTimeoutException
from pyubx2 import (
UBXMessage,
GET,
SET,
VALSET_RAM,
UBXMessageError,
UBXTypeError,
UBXParseError,
)
def message_valsetuart1baudrate_set(baudrate):
"""
Function to generate a CFG-VALSET CFG-UART1-BAUDRATE set UBX message
"""
# https://www.u-blox.com/en/docs/UBX-18010854#page=86&zoom=auto,-74,499
# CFG-UART1-BAUDRATE Key = 0x40520001
return UBXMessage(
"CFG",
"CFG-VALSET",
SET,
        payload=b"\x00"  # version
        + VALSET_RAM  # layers
        + int(0).to_bytes(2, byteorder="little", signed=False)  # reserved0
        + 0x40520001.to_bytes(4, byteorder="little", signed=False)  # key
        + baudrate.to_bytes(4, byteorder="little", signed=False),  # value
)
def message_valsetuart1baudrate_response():
"""
    Function to generate an ACK-ACK (acknowledgement) UBX message
"""
# https://www.u-blox.com/en/docs/UBX-18010854#page=52&zoom=auto,-74,379
return UBXMessage("ACK", "ACK-ACK", GET, clsID=0x06, msgID=0x8A)
if __name__ == "__main__":
PORTNAME = "/dev/tty.usbserial-A50285BI"
BAUDRATE = 230400
try:
print("\nBuilding CFG-UART1-BAUDRATE VALSET message:")
msg = message_valsetuart1baudrate_set(BAUDRATE)
print(f" GENERATED: {msg.serialize().hex()}")
print(
" EXPECTED: b562068a0c00000100000100524000840300b7ef"
+ " (Note: valid for 230400 baudrate)"
)
print(f" {msg}\n")
print(f"This demo will now set your module's UART1 to {BAUDRATE} (only in RAM)")
try:
input("press <ENTER> to continue, CTRL-C to abort!\n")
except KeyboardInterrupt:
print("\nExecution aborted.\n")
sys.exit(0)
sport = Serial(PORTNAME, BAUDRATE, timeout=2)
time.sleep(0.250) # stabilize
print(
f"Sending set message to {PORTNAME} at {BAUDRATE} "
+ "(edit the code to change these values)\n"
)
sport.flushInput()
sport.write(msg.serialize())
print("Receiving response ...")
raw = sport.read(512)
START = raw.find(b"\xB5\x62")
data = raw[START : START + 10] # expected ACK
msg = message_valsetuart1baudrate_response()
print(f" RECEIVED: {data.hex()}")
print(f" EXPECTED: {msg.serialize().hex()}")
print(f" {UBXMessage.parse(data)}\n")
except (
UBXMessageError,
UBXTypeError,
UBXParseError,
SerialException,
SerialTimeoutException,
) as err:
print(f"Something broke : {err}\n")
| 28.17 | 88 | 0.606674 |
4b57edd76cfedc441b5ed69fe2a9fd78c4dbd2d2 | 3,996 | py | Python | main.py | saswatsamal/Snake-Game | 2c0f427fd6001f09d26a4586ce55453af706c355 | [
"CC0-1.0"
] | 2 | 2021-04-25T07:34:14.000Z | 2021-04-30T15:24:55.000Z | main.py | saswatsamal/Snake-Game | 2c0f427fd6001f09d26a4586ce55453af706c355 | [
"CC0-1.0"
] | null | null | null | main.py | saswatsamal/Snake-Game | 2c0f427fd6001f09d26a4586ce55453af706c355 | [
"CC0-1.0"
] | null | null | null | import pygame
import time
import sys, random
pygame.init()
yellow = (255, 255, 102)
green = (0, 255, 0)
black = (0, 0, 0)
width = 1280
height = 720
gameDisplay = pygame.display.set_mode((width, height))
pygame.display.set_caption('Snake Game By Saswat Samal')
clock = pygame.time.Clock()
snake_block = 10
snake_speed = 15
font_style = pygame.font.SysFont("ubuntu", 25)
score_font = pygame.font.SysFont("ubuntu", 20)
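# ---------------------------------------------------------------------
# Reconstruction sketch: the original menu/game functions were stripped
# during extraction, leaving the main_menu() call below undefined. The
# minimal playable loop here is a stand-in, not the author's original
# implementation.
# ---------------------------------------------------------------------
def show_score(score):
    value = score_font.render("Score: " + str(score), True, yellow)
    gameDisplay.blit(value, [10, 10])


def main_menu():
    x, y = width / 2, height / 2
    dx = dy = 0
    snake = []
    length = 1
    foodx = round(random.randrange(0, width - snake_block) / 10.0) * 10.0
    foody = round(random.randrange(0, height - snake_block) / 10.0) * 10.0
    game_over = False
    while not game_over:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                game_over = True
            elif event.type == pygame.KEYDOWN:
                # Disallow reversing straight into the body.
                if event.key == pygame.K_LEFT and dx == 0:
                    dx, dy = -snake_block, 0
                elif event.key == pygame.K_RIGHT and dx == 0:
                    dx, dy = snake_block, 0
                elif event.key == pygame.K_UP and dy == 0:
                    dx, dy = 0, -snake_block
                elif event.key == pygame.K_DOWN and dy == 0:
                    dx, dy = 0, snake_block
        x += dx
        y += dy
        if x < 0 or x >= width or y < 0 or y >= height:
            game_over = True
        gameDisplay.fill(black)
        pygame.draw.rect(gameDisplay, green, [foodx, foody, snake_block, snake_block])
        snake.append([x, y])
        if len(snake) > length:
            del snake[0]
        for block in snake[:-1]:
            if block == [x, y]:
                game_over = True
        for block in snake:
            pygame.draw.rect(gameDisplay, yellow, [block[0], block[1], snake_block, snake_block])
        show_score(length - 1)
        pygame.display.update()
        if x == foodx and y == foody:
            foodx = round(random.randrange(0, width - snake_block) / 10.0) * 10.0
            foody = round(random.randrange(0, height - snake_block) / 10.0) * 10.0
            length += 1
        clock.tick(snake_speed)
    message = font_style.render("Game Over!", True, yellow)
    gameDisplay.blit(message, [width / 2 - 70, height / 2])
    pygame.display.update()
    time.sleep(2)
    pygame.quit()
    sys.exit()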
main_menu()
 | 27.369863 | 104 | 0.56006 |
4b59cb1bcbcd0c6d58e12de2aac812a57e139151 | 918 | py | Python | SoftMax_Regression.py | chunish/tfboy-is-on-the-way | 7cd4c1f7c0c1dd94189377ee0751f2c232a1e98c | [
"Apache-2.0"
] | null | null | null | SoftMax_Regression.py | chunish/tfboy-is-on-the-way | 7cd4c1f7c0c1dd94189377ee0751f2c232a1e98c | [
"Apache-2.0"
] | null | null | null | SoftMax_Regression.py | chunish/tfboy-is-on-the-way | 7cd4c1f7c0c1dd94189377ee0751f2c232a1e98c | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot = True)
sess = tf.InteractiveSession()
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, W) + b)
y_ = tf.placeholder(tf.float32, [None, 10])  # one-hot ground-truth labels
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices = [1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
tf.global_variables_initializer().run()
for i in range(10000):
batch_xs, batch_ys = mnist.train.next_batch(100)
train_step.run({x: batch_xs, y_: batch_ys})
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(accuracy.eval({x: mnist.test.images, y_: mnist.test.labels}))
| 31.655172 | 87 | 0.734205 |
4b5a4dc0a470a6b6a0219e69281685d307bd50e5 | 465 | py | Python | ltc/analyzer/migrations/0006_graphitevariable_function.py | v0devil/jltom | b302a39a187b8e1154c6deda636a4db8b30bb40b | [
"MIT"
] | 4 | 2016-12-30T13:26:59.000Z | 2017-04-26T12:07:36.000Z | ltc/analyzer/migrations/0006_graphitevariable_function.py | v0devil/jltom | b302a39a187b8e1154c6deda636a4db8b30bb40b | [
"MIT"
] | null | null | null | ltc/analyzer/migrations/0006_graphitevariable_function.py | v0devil/jltom | b302a39a187b8e1154c6deda636a4db8b30bb40b | [
"MIT"
] | null | null | null | # Generated by Django 2.2.20 on 2021-05-27 14:53
from django.db import migrations, models
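# ---------------------------------------------------------------------
# Reconstruction sketch: the migration body was stripped during
# extraction. The AddField operation is inferred from Django's
# auto-generated file name (0006_graphitevariable_function); the parent
# migration and the field definition are hypothetical.
# ---------------------------------------------------------------------
class Migration(migrations.Migration):

    dependencies = [
        ('analyzer', '0005_auto'),  # hypothetical parent migration name
    ]

    operations = [
        migrations.AddField(
            model_name='graphitevariable',
            name='function',
            field=models.CharField(blank=True, max_length=64, null=True),  # assumed definition
        ),
    ]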
| 24.473684 | 117 | 0.584946 |
4b5c74257ca507c7289c021413a4bdff6ed7d6a6 | 2,865 | py | Python | Python/061.py | jaimeliew1/Project_Euler_Solutions | 963c9c6d6571cade8f87341f97a6a2cd1af202bb | [
"MIT"
] | null | null | null | Python/061.py | jaimeliew1/Project_Euler_Solutions | 963c9c6d6571cade8f87341f97a6a2cd1af202bb | [
"MIT"
] | 1 | 2018-04-16T21:01:50.000Z | 2018-04-16T21:01:50.000Z | Python/061.py | jaimeliew1/Project_Euler_Solutions | 963c9c6d6571cade8f87341f97a6a2cd1af202bb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Solution to Project Euler problem 61
Author: Jaime Liew
https://github.com/jaimeliew1/Project_Euler_Solutions
"""
import math
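from itertools import permutations


# ---------------------------------------------------------------------
# Reconstruction sketch: the helper functions were stripped during
# extraction (isPoly below still references them). The predicates invert
# the standard figurate-number formulas; run() is a stand-in depth-first
# search for problem 61's cyclic set, not necessarily the author's
# original approach.
# ---------------------------------------------------------------------
def _is_int(x):
    return abs(x - round(x)) < 1e-9


def isTri(n):
    return _is_int((math.sqrt(8 * n + 1) - 1) / 2)


def isSqr(n):
    return _is_int(math.sqrt(n))


def isPent(n):
    return _is_int((math.sqrt(24 * n + 1) + 1) / 6)


def isHex(n):
    return _is_int((math.sqrt(8 * n + 1) + 1) / 4)


def isHept(n):
    return _is_int((math.sqrt(40 * n + 9) + 3) / 10)


def isOct(n):
    return _is_int((math.sqrt(12 * n + 4) + 2) / 6)


def _four_digit_polygonals(p):
    # All 4-digit polygonal numbers of type p (3=triangular .. 8=octagonal).
    nums, n = [], 1
    while True:
        value = n * ((p - 2) * n - (p - 4)) // 2
        if value >= 10000:
            return nums
        if value >= 1000:
            nums.append(value)
        n += 1


def _extend(order, pools, chain):
    # Depth-first search: the last two digits of each number must be the
    # first two digits of the next, wrapping around at the end.
    if len(chain) == len(order):
        return chain if chain[-1] % 100 == chain[0] // 100 else None
    for value in pools[order[len(chain)]]:
        if value // 100 == chain[-1] % 100:
            found = _extend(order, pools, chain + [value])
            if found:
                return found
    return None


def run():
    pools = {p: _four_digit_polygonals(p) for p in range(3, 9)}
    # Fix the triangular number first: any cycle can be rotated to start there.
    for order in permutations(range(4, 9)):
        for start in pools[3]:
            found = _extend((3,) + order, pools, [start])
            if found:
                return sum(found)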
isPoly = [isTri, isSqr, isPent, isHex, isHept, isOct]
if __name__ == "__main__":
print(run())
| 25.131579 | 89 | 0.506806 |
4b5cbe04dec6e7c55b09522e71410c55307b8fa0 | 302 | py | Python | shop/views/shop_views.py | cuescience/cuescience-shop | bf5ea159f9277d1d6ab7acfcad3f2517723a225c | [
"MIT"
] | null | null | null | shop/views/shop_views.py | cuescience/cuescience-shop | bf5ea159f9277d1d6ab7acfcad3f2517723a225c | [
"MIT"
] | null | null | null | shop/views/shop_views.py | cuescience/cuescience-shop | bf5ea159f9277d1d6ab7acfcad3f2517723a225c | [
"MIT"
] | null | null | null | from shop.models import Product
from django.shortcuts import render_to_response
from django.template import RequestContext
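# ---------------------------------------------------------------------
# Reconstruction sketch: the view functions were stripped during
# extraction. The template path and context variable below are
# hypothetical; the pre-Django-1.10 render_to_response/RequestContext
# style matches the surviving imports.
# ---------------------------------------------------------------------
def product_list(request):
    products = Product.objects.all()
    return render_to_response(
        "shop/product_list.html",
        {"products": products},
        context_instance=RequestContext(request),
    )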
| 20.133333 | 108 | 0.788079 |
4b5d16684195ca44a761cc1ab6755c005952e4d5 | 163 | py | Python | qrogue/test/statevector_tests.py | 7Magic7Mike7/Qrogue | 70bd5671a77981c1d4b633246321ba44f13c21ff | [
"MIT"
] | 4 | 2021-12-14T19:13:43.000Z | 2022-02-16T13:25:38.000Z | qrogue/test/statevector_tests.py | 7Magic7Mike7/Qrogue | 70bd5671a77981c1d4b633246321ba44f13c21ff | [
"MIT"
] | null | null | null | qrogue/test/statevector_tests.py | 7Magic7Mike7/Qrogue | 70bd5671a77981c1d4b633246321ba44f13c21ff | [
"MIT"
] | 1 | 2022-01-04T18:35:51.000Z | 2022-01-04T18:35:51.000Z | import numpy as np
from qrogue.game.logic.actors import StateVector
stv = StateVector([1 / np.sqrt(2), 0 + 0j, 0 + 0j, 1 / np.sqrt(2)])
#stv.extend(1)
print(stv)
 | 23.285714 | 67 | 0.680982 |
4b5fd841b1005516ab298b5be16fb1dd41b071b3 | 3,190 | py | Python | taehyoungram/images/views.py | TaeHyoungKwon/taehyoungram | 055c9effdaa718d60e7627196754ea6b48dded20 | [
"MIT"
] | null | null | null | taehyoungram/images/views.py | TaeHyoungKwon/taehyoungram | 055c9effdaa718d60e7627196754ea6b48dded20 | [
"MIT"
] | 7 | 2020-02-12T01:23:48.000Z | 2022-03-11T23:26:02.000Z | taehyoungram/images/views.py | TaeHyoungKwon/taehyoungram | 055c9effdaa718d60e7627196754ea6b48dded20 | [
"MIT"
] | null | null | null | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from .models import Image, Comment, Like
from .serializers import ImageSerializer, CommentSerializer, LikeSerializer
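# ---------------------------------------------------------------------
# Reconstruction sketch: the view classes were stripped during extraction;
# only the imports survived. A minimal list/create APIView for images is
# shown below -- the Comment/Like endpoints and the `creator` field are
# assumptions, not the original implementation.
# ---------------------------------------------------------------------
class Images(APIView):

    def get(self, request, format=None):
        images = Image.objects.all()
        serializer = ImageSerializer(images, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def post(self, request, format=None):
        serializer = ImageSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(creator=request.user)  # hypothetical ownership field
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)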
| 27.73913 | 89 | 0.626646 |
4b6108a9e3aec6079aa6d738f5836676b16bd14f | 4,195 | py | Python | examples/zoomed.py | ColmTalbot/psd-covariance-matrices | 59631dd1860e9cf28658df1ce28b10f6f8d59868 | [
"MIT"
] | null | null | null | examples/zoomed.py | ColmTalbot/psd-covariance-matrices | 59631dd1860e9cf28658df1ce28b10f6f8d59868 | [
"MIT"
] | null | null | null | examples/zoomed.py | ColmTalbot/psd-covariance-matrices | 59631dd1860e9cf28658df1ce28b10f6f8d59868 | [
"MIT"
] | 2 | 2021-07-01T02:00:10.000Z | 2021-08-02T07:29:03.000Z | #!/usr/bin/env python
"""
Compute the comparison of the analytic and experimental PSD matrices.
This will generate Figure 1.
This is probably the only example that will run in a reasonable time without
a GPU.
For more details on the method see https://arxiv.org/abs/2106.13785.
"""
import numpy as np
import matplotlib.pyplot as plt
from bilby.core.utils import create_white_noise, create_frequency_series
from scipy.signal.windows import tukey
from scipy.interpolate import interp1d
from tqdm.auto import trange
from coarse_psd_matrix.utils import (
compute_psd_matrix,
create_parser,
fetch_psd_data,
)
from coarse_psd_matrix.plotting import plot_psd_matrix
from matplotlib import rcParams
rcParams["font.family"] = "serif"
rcParams["font.serif"] = "Computer Modern Roman"
rcParams["font.size"] = 20
rcParams["text.usetex"] = True
rcParams["grid.alpha"] = 0
if __name__ == "__main__":
parser = create_parser()
args = parser.parse_args()
interferometer = args.interferometer
outdir = args.outdir
duration = args.duration
medium_duration = args.medium_duration
sampling_frequency = args.sampling_frequency
low_frequency = args.low_frequency
tukey_alpha = args.tukey_alpha
minimum_frequency = 480
maximum_frequency = 530
event = args.event
data = fetch_psd_data(
interferometer_name=interferometer,
event=event,
duration=duration,
sampling_frequency=sampling_frequency,
low_frequency=low_frequency,
tukey_alpha=tukey_alpha,
medium_duration=medium_duration,
outdir=outdir,
)
svd = compute_psd_matrix(
interferometer_name=interferometer,
event=event,
duration=duration,
sampling_frequency=sampling_frequency,
low_frequency=low_frequency,
tukey_alpha=tukey_alpha,
minimum_frequency=minimum_frequency,
maximum_frequency=maximum_frequency,
medium_duration=medium_duration,
outdir=outdir,
)
psd = data["medium_psd"][: sampling_frequency // 2 * medium_duration + 1]
original_frequencies = create_frequency_series(
duration=medium_duration, sampling_frequency=sampling_frequency
)
new_frequencies = create_frequency_series(
duration=256, sampling_frequency=sampling_frequency
)
psd = interp1d(original_frequencies, psd)(new_frequencies)
short_window = tukey(duration * sampling_frequency, tukey_alpha)
short_window /= np.mean(short_window ** 2) ** 0.5
analytic_psd_matrix = (svd[0] * svd[1]) @ svd[2]
estimated_psd_matrix = np.zeros_like(analytic_psd_matrix)
nfft = duration * sampling_frequency
start_idx = minimum_frequency * duration
stop_idx = maximum_frequency * duration
n_average = 1024 * 1024 // 64
for _ in trange(n_average):
white_noise, frequencies = create_white_noise(
sampling_frequency=2048, duration=256
)
coloured_noise = white_noise * psd ** 0.5
td_noise = np.fft.irfft(coloured_noise).reshape((-1, nfft))
fd_noise = np.fft.rfft(td_noise * short_window)
reduced_noise = fd_noise[:, start_idx : stop_idx + 1]
estimated_psd_matrix += np.einsum(
"ki,kj->ij", reduced_noise, reduced_noise.conjugate()
) / 2
total_averages = n_average * len(reduced_noise)
estimated_psd_matrix /= total_averages
rcParams["font.family"] = "serif"
rcParams["font.serif"] = "Computer Modern Roman"
rcParams["font.size"] = 20
rcParams["text.usetex"] = True
rcParams["grid.alpha"] = 0
fig, axes = plt.subplots(nrows=2, figsize=(10, 16))
kwargs = dict(
minimum_frequency=minimum_frequency,
maximum_frequency=maximum_frequency,
duration=duration,
vmin=-53,
vmax=-41.8,
tick_step=10,
)
plot_psd_matrix(estimated_psd_matrix, axes[0], **kwargs)
plot_psd_matrix(analytic_psd_matrix, axes[1], **kwargs)
axes[0].text(-25, 190, "(a)")
axes[1].text(-25, 190, "(b)")
plt.tight_layout()
plt.savefig(f"{outdir}/zoom_{tukey_alpha}.pdf")
if tukey_alpha == 0.1:
plt.savefig("figure_1.pdf")
plt.close()
| 32.269231 | 77 | 0.695828 |
4b624ab13f54c8cfd7032b48000491920f6d9a27 | 5,581 | py | Python | web_spider/web_spider/pipelines.py | syun0216/simple_ci | 83d31cb04357fe0bd428ab8f09c2db81a06eb723 | [
"MIT"
] | null | null | null | web_spider/web_spider/pipelines.py | syun0216/simple_ci | 83d31cb04357fe0bd428ab8f09c2db81a06eb723 | [
"MIT"
] | null | null | null | web_spider/web_spider/pipelines.py | syun0216/simple_ci | 83d31cb04357fe0bd428ab8f09c2db81a06eb723 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
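# Hypothetical sketch of the stripped pipeline class; connection parameters,
# table and column names are illustrative, not from the original file.
class MysqlPipeline(object):
    def open_spider(self, spider):
        self.conn = pymysql.connect(host='localhost', user='root',
                                    password='', db='spider',
                                    charset='utf8mb4')
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        self.cursor.execute('INSERT INTO items (data) VALUES (%s)',
                            (str(item),))
        self.conn.commit()
        return item

    def close_spider(self, spider):
        self.cursor.close()
        self.conn.close()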
| 52.650943 | 226 | 0.392403 |
4b63b53c7d4c16192575ee81d07d9b767d76c245 | 239 | py | Python | etl_framework/config_mixins/AddDestinationMixin.py | pantheon-ci-bot/etl-framework | 36d4c0d5c26ddd7c0bb2d2b99e3138b50a21c46f | [
"MIT"
] | 2 | 2017-03-01T20:09:06.000Z | 2019-02-08T17:10:16.000Z | etl_framework/config_mixins/AddDestinationMixin.py | pantheon-ci-bot/etl-framework | 36d4c0d5c26ddd7c0bb2d2b99e3138b50a21c46f | [
"MIT"
] | 40 | 2015-10-10T15:02:21.000Z | 2020-03-17T22:32:04.000Z | etl_framework/config_mixins/AddDestinationMixin.py | pantheon-ci-bot/etl-framework | 36d4c0d5c26ddd7c0bb2d2b99e3138b50a21c46f | [
"MIT"
] | 2 | 2018-11-14T21:50:58.000Z | 2022-03-07T20:59:27.000Z | """mixin to add destinations to config attributes"""
| 23.9 | 68 | 0.707113 |
4b65eb4040ecf11e53140f9d3ec6fb5084fff907 | 6,298 | py | Python | src/utilities/download_bc.py | geoschem/integrated_methane_inversion | 0615e3b76c111beadaf0d7fb5b9fa99aa782f403 | [
"MIT"
] | null | null | null | src/utilities/download_bc.py | geoschem/integrated_methane_inversion | 0615e3b76c111beadaf0d7fb5b9fa99aa782f403 | [
"MIT"
] | 3 | 2022-02-14T20:42:35.000Z | 2022-03-29T18:11:40.000Z | src/utilities/download_bc.py | geoschem/integrated_methane_inversion | 0615e3b76c111beadaf0d7fb5b9fa99aa782f403 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
Description:
------------
This Python script (assumes Python3) downloads boundary conditions
files from AWS S3 to a target directory for the requested date range.
Remarks:
--------
(1) Jiawei Zhuang found that it is much faster to issue aws s3 cp
commands from a bash script than a Python script. Therefore,
in this routine we create a bash script with all of the
download commands that will be executed by the main routine.
"""
# Imports
import os
import sys
import subprocess
# Exit with error if we are not using Python3
assert sys.version_info.major >= 3, "ERROR: Python 3 is required to run download_bc.py!"
# Define global variables
DATA_DOWNLOAD_SCRIPT = "./auto_generated_download_script.sh"
def list_missing_files(start_date, end_date, destination):
"""
Creates list of BC files in date range that do not already
exist at destination.
Args:
-----
start_date : str
Initial date of simulation.
end_date : str
Final date of simulation.
destination : str
Target directory for downloaded files
"""
missing_files = []
start_str = str(start_date)
start_year = start_str[:4]
start_month = start_str[4:6]
start_day = start_str[6:8]
end_str = str(end_date)
end_year = end_str[:4]
end_month = end_str[4:6]
end_day = end_str[6:8]
month_days = [31, [28, 29], 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
file_prefix = "GEOSChem.BoundaryConditions."
file_suffix = "_0000z.nc4"
for year in range(int(start_year), int(end_year) + 1):
# skip years with definite no data
if year < 2018:
print(
"Skipping BC data download for ", str(year), ": no data from this year"
)
continue
init_month = 1
final_month = 12
if year == int(start_year):
# only get desired months from incomplete years
init_month = int(start_month)
if year == int(end_year):
final_month = int(end_month)
for month in range(init_month, final_month + 1):
# skip months with definite no data
if year == 2018 and month < 4:
print(
"Skipping BC data download for ",
str(year),
"/0",
str(month),
": no data from this month",
)
continue
# add 0 to month string if necessary
month_prefix = "0" if month < 10 else ""
init_day = 1
final_day = month_days[month - 1]
# leap day
if month == 2:
if year % 4 == 0:
final_day = final_day[1]
else:
final_day = final_day[0]
if month == int(start_month) and year == int(start_year):
# only get desired days from incomplete months
init_day = int(start_day)
if month == int(end_month) and year == int(end_year):
final_day = int(end_day)
for day in range(init_day, final_day + 1):
# add 0 to day string if necessary
day_prefix = "0" if day < 10 else ""
# check if file for this day already exists
file_name = (
file_prefix
+ str(year)
+ month_prefix
+ str(month)
+ day_prefix
+ str(day)
+ file_suffix
)
# add file to download list if needed
if not os.path.exists(destination + "/" + file_name):
missing_files.append(file_name)
return missing_files
def create_download_script(paths, destination):
"""
Creates a data download script to obtain missing files
Args:
-----
paths : dict
Output of function list_missing_files.
"""
# Create the data download script
with open(DATA_DOWNLOAD_SCRIPT, "w") as f:
# Write shebang line to script
print("#!/bin/bash\n", file=f)
print("# This script was generated by download_bc.py\n", file=f)
cmd_prefix = "aws s3 cp --only-show-errors --request-payer=requester "
remote_root = "s3://imi-boundary-conditions/"
# make destination if needed
if not os.path.exists(destination):
os.mkdir(destination)
# Write download commands for only the missing data files
for path in paths:
cmd = cmd_prefix + remote_root + path + " " + destination
print(cmd, file=f)
print(file=f)
# Close file and make it executable
f.close()
os.chmod(DATA_DOWNLOAD_SCRIPT, 0o755)
def download_the_data(start_date, end_date, destination):
"""
Downloads required boundary conditions files from AWS.
Args:
-----
start_date : str
Initial date of simulation.
end_date : str
Final date of simulation.
destination : str
Target directory for downloaded files
"""
# Get a list of missing data paths
paths = list_missing_files(start_date, end_date, destination)
# Create script to download missing files from AWS S3
create_download_script(paths, destination)
# Run the data download script and return the status
# Remove the file afterwards
status = subprocess.call(DATA_DOWNLOAD_SCRIPT)
os.remove(DATA_DOWNLOAD_SCRIPT)
# Raise an exception if the data was not successfully downloaded
if status != 0:
err_msg = "Error downloading data from AWS!"
raise Exception(err_msg)
def main():
"""
Main program. Gets command-line arguments and calls function
download_the_data to initiate a data-downloading process.
Calling sequence:
-----------------
./download_data.py start_date end_date destination
Example call:
-------------
./download_data.py 20200501 20200531 /home/ubuntu/ExtData/BoundaryConditions
"""
download_the_data(sys.argv[1], sys.argv[2], sys.argv[3])
if __name__ == "__main__":
main()
| 30.872549 | 88 | 0.580343 |
4b675eebc011dcbf30a99943e8204d368b2ed0b9 | 216 | py | Python | backend/core/__init__.py | itimor/one-ops | f1111735de252012752dfabe11598e9690c89257 | [
"MIT"
] | 2 | 2020-09-25T05:52:55.000Z | 2021-01-14T07:06:17.000Z | backend/core/__init__.py | itimor/one-ops | f1111735de252012752dfabe11598e9690c89257 | [
"MIT"
] | 6 | 2021-03-19T10:20:05.000Z | 2021-09-22T19:30:21.000Z | backend/core/__init__.py | itimor/one-ops | f1111735de252012752dfabe11598e9690c89257 | [
"MIT"
] | 1 | 2022-02-24T01:37:06.000Z | 2022-02-24T01:37:06.000Z | from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
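# The standard Celery bootstrap this comment refers to is not shown; a
# hypothetical sketch would look like the following (the module name
# `core.celery` is a guess):
#
# from .celery import app as celery_app
# __all__ = ('celery_app',)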
# import pymysql
#
# pymysql.install_as_MySQLdb()
| 30.857143 | 56 | 0.805556 |
4b6ab5ac671d440fac187ec117fb8831906a3c8f | 39 | tac | Python | semantics_and_generate/samples/link1.tac | AHEADer/my_decaf_compiler | 42ba9f140c5fda3cd2b4fdb727745d2cfd39c923 | [
"MIT"
] | 1 | 2018-01-03T03:35:38.000Z | 2018-01-03T03:35:38.000Z | semantics_and_generate/samples/link1.tac | AHEADer/my_decaf_compiler | 42ba9f140c5fda3cd2b4fdb727745d2cfd39c923 | [
"MIT"
] | null | null | null | semantics_and_generate/samples/link1.tac | AHEADer/my_decaf_compiler | 42ba9f140c5fda3cd2b4fdb727745d2cfd39c923 | [
"MIT"
] | null | null | null | ____notmain:
BeginFunc 0 ;
EndFunc ;
| 9.75 | 14 | 0.717949 |
4b6add180192d528e3ed133e29c757a81886beb8 | 483 | py | Python | NoteBooks/Curso de Python/Python/Paradigmas/Object Oriented Programming/Modelando Objetos_2.py | Alejandro-sin/Learning_Notebooks | 161d6bed4c7b1d171b45f61c0cc6fa91e9894aad | [
"MIT"
] | 1 | 2021-02-26T13:12:22.000Z | 2021-02-26T13:12:22.000Z | NoteBooks/Curso de Python/Python/Paradigmas/Object Oriented Programming/Modelando Objetos_2.py | Alejandro-sin/Learning_Notebooks | 161d6bed4c7b1d171b45f61c0cc6fa91e9894aad | [
"MIT"
] | null | null | null | NoteBooks/Curso de Python/Python/Paradigmas/Object Oriented Programming/Modelando Objetos_2.py | Alejandro-sin/Learning_Notebooks | 161d6bed4c7b1d171b45f61c0cc6fa91e9894aad | [
"MIT"
] | null | null | null | """
Exercise for operations between currencies
"""
""" Representation of the currency """
# I'm not entirely clear on the purpose of this function -- what exactly is it meant to display?
# def __repr__(self):
# info = self.name
# info2 = self.symbol
# return info, info2
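# Minimal sketch of the `Curency` class assumed by the usage below; the
# original definition is not shown, so the attribute names are guesses
# (the spelling `Curency` is kept exactly as it appears in the code).
class Curency:
    def __init__(self, name, symbol, rate):
        self.name = name
        self.symbol = symbol
        self.rate = rate

    def __repr__(self):
        return "{} ({}): {}".format(self.name, self.symbol, self.rate)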
euro = Curency("Euro","EU","3.2")
print(euro)
| 17.888889 | 84 | 0.621118 |
4b6b2b2466b7f50264d915b0b9ab9925c879719e | 587 | py | Python | kora/install/blender.py | wannaphong/kora | 8a9034097d07b14094e077769c02a0b4857d179b | [
"MIT"
] | 91 | 2020-05-26T05:54:51.000Z | 2022-03-09T07:33:44.000Z | kora/install/blender.py | wannaphong/kora | 8a9034097d07b14094e077769c02a0b4857d179b | [
"MIT"
] | 12 | 2020-10-03T10:09:11.000Z | 2021-03-06T23:12:21.000Z | kora/install/blender.py | wannaphong/kora | 8a9034097d07b14094e077769c02a0b4857d179b | [
"MIT"
] | 16 | 2020-07-07T18:39:29.000Z | 2021-03-06T03:46:49.000Z | import os
from IPython import get_ipython
# need this fix first
os.environ["LD_PRELOAD"] = ""
os.system("apt remove libtcmalloc-minimal4")
os.system("apt install libtcmalloc-minimal4")
os.environ["LD_PRELOAD"] = "/usr/lib/x86_64-linux-gnu/libtcmalloc_minimal.so.4.3.0"
os.system("dpkg -L libtcmalloc-minimal4")
# then install blender
url = "https://download.blender.org/release/Blender2.83/blender-2.83.0-linux64.tar.xz"
os.system(f"curl {url} | tar xJ")
os.system("ln -s /content/blender-2.83.0-linux64/blender /usr/local/bin/blender")
# show result
get_ipython().system("blender -v") | 36.6875 | 86 | 0.749574 |
4b6cb29835a30f52c4bb14e8a53ca4a8d4a5cdb7 | 2,352 | py | Python | parks/models.py | ExpertOfNone/expert_of_none | 9ff4e4279a570712766546122c014c754f753485 | [
"MIT"
] | null | null | null | parks/models.py | ExpertOfNone/expert_of_none | 9ff4e4279a570712766546122c014c754f753485 | [
"MIT"
] | null | null | null | parks/models.py | ExpertOfNone/expert_of_none | 9ff4e4279a570712766546122c014c754f753485 | [
"MIT"
] | null | null | null | from django.db import models
from django_countries.fields import CountryField
from localflavor.us.models import USStateField
from base.models import EONBaseModel
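# Hypothetical sketch of a stripped model; field names are illustrative and
# not taken from the original file.
class Park(EONBaseModel):
    name = models.CharField(max_length=100)
    state = USStateField(blank=True)
    country = CountryField()

    def __str__(self):
        return self.name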
| 31.36 | 97 | 0.702381 |
4b6d6ef8de7836397b32a70984d9c9488bd0f64f | 3,891 | py | Python | roomfinder_dispo/roomfinder_dispo/dispo.py | GuillaumeMorini/roomfinder | d756bba6e50a7361ecf9cf529af4a1775a0e836b | [
"Apache-2.0"
] | 14 | 2017-01-23T02:58:53.000Z | 2020-12-21T14:05:07.000Z | roomfinder_dispo/roomfinder_dispo/dispo.py | GuillaumeMorini/roomfinder | d756bba6e50a7361ecf9cf529af4a1775a0e836b | [
"Apache-2.0"
] | 2 | 2017-01-23T09:46:54.000Z | 2017-09-11T10:15:07.000Z | roomfinder_dispo/roomfinder_dispo/dispo.py | GuillaumeMorini/roomfinder | d756bba6e50a7361ecf9cf529af4a1775a0e836b | [
"Apache-2.0"
] | 9 | 2017-01-23T02:55:27.000Z | 2020-05-20T18:38:18.000Z | #!/usr/bin/env python2.7
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
from flask import Flask, render_template, request, jsonify
import argparse
import datetime
import os, sys
import requests
from socket import error as SocketError
import errno
import json
import pika
import uuid
app = Flask(__name__)
def on_response(ch, method, props, body):
global corr_id
global response
if corr_id == props.correlation_id:
response = body
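# Hypothetical sketch of the RPC helper that pairs with on_response() above;
# the original Flask routes are not shown. It uses the pika 0.x blocking API
# (consistent with this Python 2.7 script) to publish a request and wait for
# the correlated reply.
def rpc_request(host, port, routing_key, payload):
    global corr_id, response
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host=host, port=int(port)))
    channel = connection.channel()
    result = channel.queue_declare(exclusive=True)
    callback_queue = result.method.queue
    channel.basic_consume(on_response, no_ack=True, queue=callback_queue)
    corr_id = str(uuid.uuid4())
    response = None
    channel.basic_publish(exchange='',
                          routing_key=routing_key,
                          properties=pika.BasicProperties(
                              reply_to=callback_queue,
                              correlation_id=corr_id),
                          body=json.dumps(payload))
    while response is None:
        connection.process_data_events()
    connection.close()
    return response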
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser("Room Finder Dispo Service")
parser.add_argument("-r","--rabbitmq", help="IP or hostname for rabbitmq server, e.g. 'rabbit.domain.com'.")
parser.add_argument("-p","--port", help="tcp port for rabitmq server, e.g. '2765'.")
args = parser.parse_args()
rabbitmq = args.rabbitmq
if (rabbitmq == None):
rabbitmq = os.getenv("roomfinder_rabbitmq_server")
if (rabbitmq == None):
get_rabbitmq_server = raw_input("What is the rabbitmq server IP or hostname? ")
rabbitmq = get_rabbitmq_server
rabbitmq_port = args.port
if (rabbitmq_port == None):
rabbitmq_port = os.getenv("roomfinder_rabbitmq_port")
if (rabbitmq_port == None):
get_rabbitmq_port = raw_input("What is the rabbitmq TCP port? ")
rabbitmq_port = get_rabbitmq_port
try:
app.run(host='0.0.0.0', port=int("5000"))
except:
try:
app.run(host='0.0.0.0', port=int("5000"))
except:
print "Dispo web services error"
| 31.893443 | 138 | 0.627602 |
4b6e6530e3333cd913c07220255b812f35a812cc | 167 | py | Python | Sea/model/components/Component3D.py | FRidh/Sea | b474e93a449570a9ba3b915c4d80f814feee2545 | [
"BSD-3-Clause"
] | 2 | 2015-07-02T13:34:09.000Z | 2015-09-28T09:07:52.000Z | Sea/model/components/Component3D.py | FRidh/Sea | b474e93a449570a9ba3b915c4d80f814feee2545 | [
"BSD-3-Clause"
] | null | null | null | Sea/model/components/Component3D.py | FRidh/Sea | b474e93a449570a9ba3b915c4d80f814feee2545 | [
"BSD-3-Clause"
] | 1 | 2022-01-22T03:01:54.000Z | 2022-01-22T03:01:54.000Z |
import numpy as np
from ComponentStructural import ComponentStructural
from ..subsystems import SubsystemStructural
| 20.875 | 51 | 0.844311 |
4b6f1aec2b3a7aa82fa7792516bb55e9223b7c08 | 1,063 | py | Python | bot.py | federicosapienza/InboxNotionTelegramBot | 031d5e78cd352dfb692b93f3e0b421695f1dc18e | [
"MIT"
] | null | null | null | bot.py | federicosapienza/InboxNotionTelegramBot | 031d5e78cd352dfb692b93f3e0b421695f1dc18e | [
"MIT"
] | null | null | null | bot.py | federicosapienza/InboxNotionTelegramBot | 031d5e78cd352dfb692b93f3e0b421695f1dc18e | [
"MIT"
] | null | null | null | from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, ConversationHandler
import logging
from utils import TELEGRAM_TOKEN
from handlers import start, ask_new_url, get_url, get_description, cancel
from handlers import URL_URL, URL_DESCRIPTION
logging.basicConfig(format='%(levelname)s - %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
updater = None
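# Hypothetical sketch of the stripped start_bot(); it wires the imported
# handlers into a ConversationHandler and starts polling. The '/new' command
# name is an assumption, and the python-telegram-bot v12-style API matches
# the imports above.
def start_bot():
    global updater
    updater = Updater(TELEGRAM_TOKEN, use_context=True)
    dp = updater.dispatcher
    dp.add_handler(CommandHandler('start', start))
    dp.add_handler(ConversationHandler(
        entry_points=[CommandHandler('new', ask_new_url)],
        states={
            URL_URL: [MessageHandler(Filters.text & ~Filters.command, get_url)],
            URL_DESCRIPTION: [MessageHandler(Filters.text & ~Filters.command, get_description)],
        },
        fallbacks=[CommandHandler('cancel', cancel)],
    ))
    updater.start_polling()
    updater.idle()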
start_bot()
| 25.926829 | 94 | 0.739417 |
4b6fc4e98137fcd105847298b470d6ad64f71618 | 841 | py | Python | examples/face.py | birkenfeld/python-gr | 1d6cd36616a73c8e569b8348869e6e30f3830ec4 | [
"RSA-MD"
] | null | null | null | examples/face.py | birkenfeld/python-gr | 1d6cd36616a73c8e569b8348869e6e30f3830ec4 | [
"RSA-MD"
] | null | null | null | examples/face.py | birkenfeld/python-gr | 1d6cd36616a73c8e569b8348869e6e30f3830ec4 | [
"RSA-MD"
] | null | null | null | #!/usr/bin/env python
"""
Simple surface plot example
"""
from gr import *
from math import *
x = [-2 + i * 0.5 for i in range(0, 29)]
y = [-7 + i * 0.5 for i in range(0, 29)]
z = list(range(0, 841))
for i in range(0, 29):
for j in range(0, 29):
r1 = sqrt((x[j] - 5)**2 + y[i]**2)
r2 = sqrt((x[j] + 5)**2 + y[i]**2)
z[i * 29 - 1 + j] = (exp(cos(r1)) + exp(cos(r2)) - 0.9) * 25
setcharheight(24.0/500)
settextalign(TEXT_HALIGN_CENTER, TEXT_VALIGN_TOP)
textext(0.5, 0.9, "Surface Example")
(tbx, tby) = inqtextext(0.5, 0.9, "Surface Example")
fillarea(tbx, tby)
setwindow(-2, 12, -7, 7)
setspace(-80, 200, 45, 70)
setcharheight(14.0/500)
axes3d(1, 0, 20, -2, -7, -80, 2, 0, 2, -0.01)
axes3d(0, 1, 0, 12, -7, -80, 0, 2, 0, 0.01)
titles3d("X-Axis", "Y-Axis", "Z-Axis")
surface(x, y, z, 3)
surface(x, y, z, 1)
updatews()
| 22.72973 | 64 | 0.567182 |
4b70e648e25de3717c9f7effa2fbe1723459da27 | 344 | py | Python | tests/system/examples/dask/assets/dask_function.py | Hedingber/mlrun | e2269718fcc7caa7e1aa379ac28495830b45f9da | [
"Apache-2.0"
] | 1 | 2021-02-17T08:12:33.000Z | 2021-02-17T08:12:33.000Z | tests/system/examples/dask/assets/dask_function.py | Hedingber/mlrun | e2269718fcc7caa7e1aa379ac28495830b45f9da | [
"Apache-2.0"
] | 1 | 2020-12-31T14:36:29.000Z | 2020-12-31T14:36:29.000Z | tests/system/examples/dask/assets/dask_function.py | Hedingber/mlrun | e2269718fcc7caa7e1aa379ac28495830b45f9da | [
"Apache-2.0"
] | 1 | 2021-08-30T21:43:38.000Z | 2021-08-30T21:43:38.000Z | # function that will be distributed
# wrapper function, uses the dask client object
| 24.571429 | 47 | 0.627907 |
4b714a892a0b336d54d129baf723bfd26bcf8c4a | 1,495 | py | Python | app/core.py | antmicro/raw-image-data-previewer | 1fc14848a27ce628047cf3e473a9f30f83c9892d | [
"Apache-2.0"
] | 5 | 2021-06-08T15:37:23.000Z | 2021-06-10T15:41:21.000Z | app/core.py | antmicro/raw-image-data-previewer | 1fc14848a27ce628047cf3e473a9f30f83c9892d | [
"Apache-2.0"
] | 37 | 2021-03-12T12:48:56.000Z | 2021-12-09T11:41:05.000Z | app/core.py | antmicro/raw-image-data-previewer | 1fc14848a27ce628047cf3e473a9f30f83c9892d | [
"Apache-2.0"
] | 9 | 2021-03-22T14:03:37.000Z | 2021-12-31T07:22:04.000Z | """Main functionalities."""
from .image.image import (Image, RawDataContainer)
from .image.color_format import AVAILABLE_FORMATS
from .parser.factory import ParserFactory
import cv2 as cv
import os
| 26.22807 | 69 | 0.681605 |
4b73785d9dd8a4aaaf6a1aac49dbeb16165c0050 | 1,423 | py | Python | demo/demo/urls.py | AlanCoding/Example-Django-App | 1cca52b720d1b117ccce780618d9af94f584ac2c | [
"MIT"
] | null | null | null | demo/demo/urls.py | AlanCoding/Example-Django-App | 1cca52b720d1b117ccce780618d9af94f584ac2c | [
"MIT"
] | null | null | null | demo/demo/urls.py | AlanCoding/Example-Django-App | 1cca52b720d1b117ccce780618d9af94f584ac2c | [
"MIT"
] | null | null | null | """demo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.conf import settings
from django.contrib import admin
from django.views.generic import TemplateView
import debug_toolbar
from rockband import rocking_urls
# from movies import urls as movie_urls
from async import async_urls
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name="index.html")),
url(r'^admin/', admin.site.urls),
# Rock band urls
url(r'^bands/', include(rocking_urls)),
# asynchronous demo app
url(r'^async/', include(async_urls)),
# url(r'$movies/', include(movie_urls))
# Django auth views
url('^', include('django.contrib.auth.urls')),
]
# For the debug toolbar
if settings.DEBUG:
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| 33.093023 | 79 | 0.702741 |
4b7544498643883f50311519a373ed59f4faa469 | 3,478 | py | Python | app/urls.py | etihadprime/etihadwebclass | 3b46d9068afeb0806198ef08fe26849ab9a09bb9 | [
"Apache-2.0"
] | null | null | null | app/urls.py | etihadprime/etihadwebclass | 3b46d9068afeb0806198ef08fe26849ab9a09bb9 | [
"Apache-2.0"
] | 6 | 2021-03-19T03:55:20.000Z | 2021-09-22T19:06:06.000Z | app/urls.py | etihadprime/etihadwebclass | 3b46d9068afeb0806198ef08fe26849ab9a09bb9 | [
"Apache-2.0"
] | null | null | null | from django.urls import path
from .views import teacherregister,studentregister,login_view,logout
from . import views
from .views import (
ClassroomCreateView,ClassroomListView,ClassroomDetailView,ClassroomUpdateView,ClassroomDeleteView,
SubjectCreateView,SubjectListView,SubjectDetailView,SubjectUpdateView,SubjectDeleteView,
ClassMemberCreateView,ClassMemberListView,ClassMemberDetailView,ClassMemberUpdateView,ClassMemberDeleteView,
TimetableCreateView,TimetableListView,TimetableDetailView,TimetableUpdateView,TimetableDeleteView,CrudView,chatroom
)
urlpatterns = [
path('', views.index, name='index'),
path('health', views.health, name='health'),
path('404', views.handler404, name='404'),
path('500', views.handler500, name='500'),
path('signup/teacher', teacherregister,name='register-teacher'),
path('signup/student', studentregister,name='register-student'),
path('accounts/login/', login_view, name='login'),
path('logout/', logout,name='logout'),
#Classroom
path('classroom/new', ClassroomCreateView.as_view(),name='classroom-create'),
path('classroom_list', ClassroomListView.as_view(),name='classroom-list'),
path('classroom/<str:pk>/', ClassroomDetailView.as_view(),name='classroom-detail'),
path('classroom/<str:pk>/update', ClassroomUpdateView.as_view(),name='classroom-update'),
path('classroom/<str:pk>/delete', ClassroomDeleteView.as_view(),name='classroom-delete'),
#path('Classroom/<int:pk>/image',ChildImageUpdateView.as_view(),name='Classroom-image'),
#Subject
path('subject/new', SubjectCreateView.as_view(),name='subject-create'),
path('subject_list', SubjectListView.as_view(),name='subject-list'),
path('subject/<int:pk>/', SubjectDetailView.as_view(),name='subject-detail'),
path('subject/<int:pk>/update', SubjectUpdateView.as_view(),name='subject-update'),
path('subject/<int:pk>/delete', SubjectDeleteView.as_view(),name='subject-delete'),
# Class Members
path('classmember/new', ClassMemberCreateView.as_view(),name='classmember-create'),
path('classmember_list', ClassMemberListView.as_view(),name='classmember-list'),
path('classmember/<str:pk>/', ClassMemberDetailView.as_view(),name='classmember-detail'),
path('classmember/<str:pk>/update', ClassMemberUpdateView.as_view(),name='classmember-update'),
path('classmember/<str:pk>/delete', ClassMemberDeleteView.as_view(),name='classmember-delete'),
# TimeTable
path('timetable/new', TimetableCreateView.as_view(),name='timetable-create'),
path('timetable_list', TimetableListView.as_view(),name='timetable-list'),
path('timetable/<int:pk>/', TimetableDetailView.as_view(),name='timetable-detail'),
path('timetable/<int:pk>/update', TimetableUpdateView.as_view(),name='timetable-update'),
path('timetable/<int:pk>/delete', TimetableDeleteView.as_view(),name='timetable-delete'),
# chatroom
path('chat/new',chatroom,name='chatroom'),
path('crud/',CrudView.as_view(), name='crud_ajax'),
] | 70.979592 | 115 | 0.648074 |
4b76dabace6084b6df07b8d27c9db12c437ca835 | 44,634 | py | Python | qaboard/qa.py | Samsung/qaboard | a2290f33da2bbd87cacf95822e1c85376083dfa1 | [
"Apache-2.0"
] | 51 | 2019-12-02T07:25:58.000Z | 2022-03-23T13:27:11.000Z | qaboard/qa.py | Samsung/qaboard | a2290f33da2bbd87cacf95822e1c85376083dfa1 | [
"Apache-2.0"
] | 25 | 2020-01-20T16:13:49.000Z | 2022-02-19T17:07:38.000Z | qaboard/qa.py | Samsung/qaboard | a2290f33da2bbd87cacf95822e1c85376083dfa1 | [
"Apache-2.0"
] | 15 | 2020-01-17T21:21:17.000Z | 2022-02-23T10:13:48.000Z | #!/usr/bin/env python
"""
CLI tool to runs various tasks related to QA.
"""
import os
import time
from pathlib import Path
import sys
import traceback
import json
import yaml
import uuid
import datetime
import click
from .run import RunContext
from .runners import runners, Job, JobGroup
from .runners.lsf import LsfPriority
from .conventions import batch_dir, batch_dir, make_batch_dir, make_batch_conf_dir, make_hash
from .conventions import serialize_config, deserialize_config, get_settings
from .utils import PathType, entrypoint_module, load_tuning_search
from .utils import save_outputs_manifest, total_storage
from .utils import redirect_std_streams
from .utils import getenvs
from .api import url_to_dir, print_url
from .api import get_outputs, notify_qa_database, serialize_paths
from .iterators import iter_inputs, iter_parameters
from .config import config_has_error, ignore_config_errors
from .config import project, project_root, subproject, config
from .config import default_batches_files, get_default_database, default_batch_label, default_platform
from .config import get_default_configuration, default_input_type
from .config import commit_id, outputs_commit, artifacts_commit, root_qatools, artifacts_commit_root, outputs_commit_root
from .config import user, is_ci, on_windows
def postprocess_(runtime_metrics, run_context, skip=False, save_manifests_in_database=False):
"""Computes computes various success metrics and outputs."""
from .utils import file_info
try:
if not skip:
try:
entrypoint_postprocess = entrypoint_module(config).postprocess
except:
metrics = runtime_metrics
else:
metrics = entrypoint_postprocess(runtime_metrics, run_context)
else:
metrics = runtime_metrics
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
# TODO: in case of import error because postprocess was not defined, just ignore it...?
# TODO: we should provide a default postprocess function, that reads metrics.json and returns {**previous, **runtime_metrics}
click.secho(f'[ERROR] Your `postprocess` function raised an exception:', fg='red', bold=True)
click.secho(''.join(traceback.format_exception(exc_type, exc_value, exc_traceback)), fg='red')
metrics = {**runtime_metrics, 'is_failed': True}
if 'is_failed' not in metrics:
click.secho("[Warning] The result of the `postprocess` function misses a key `is_failed` (bool)", fg='yellow')
metrics['is_failed'] = False
if (run_context.output_dir / 'metrics.json').exists():
with (run_context.output_dir / 'metrics.json').open('r') as f:
previous_metrics = json.load(f)
metrics = {
**previous_metrics,
**metrics,
}
with (run_context.output_dir / 'metrics.json').open('w') as f:
json.dump(metrics, f, sort_keys=True, indent=2, separators=(',', ': '))
# To help identify if input files change, we compute and save some metadata.
if is_ci or save_manifests_in_database:
manifest_inputs = run_context.obj.get('manifest-inputs', [run_context.input_path])
input_files = {}
for manifest_input in manifest_inputs:
manifest_input = Path(manifest_input)
if manifest_input.is_dir():
for idx, path in enumerate(manifest_input.rglob('*')):
if idx >= 200:
break
if not path.is_file():
continue
input_files[path.as_posix()] = file_info(path, config=config)
elif manifest_input.is_file():
input_files.update({manifest_input.as_posix(): file_info(manifest_input, config=config)})
with (run_context.output_dir / 'manifest.inputs.json').open('w') as f:
json.dump(input_files, f, indent=2)
outputs_manifest = save_outputs_manifest(run_context.output_dir, config=config)
output_data = {
'storage': total_storage(outputs_manifest),
}
if save_manifests_in_database:
if run_context.input_path.is_file():
click.secho('WARNING: saving the manifests in the database is only implemented for inputs that are *folders*.', fg='yellow', err=True)
else:
from .utils import copy
copy(run_context.output_dir / 'manifest.inputs.json', run_context.input_path / 'manifest.inputs.json')
copy(run_context.output_dir / 'manifest.outputs.json', run_context.input_path / 'manifest.outputs.json')
if not run_context.obj.get('offline') and not run_context.obj.get('dryrun'):
notify_qa_database(**run_context.obj, metrics=metrics, data=output_data, is_pending=False, is_running=False)
return metrics
runners_config = config.get('runners', {})
if 'default' in runners_config:
default_runner = runners_config['default']
else:
task_runners = [r for r in runners_config if r not in ['default', 'local']]
default_runner = task_runners[0] if task_runners else 'local'
lsf_config = config['lsf'] if 'lsf' in config else config.get('runners', {}).get('lsf', {})
if 'lsf' in config:
default_runner = 'lsf'
if default_runner == 'lsf' and os.name=='nt':
default_runner = 'local'
local_config = config.get('runners', {}).get('local', {})
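# Hypothetical minimal sketch of the `qa` click group and the `main`
# entrypoint used at the bottom of this file; the original definitions
# (and the many subcommands attached to them) are not shown here.
@click.group()
@click.pass_context
def qa(ctx):
    """QA-Board command-line interface."""
    ctx.obj = {}


def main():
    qa(obj={})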
from .optimize import optimize
qa.add_command(optimize)
# TODO: split more...
# from .bit_accuracy import check_bit_accuracy, check_bit_accuracy_manifest
# qa.add_command(check_bit_accuracy)
# qa.add_command(check_bit_accuracy_manifest)
if __name__ == '__main__':
main()
| 50.720455 | 318 | 0.703992 |
4b77f58f441974f14bdaad4bde4687feee866e3a | 5,838 | py | Python | 20210220_simulation_sample/data_handler.py | 3x3x3/Presentations | 3c31b136ed4d9214bb3730fa41a4a575da38edc9 | [
"MIT"
] | null | null | null | 20210220_simulation_sample/data_handler.py | 3x3x3/Presentations | 3c31b136ed4d9214bb3730fa41a4a575da38edc9 | [
"MIT"
] | null | null | null | 20210220_simulation_sample/data_handler.py | 3x3x3/Presentations | 3c31b136ed4d9214bb3730fa41a4a575da38edc9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import threading
import time
import global_def as gd
from db_reader import DbReaderDef, DbReaer
from queue import Queue, Empty
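# Hypothetical sketch of the stripped handler; class and method names are
# guesses based on the imports above (`DbReaer` is spelled as imported).
class DataHandler:
    def __init__(self):
        self._queue = Queue()
        self._reader = DbReaer(self._queue)  # constructor signature assumed
        self._stop_event = threading.Event()
        self._thread = threading.Thread(target=self._run)

    def start(self):
        self._thread.start()

    def stop(self):
        self._stop_event.set()
        self._thread.join()

    def _run(self):
        while not self._stop_event.is_set():
            try:
                item = self._queue.get(timeout=0.1)
            except Empty:
                continue
            # placeholder: dispatch the row to the simulation
            print(item)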
| 32.797753 | 162 | 0.52381 |
4b786431aa3dbf51672c3a6c4d1ccbdb01c1f809 | 7,865 | py | Python | todo/views.py | Azarn/mytodo | 599b5017b9a952100f05a6180dba5bca0823ad70 | [
"Apache-2.0"
] | null | null | null | todo/views.py | Azarn/mytodo | 599b5017b9a952100f05a6180dba5bca0823ad70 | [
"Apache-2.0"
] | null | null | null | todo/views.py | Azarn/mytodo | 599b5017b9a952100f05a6180dba5bca0823ad70 | [
"Apache-2.0"
] | null | null | null | import logging
from rest_framework import mixins, generics, permissions, exceptions
from django.conf import settings
from django.utils import timezone
from .serializers import CategorySerializer, TagSerializer, TodoSerializer
from .models import Category, Tag, Todo
logger = logging.getLogger(__name__)
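# Hypothetical sketch of one stripped view; the original file defines views
# for Category, Tag and Todo, of which only this illustrative one is shown.
class CategoryList(generics.ListCreateAPIView):
    serializer_class = CategorySerializer
    permission_classes = (permissions.IsAuthenticated,)

    def get_queryset(self):
        # scoping categories to the requesting user is an assumed convention
        return Category.objects.filter(user=self.request.user)

    def perform_create(self, serializer):
        serializer.save(user=self.request.user)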
| 34.047619 | 105 | 0.628608 |
4b7a04ca06d8701872be7f11c6588abbce31dce4 | 16,294 | py | Python | hypothesis/_settings.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | hypothesis/_settings.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | hypothesis/_settings.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null | # coding=utf-8
#
# This file is part of Hypothesis (https://github.com/DRMacIver/hypothesis)
#
# Most of this work is copyright (C) 2013-2015 David R. MacIver
# (david@drmaciver.com), but it contains contributions by others. See
# https://github.com/DRMacIver/hypothesis/blob/master/CONTRIBUTING.rst for a
# full list of people who may hold copyright, and consult the git log if you
# need to determine who owns an individual contribution.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
#
# END HEADER
"""A module controlling settings for Hypothesis to use in falsification.
Either an explicit settings object can be used or the default object on
this module can be modified.
"""
from __future__ import division, print_function, absolute_import
import os
import inspect
import warnings
import threading
from collections import namedtuple
from hypothesis.errors import InvalidArgument, HypothesisDeprecationWarning
from hypothesis.configuration import hypothesis_home_dir
from hypothesis.utils.conventions import not_set
from hypothesis.utils.dynamicvariables import DynamicVariable
__all__ = [
'settings',
]
all_settings = {}
_db_cache = {}
default_variable = DynamicVariable(None)
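# Hypothetical sketch of the stripped SettingsMeta metaclass; the real one
# also handles deprecation warnings and thread-local defaults. It is shown
# only so that `settings.default` below has a backing property.
class SettingsMeta(type):
    @property
    def default(cls):
        return default_variable.value

    @default.setter
    def default(cls, value):
        default_variable.value = value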
class settings(SettingsMeta('settings', (object,), {})):
"""A settings object controls a variety of parameters that are used in
falsification. These may control both the falsification strategy and the
details of the data that is generated.
Default values are picked up from the settings.default object and
changes made there will be picked up in newly created settings.
"""
_WHITELISTED_REAL_PROPERTIES = [
'_database', '_construction_complete', 'storage'
]
__definitions_are_locked = False
_profiles = {}
def __setattr__(self, name, value):
if name in settings._WHITELISTED_REAL_PROPERTIES:
return object.__setattr__(self, name, value)
elif name == 'database':
if self._construction_complete:
raise AttributeError(
'Settings objects are immutable and may not be assigned to'
' after construction.'
)
else:
return object.__setattr__(self, '_database', value)
elif name in all_settings:
if self._construction_complete:
raise AttributeError(
'Settings objects are immutable and may not be assigned to'
' after construction.'
)
else:
setting = all_settings[name]
if (
setting.options is not None and
value not in setting.options
):
raise InvalidArgument(
'Invalid %s, %r. Valid options: %r' % (
name, value, setting.options
)
)
return object.__setattr__(self, name, value)
else:
raise AttributeError('No such setting %s' % (name,))
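
    @classmethod
    def define_setting(cls, name, description, default, options=None,
                       deprecation=None):
        # Hypothetical sketch: the real method also installs descriptors on
        # the class and handles callable defaults; here it only records the
        # Setting so the define_setting() calls below have something to do.
        all_settings[name] = Setting(
            name, description.strip(), default, options, deprecation)

    @classmethod
    def lock_further_definitions(cls):
        # Hypothetical sketch of the stripped classmethod.
        cls.__definitions_are_locked = True

    @classmethod
    def register_profile(cls, name, profile):
        # Hypothetical sketch of the stripped classmethod.
        cls._profiles[name] = profile

    @classmethod
    def load_profile(cls, name):
        # Hypothetical sketch of the stripped classmethod.
        cls.default = cls._profiles[name]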
Setting = namedtuple(
'Setting', (
'name', 'description', 'default', 'options', 'deprecation'))
settings.define_setting(
'min_satisfying_examples',
default=5,
description="""
Raise Unsatisfiable for any tests which do not produce at least this many
values that pass all assume() calls and which have not exhaustively covered the
search space.
"""
)
settings.define_setting(
'max_examples',
default=200,
description="""
Once this many satisfying examples have been considered without finding any
counter-example, falsification will terminate.
"""
)
settings.define_setting(
'max_iterations',
default=1000,
description="""
Once this many iterations of the example loop have run, including ones which
failed to satisfy assumptions and ones which produced duplicates, falsification
will terminate.
"""
)
settings.define_setting(
'max_shrinks',
default=500,
description="""
Once this many successful shrinks have been performed, Hypothesis will assume
something has gone a bit wrong and give up rather than continuing to try to
shrink the example.
"""
)
settings.define_setting(
'timeout',
default=60,
description="""
Once this many seconds have passed, falsify will terminate even
if it has not found many examples. This is a soft rather than a hard
limit - Hypothesis won't e.g. interrupt execution of the called
function to stop it. If this value is <= 0 then no timeout will be
applied.
"""
)
settings.define_setting(
'derandomize',
default=False,
description="""
If this is True then hypothesis will run in deterministic mode
where each falsification uses a random number generator that is seeded
based on the hypothesis to falsify, which will be consistent across
multiple runs. This has the advantage that it will eliminate any
randomness from your tests, which may be preferable for some situations
. It does have the disadvantage of making your tests less likely to
find novel breakages.
"""
)
settings.define_setting(
'strict',
default=os.getenv('HYPOTHESIS_STRICT_MODE') == 'true',
description="""
If set to True, anything that would cause Hypothesis to issue a warning will
instead raise an error. Note that new warnings may be added at any time, so
running with strict set to True means that new Hypothesis releases may validly
break your code.
You can enable this setting temporarily by setting the HYPOTHESIS_STRICT_MODE
environment variable to the string 'true'.
"""
)
settings.define_setting(
'database_file',
default=lambda: (
os.getenv('HYPOTHESIS_DATABASE_FILE') or
os.path.join(hypothesis_home_dir(), 'examples.db')
),
description="""
database: An instance of hypothesis.database.ExampleDatabase that will be
used to save examples to and load previous examples from. May be None
in which case no storage will be used.
"""
)
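# Hypothetical sketch of the stripped Verbosity class; the real one also
# defines a full ordering and hashing, which are omitted here.
class Verbosity(object):
    def __init__(self, name, level):
        self.name = name
        self.level = level

    def __eq__(self, other):
        return isinstance(other, Verbosity) and self.level == other.level

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        return self.level < other.level

    def __repr__(self):
        return 'Verbosity.%s' % (self.name,)

    @classmethod
    def by_name(cls, key):
        result = getattr(cls, key, None)
        if isinstance(result, Verbosity):
            return result
        raise InvalidArgument('No such verbosity level %r' % (key,))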
Verbosity.quiet = Verbosity('quiet', 0)
Verbosity.normal = Verbosity('normal', 1)
Verbosity.verbose = Verbosity('verbose', 2)
Verbosity.debug = Verbosity('debug', 3)
Verbosity.all = [
Verbosity.quiet, Verbosity.normal, Verbosity.verbose, Verbosity.debug
]
ENVIRONMENT_VERBOSITY_OVERRIDE = os.getenv('HYPOTHESIS_VERBOSITY_LEVEL')
if ENVIRONMENT_VERBOSITY_OVERRIDE:
DEFAULT_VERBOSITY = Verbosity.by_name(ENVIRONMENT_VERBOSITY_OVERRIDE)
else:
DEFAULT_VERBOSITY = Verbosity.normal
settings.define_setting(
'verbosity',
options=Verbosity.all,
default=DEFAULT_VERBOSITY,
description='Control the verbosity level of Hypothesis messages',
)
settings.define_setting(
name='stateful_step_count',
default=50,
description="""
Number of steps to run a stateful program for before giving up on it breaking.
"""
)
settings.define_setting(
'perform_health_check',
default=True,
description=u"""
If set to True, Hypothesis will run a preliminary health check before
attempting to actually execute your test.
"""
)
settings.lock_further_definitions()
settings.register_profile('default', settings())
settings.load_profile('default')
assert settings.default is not None
| 31.334615 | 79 | 0.650669 |
4b7b56b22f9f50b0ab5dcb31b7bb4cdc39078ed0 | 2,014 | py | Python | 2_writeups/4_other/pdf/pipeline.py | araujorayza/robot_hacking_manual | d11feecc8931b1449b0ab30a51a55f71f51dd965 | [
"Apache-2.0"
] | 141 | 2021-11-14T15:27:04.000Z | 2022-03-30T00:44:48.000Z | 2_writeups/4_other/pdf/pipeline.py | araujorayza/robot_hacking_manual | d11feecc8931b1449b0ab30a51a55f71f51dd965 | [
"Apache-2.0"
] | 1 | 2021-11-17T06:38:44.000Z | 2021-11-17T06:38:45.000Z | 2_writeups/4_other/pdf/pipeline.py | araujorayza/robot_hacking_manual | d11feecc8931b1449b0ab30a51a55f71f51dd965 | [
"Apache-2.0"
] | 18 | 2021-11-15T09:55:48.000Z | 2022-03-08T10:25:58.000Z | """
Script to generate a security pipeline for PDF files.
It does the following:
- Adds specified meta-data
- Encrypts file
Run:
python3 pipeline.py
"""
from PyPDF2 import PdfFileWriter, PdfFileReader
from PyPDF2.generic import NameObject, createStringObject
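# Hypothetical sketches of the two helpers used below; the original
# implementations are not shown. They use the classic PyPDF2 1.x API, and
# the custom metadata key '/SharedWith' is an assumption.
def meta(input_pdf, output_pdf, value):
    reader = PdfFileReader(input_pdf)
    writer = PdfFileWriter()
    for page_number in range(reader.getNumPages()):
        writer.addPage(reader.getPage(page_number))
    # write the custom key directly into the document info dictionary
    writer._info.getObject().update(
        {NameObject('/SharedWith'): createStringObject(value)})
    with open(output_pdf, 'wb') as f:
        writer.write(f)

def encrypt(input_pdf, output_pdf, password):
    reader = PdfFileReader(input_pdf)
    writer = PdfFileWriter()
    for page_number in range(reader.getNumPages()):
        writer.addPage(reader.getPage(page_number))
    writer.encrypt(user_pwd=password)
    with open(output_pdf, 'wb') as f:
        writer.write(f)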
if __name__ == '__main__':
# path for the file to process
filepath = "/Users/victor/Desktop/Apex.AI_Threat_Model_AliasRobotics.pdf"
# meta-data-value
meta_value = u'HitachiVentures'
meta(input_pdf=filepath,
output_pdf=filepath+"underNDA.pdf",
value=meta_value)
encrypt(input_pdf=filepath+"underNDA.pdf",
output_pdf=filepath+"underNDA_encrypted.pdf",
password='4l14srobotics')
| 28.366197 | 77 | 0.682224 |
4b7c945d6b1d560f6d85d5ab876aed99787d4072 | 1,989 | py | Python | code/MergeTrack/print_max_reid_distance.py | MTonyM/PReMVOS | 3d01f0c6156628083a4c8441b4b57622c500e04e | [
"MIT"
] | 140 | 2018-10-25T11:58:34.000Z | 2022-01-18T15:29:38.000Z | code/MergeTrack/print_max_reid_distance.py | MTonyM/PReMVOS | 3d01f0c6156628083a4c8441b4b57622c500e04e | [
"MIT"
] | 18 | 2018-11-21T04:48:03.000Z | 2020-09-14T09:30:56.000Z | code/MergeTrack/print_max_reid_distance.py | MTonyM/PReMVOS | 3d01f0c6156628083a4c8441b4b57622c500e04e | [
"MIT"
] | 32 | 2018-10-25T11:58:57.000Z | 2021-12-27T06:13:45.000Z | import glob
from numpy.linalg import norm
import numpy as np
from copy import deepcopy as copy
from MergeTrack.merge_functions import read_ann,read_props
from MergeTrack.ReID_net_functions import ReID_net_init, add_ReID
input_images = "DAVIS/val17/"
input_proposals = "DAVIS/ReID_props/"
first_frame_anns = "DAVIS/val17-ff/"
output_images = "DAVIS/final_results/"
output_proposals = "DAVIS/final_props/"
ReID_net = ReID_net_init()
dataset_max_distances = []
for video_fn in sorted(glob.glob(input_images+"*/")):
video_proposals = []
templates = []
for image_fn in sorted(glob.glob(video_fn+"*")):
ann_fn = image_fn.replace(input_images,first_frame_anns).replace('.jpg','.png')
if glob.glob(ann_fn):
new_templates = read_ann(ann_fn)
new_templates = add_ReID(new_templates, image_fn, ReID_net)
# import json
# ff_fn = image_fn.replace(input_images, "DAVIS/ff_test/").replace('.jpg', '.json')
# with open(ff_fn, "r") as f:
# new_templates = json.load(f)
# for id, templ in enumerate(new_templates):
# templ['ReID'] = np.array(templ['ReID'])
# templ['id'] = id
templates = templates + new_templates
prop_fn = image_fn.replace(input_images,input_proposals).replace('.jpg','.json')
proposals = read_props(prop_fn)
video_proposals.append(proposals)
ReIDs = [[prop['ReID'] for prop in props] for props in video_proposals]
template_ReIDs = [templ['ReID'] for templ in templates]
all_reid_distances = [np.array([[norm(c_reid - gt_reid) for c_reid in curr] for gt_reid in template_ReIDs]) for curr in ReIDs]
all_reid_distances_no_inf = copy(all_reid_distances)
for mat in all_reid_distances_no_inf:
mat[np.isinf(mat)] = 0
max_distances = np.array([mat.max(axis=1) if mat.shape[1]>0 else np.zeros((mat.shape[0])) for mat in all_reid_distances_no_inf]).max(axis=0)
print(max_distances)
dataset_max_distances.append(max_distances.max())
print(np.array(dataset_max_distances).max()) | 38.25 | 142 | 0.723479 |
4b7d11da5ac6e1b0ebc4170e7d035cb4092ec2fa | 1,377 | py | Python | algorithms/tests/test_string_matching.py | t3rm1n4l/python-algorithms | 0fbcb38b26d8690028cd5a676743950fdf3a060f | [
"MIT"
] | 1 | 2018-05-02T07:37:43.000Z | 2018-05-02T07:37:43.000Z | algorithms/tests/test_string_matching.py | t3rm1n4l/python-algorithms | 0fbcb38b26d8690028cd5a676743950fdf3a060f | [
"MIT"
] | null | null | null | algorithms/tests/test_string_matching.py | t3rm1n4l/python-algorithms | 0fbcb38b26d8690028cd5a676743950fdf3a060f | [
"MIT"
] | null | null | null | import unittest
import string_matching
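# Hypothetical sketch of the stripped test case; the function name below is
# an assumption about the `string_matching` module, not confirmed here.
class TestStringMatching(unittest.TestCase):
    def test_naive(self):
        occurrences = string_matching.string_matching_naive('ababbababa', 'aba')
        self.assertEqual(occurrences, [0, 5, 7])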
if __name__ == '__main__':
unittest.main()
| 33.585366 | 96 | 0.655773 |
4b7d6c918015930582e1fb1d514d24f1d777be05 | 1,411 | py | Python | molecool_test/tests/test_molecule.py | radifar/molecool_test | 9e0027656d6f68d2efd9cdf8f24872b4bcea6cb9 | [
"BSD-3-Clause"
] | null | null | null | molecool_test/tests/test_molecule.py | radifar/molecool_test | 9e0027656d6f68d2efd9cdf8f24872b4bcea6cb9 | [
"BSD-3-Clause"
] | null | null | null | molecool_test/tests/test_molecule.py | radifar/molecool_test | 9e0027656d6f68d2efd9cdf8f24872b4bcea6cb9 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import pytest
import molecool_test
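# Hypothetical sketch of a stripped test; `build_bond_list` follows the
# MolSSI tutorial API this package mirrors, but is an assumption here.
def test_build_bond_list():
    coordinates = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
    bonds = molecool_test.build_bond_list(coordinates)
    assert len(bonds) == 1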
| 21.707692 | 81 | 0.697378 |
4b7e597bab0f3442569b2c0f944ee9a51ebdc5c8 | 5,004 | py | Python | tests/unit/html/test_search_page.py | tttgm/basketball_reference_web_scraper | 2dbd9d7bacbcfee17f08bcf8629bd7d50893761d | [
"MIT"
] | 325 | 2015-10-27T03:15:49.000Z | 2022-03-16T06:49:12.000Z | tests/unit/html/test_search_page.py | tttgm/basketball_reference_web_scraper | 2dbd9d7bacbcfee17f08bcf8629bd7d50893761d | [
"MIT"
] | 173 | 2018-10-16T04:11:05.000Z | 2022-03-29T17:52:08.000Z | tests/unit/html/test_search_page.py | tttgm/basketball_reference_web_scraper | 2dbd9d7bacbcfee17f08bcf8629bd7d50893761d | [
"MIT"
] | 97 | 2016-04-09T19:11:28.000Z | 2022-03-21T09:57:50.000Z | from unittest import TestCase
from unittest.mock import patch, MagicMock, PropertyMock
from basketball_reference_web_scraper.html import SearchPage, PlayerSearchResult
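# Hypothetical sketch of one stripped test; the patched property name and the
# SearchPage constructor signature are assumptions based on the imports above.
class TestSearchPage(TestCase):
    @patch.object(SearchPage, 'player_search_result_divs', new_callable=PropertyMock)
    def test_no_player_results(self, mock_divs):
        mock_divs.return_value = []
        page = SearchPage(html=MagicMock())
        self.assertEqual(page.player_search_results, [])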
| 42.40678 | 116 | 0.711631 |
4b7fad07fb9954bb150ff9b9a3fc6a0e8f2cf560 | 19,891 | py | Python | cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | null | null | null | cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | null | null | null | cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | 1 | 2021-10-30T00:03:05.000Z | 2021-10-30T00:03:05.000Z | ##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# New_WindGust_Tool
#
# Authors: Tom Mazza NWS Charleston, WV Created: 04/25/03
# Matthew H. Belk NWS Taunton, MA Last Modified: 06/16/03
# Mathewson FSL Modified: 3/30/04
# -change in model names to OB3 names
#----------------------------------------------------------------------------
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 02/10/2016 5283 nabowle Remove NGM support.
# ----------------------------------------------------------------------------
##
# This is an absolute override file, indicating that a higher priority version
# of the file will completely replace a lower priority version of the file.
##
ToolType = "numeric"
WeatherElementEdited = "WindGust"
from numpy import *
# without this, the builtin max() is used
from numpy import max
import LogStream
# You can screen the elements for which your tool will appear by using
# a ScreenList. For example:
#ScreenList = ["MixHgt","WindGust", "TransWind"]
# Set up variables to be solicited from the user:
VariableList = [
("Momentum algorithm:", "RUC", "radio", ["RUC", "Power"]),
("Use BL Winds:", "No", "radio", ["Yes", "No"]),
("Model:", "NAM12", "radio",
["GFS80", "NAM12", "gfsLR", "RAP40"])
]
#Set up Class
import SmartScript
## For available commands, see SmartScript
toolName = 'WindGustFromAlgorithm'
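# Hypothetical sketch of the stripped smart-tool class; GFE numeric smart
# tools follow this execute() pattern, but the original gust algorithm
# (RUC momentum vs. power mixing) is not reproduced here.
class Tool(SmartScript.SmartScript):
    def __init__(self, dbss):
        SmartScript.SmartScript.__init__(self, dbss)

    def execute(self, Wind, MixHgt, Topo, varDict):
        "Computes WindGust from the wind speed and the chosen momentum algorithm."
        windSpeed = Wind[0]
        # placeholder gust factor; the real tool mixes momentum down from
        # the boundary layer based on MixHgt and the selected model winds
        gust = windSpeed * 1.3
        return gust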
| 48.045894 | 195 | 0.42934 |
4b7fc93c2e30ca54b02519e2a781a191d7e736a1 | 6,705 | py | Python | pochta/tracking.py | john-phonk/fs-pochta-api | c3b7df4ecdbfc45fb482cedd8ab6c2927e0a1c9d | [
"MIT"
] | 16 | 2019-05-13T01:12:10.000Z | 2022-01-17T06:21:35.000Z | pochta/tracking.py | john-phonk/fs-pochta-api | c3b7df4ecdbfc45fb482cedd8ab6c2927e0a1c9d | [
"MIT"
] | 4 | 2020-03-06T06:46:35.000Z | 2020-11-22T04:24:34.000Z | pochta/tracking.py | john-phonk/fs-pochta-api | c3b7df4ecdbfc45fb482cedd8ab6c2927e0a1c9d | [
"MIT"
] | 6 | 2019-08-10T13:18:21.000Z | 2021-11-25T08:57:30.000Z | from abc import ABC
from typing import List
from zeep import CachingClient, Client, Settings
from .exceptions import APIError
| 40.149701 | 99 | 0.670097 |
4b7fd5f816b4e255d1e40adf591dc8b3e21efaa2 | 2,291 | py | Python | CH04_Iterators_and_Generators/4.4.Implementing_the_iterator_protocol.py | Chang-Liu-TAMU/Python-Cookbook-reading | 7b974c32f77b4b3d7cfeed30d1671081057c566f | [
"MIT"
] | null | null | null | CH04_Iterators_and_Generators/4.4.Implementing_the_iterator_protocol.py | Chang-Liu-TAMU/Python-Cookbook-reading | 7b974c32f77b4b3d7cfeed30d1671081057c566f | [
"MIT"
] | null | null | null | CH04_Iterators_and_Generators/4.4.Implementing_the_iterator_protocol.py | Chang-Liu-TAMU/Python-Cookbook-reading | 7b974c32f77b4b3d7cfeed30d1671081057c566f | [
"MIT"
] | null | null | null | # @Time: 2022/4/12 20:50
# @Author: chang liu
# @Email: chang_liu_tamu@gmail.com
# @File:4.4.Implementing_the_iterator_protocol.py
################ clean version #########################
# class Node:
# def __init__(self, val):
# self._value = val
# self._children = []
#
# def __repr__(self):
# return "Node({!r})".format(self._value)
#
# def add_child(self, node):
# self._children.append(node)
#
# def __iter__(self):
# return iter(self._children)
#
# def depth_first(self):
# yield self
# for c in self:
# yield from c.depth_first()
############# some messy version ####################
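# Reconstructed from the commented "clean version" above so that the demo
# below actually runs; this mirrors that definition line for line.
class Node:
    def __init__(self, val):
        self._value = val
        self._children = []

    def __repr__(self):
        return "Node({!r})".format(self._value)

    def add_child(self, node):
        self._children.append(node)

    def __iter__(self):
        return iter(self._children)

    def depth_first(self):
        yield self
        for c in self:
            yield from c.depth_first()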
root = Node(0)
left = Node(1)
right = Node(2)
left.add_child(Node(3))
left.add_child(Node(4))
right.add_child(Node(5))
right.add_child(Node(6))
root.add_child(left)
root.add_child(right)
for i in root.depth_first():
print(i)
# for i in root:
# print(i) | 22.91 | 70 | 0.572676 |
4b800dc76b871db39c746e292171f32b25ee44ff | 29,762 | py | Python | FGPVAE_model.py | metodj/FGP-VAE | 607559ab465b29878f10a5d95b8e3c6ec8d94e0c | [
"MIT"
] | 3 | 2021-01-27T14:06:01.000Z | 2021-09-09T12:10:34.000Z | FGPVAE_model.py | metodj/FGP-VAE | 607559ab465b29878f10a5d95b8e3c6ec8d94e0c | [
"MIT"
] | null | null | null | FGPVAE_model.py | metodj/FGP-VAE | 607559ab465b29878f10a5d95b8e3c6ec8d94e0c | [
"MIT"
] | null | null | null | import tensorflow as tf
import tensorflow_probability as tfp
import numpy as np
import pickle
import random
from utils import gauss_cross_entropy
tfk = tfp.math.psd_kernels
def forward_pass_FGPVAE_rotated_mnist(data_batch, beta, vae, GP, N_t, clipping_qs,
bayes_reg_view, omit_C_tilde, C_ma, lagrange_mult, alpha,
kappa, GECO=False):
"""
    :param data_batch: tuple (images, aux_data) with one batch of images and auxiliary data
    :param beta: weight of the KL part of the ELBO (plays the role of the decoder variance)
    :param vae: VAE object (encoder and decoder networks)
    :param GP: GP object (local and global latent GPs)
    :param N_t: number of images per object dataset
    :param clipping_qs: whether or not to clip the VAE posterior variances
    :param bayes_reg_view: whether or not to use Bayesian regression view for linear kernel in global channels
:param omit_C_tilde: omit C_tilde from derivation and modify cross-entropy term instead
:param C_ma: average constraint from t-1 step (GECO)
:param lagrange_mult: lambda from t-1 step (GECO)
:param kappa: reconstruction level parameter for GECO
:param alpha: moving average parameter for GECO
:param GECO: whether or not to use GECO algorithm for training
:return:
"""
images, aux_data = data_batch
aux_data = tf.reshape(aux_data, (-1, N_t, 10))
L = vae.L
L_w = GP.L_w
w = tf.shape(images)[1]
h = tf.shape(images)[2]
K = tf.cast(w, dtype=tf.float64) * tf.cast(h, dtype=tf.float64)
b = tf.cast(tf.shape(images)[0], dtype=tf.float64) # batch_size
# ENCODER NETWORK
qnet_mu, qnet_var = vae.encode(images)
qnet_mu = tf.reshape(qnet_mu, (-1, N_t, L))
qnet_var = tf.reshape(qnet_var, (-1, N_t, L))
# clipping of VAE posterior variance
if clipping_qs:
qnet_var = tf.clip_by_value(qnet_var, 1e-3, 100)
# GP
p_m, p_v, lhoods_local, lhoods_global = [], [], [], []
for i in range(L_w): # fit local GPs
p_m_i, p_v_i, lhood_i, K_local = GP.build_1d_gp_local(X=aux_data[:, :, 1], Y=qnet_mu[:, :, i],
varY=qnet_var[:, :, i], X_test=aux_data[:, :, 1])
p_m.append(p_m_i)
p_v.append(p_v_i)
lhoods_local.append(lhood_i)
ce_global_arr = []
for i in range(L_w, L): # fit global GPs
if GP.object_prior_corr:
object_aux_data_filtered = tf.transpose(aux_data[:, ::N_t, :], perm=[1, 0, 2])
bar_means, bar_vars, C_tilde = GP.preprocess_1d_gp_global_correlated_object_priors(qnet_mu[:, :, i],
qnet_var[:, :, i])
p_m_i, p_v_i, lhood_i = GP.build_1d_gp_global_correlated_object_priors(object_aux_data_filtered,
bar_means,
bar_vars,
object_aux_data_filtered,
C_tilde,
bayesian_reg_view=bayes_reg_view,
omit_C_tilde=omit_C_tilde)
if omit_C_tilde:
ce_global_i = gauss_cross_entropy(p_m_i, p_v_i, bar_means, bar_vars)
ce_global_arr.append(ce_global_i)
else:
p_m_i, p_v_i, lhood_i = GP.build_1d_gp_global(means=qnet_mu[:, :, i], vars=qnet_var[:, :, i])
# repeat p_m_i and p_v_i N_t times, since those are shared across all images within one object dataset D_t
p_m_i = tf.tile(tf.expand_dims(p_m_i, 1), [1, N_t])
p_v_i = tf.tile(tf.expand_dims(p_v_i, 1), [1, N_t])
p_m.append(p_m_i)
p_v.append(p_v_i)
lhoods_global.append(lhood_i)
p_m = tf.stack(p_m, axis=2)
p_v = tf.stack(p_v, axis=2)
if GP.object_prior_corr:
# for local channels sum over latent channels and over digits' datasets
# for global channels we only sum over latent channels (as there is only one global GP per channel)
lhoods = tf.reduce_sum(lhoods_local, axis=(0, 1)) + tf.reduce_sum(lhoods_global, axis=0)
# CE (cross-entropy)
if omit_C_tilde:
ce_global = tf.reduce_sum(ce_global_arr)
ce_local = gauss_cross_entropy(p_m[:, :, :L_w], p_v[:, :, :L_w], qnet_mu[:, :, :L_w], qnet_var[:, :, :L_w])
ce_local = tf.reduce_sum(ce_local, (0, 1, 2)) # sum also over digits' datasets
ce_term = ce_global + ce_local
else:
ce_term = gauss_cross_entropy(p_m, p_v, qnet_mu, qnet_var)
ce_term = tf.reduce_sum(ce_term, (0, 1, 2)) # sum also over digits' datasets
# KL part
elbo_kl_part = lhoods - ce_term
else:
lhoods = lhoods_global + lhoods_local
lhoods = tf.reduce_sum(lhoods, axis=0)
# CE (cross-entropy)
ce_term = gauss_cross_entropy(p_m, p_v, qnet_mu, qnet_var)
ce_term = tf.reduce_sum(ce_term, (1, 2))
# KL part
elbo_kl_part = lhoods - ce_term
# SAMPLE
epsilon = tf.random.normal(shape=tf.shape(p_m), dtype=tf.float64)
latent_samples = p_m + epsilon * tf.sqrt(p_v)
# DECODER NETWORK (Gaussian observational likelihood, MSE)
recon_images = vae.decode(tf.reshape(latent_samples, (-1, L)))
if GP.object_prior_corr:
if GECO:
recon_loss = tf.reduce_sum((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[2, 3])
recon_loss = tf.reduce_sum(recon_loss/K - kappa**2)
C_ma = alpha * C_ma + (1 - alpha) * recon_loss / b
# elbo = - (1/L) * KL_term + lagrange_mult * C_ma
# elbo = - (1/b) * KL_term + lagrange_mult * C_ma
# elbo = - KL_term + lagrange_mult * C_ma
elbo = - elbo_kl_part + lagrange_mult * (recon_loss / b + tf.stop_gradient(C_ma - recon_loss / b))
lagrange_mult = lagrange_mult * tf.exp(C_ma)
else:
recon_loss = tf.reduce_sum((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[1, 2, 3])
recon_loss = tf.reduce_sum(recon_loss) / K
elbo = -recon_loss + (beta / L) * elbo_kl_part
else:
if GECO:
recon_loss = tf.reduce_mean((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[2, 3])
N_t = tf.cast(N_t, dtype=tf.float64)
C_ma = alpha * C_ma + (1 - alpha) * tf.reduce_mean(recon_loss - kappa ** 2)
recon_loss = tf.reduce_sum(recon_loss - kappa ** 2)
# elbo = - (1/L) * elbo_kl_part + lagrange_mult * C_ma
# elbo = - (1/b) * elbo_kl_part + lagrange_mult * C_ma
# elbo = - elbo_kl_part + lagrange_mult * C_ma
elbo = - elbo_kl_part + lagrange_mult * (recon_loss / N_t + tf.stop_gradient(C_ma - recon_loss / N_t))
lagrange_mult = lagrange_mult * tf.exp(C_ma)
else:
recon_loss = tf.reduce_sum((tf.reshape(images, (-1, N_t, w, h)) - tf.reshape(recon_images,
(-1, N_t, w, h))) ** 2,
axis=[1, 2, 3])
# ELBO
# beta plays role of sigma_gaussian_decoder here (\lambda(\sigma_y) in Casale paper)
# K and L are not part of ELBO. They are used in loss objective to account for the fact that magnitudes of
# reconstruction and KL terms depend on number of pixels (K) and number of latent GPs used (L), respectively
recon_loss = recon_loss / K
elbo = -recon_loss + (beta/L) * elbo_kl_part
# average across object datasets
elbo = tf.reduce_sum(elbo)
elbo_kl_part = tf.reduce_sum(elbo_kl_part)
recon_loss = tf.reduce_sum(recon_loss)
return elbo, recon_loss, elbo_kl_part, p_m, p_v, qnet_mu, qnet_var, recon_images, latent_samples, C_ma, lagrange_mult
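# --- Illustrative training-step wiring (added sketch; not part of the original
# file -- `forward_pass`, `optimizer` and the argument list are hypothetical
# names standing in for the surrounding training loop):
#   with tf.GradientTape() as tape:
#       elbo, recon_loss, elbo_kl_part, *rest = forward_pass(...)  # function above
#       loss = -elbo
#   grads = tape.gradient(loss, vae.trainable_variables)
#   optimizer.apply_gradients(zip(grads, vae.trainable_variables))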
def predict_FGPVAE_rotated_mnist(test_images, test_aux_data, train_images, train_aux_data, vae, GP,
bayes_reg_view, omit_C_tilde, N_t=15, clipping_qs=False):
"""
Get FGPVAE predictions for rotated MNIST test data.
    :param test_images: images at the held-out test angles
    :param test_aux_data: auxiliary (angle) data for the test images
    :param train_images: observed images of each object dataset
    :param train_aux_data: auxiliary (angle) data for the observed images
    :param vae: VAE (encoder/decoder) object
    :param GP: GP object providing the local and global latent channels
    :param bayes_reg_view: use the Bayesian-regression view for the global GPs
    :param omit_C_tilde: omit the C_tilde correction in the global GP fit
    :param N_t: number of observed angles per object dataset
    :param clipping_qs: clip the VAE posterior variance if True
    :return: reconstructed test images and their mean squared reconstruction error
"""
L = vae.L
L_w = GP.L_w
w = tf.shape(train_images)[1]
h = tf.shape(train_images)[2]
train_aux_data = tf.reshape(train_aux_data, (-1, N_t, 10))
test_aux_data = tf.expand_dims(test_aux_data, 1)
# encode train images
qnet_mu, qnet_var = vae.encode(train_images)
qnet_mu = tf.reshape(qnet_mu, (-1, N_t, L))
qnet_var = tf.reshape(qnet_var, (-1, N_t, L))
# clipping of VAE posterior variance
if clipping_qs:
qnet_var = tf.clip_by_value(qnet_var, 1e-3, 100)
# GP, get latent embeddings for test images
p_m, p_v = [], []
for i in range(L_w): # fit local GPs
        p_m_i, p_v_i, _, _ = GP.build_1d_gp_local(X=train_aux_data[:, :, 1], Y=qnet_mu[:, :, i],
                                                  varY=qnet_var[:, :, i], X_test=test_aux_data[:, :, 1])
p_m.append(p_m_i)
p_v.append(p_v_i)
for i in range(L_w, L): # fit global GPs
if GP.object_prior_corr:
object_aux_data_filtered = tf.transpose(train_aux_data[:, ::N_t, :], perm=[1, 0, 2])
bar_means, bar_vars, C_tilde = GP.preprocess_1d_gp_global_correlated_object_priors(qnet_mu[:, :, i],
qnet_var[:, :, i])
p_m_i, p_v_i, _ = GP.build_1d_gp_global_correlated_object_priors(object_aux_data_filtered,
bar_means,
bar_vars,
object_aux_data_filtered,
C_tilde,
omit_C_tilde=omit_C_tilde,
bayesian_reg_view=bayes_reg_view)
else:
p_m_i, p_v_i, _ = GP.build_1d_gp_global(means=qnet_mu[:, :, i], vars=qnet_var[:, :, i])
p_m.append(tf.expand_dims(p_m_i, 1))
p_v.append(tf.expand_dims(p_v_i, 1))
p_m = tf.stack(p_m, axis=2)
p_v = tf.stack(p_v, axis=2)
# SAMPLE
epsilon = tf.random.normal(shape=tf.shape(p_m), dtype=tf.float64)
latent_samples = p_m + epsilon * tf.sqrt(p_v)
# decode, calculate error (Gaussian observational likelihood, MSE)
recon_images = vae.decode(tf.reshape(latent_samples, (-1, L)))
recon_loss = tf.reduce_mean((test_images - recon_images) ** 2)
return recon_images, recon_loss
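# Illustrative usage (added; assumes `vae`, `GP` and the data tensors are built
# elsewhere in the training script):
#   recon_images, recon_mse = predict_FGPVAE_rotated_mnist(
#       test_images, test_aux_data, train_images, train_aux_data,
#       vae, GP, bayes_reg_view=True, omit_C_tilde=False, N_t=15)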
def extrapolate_experiment_eval_data(mnist_path, digit, N_t, pred_angle_id=7, nr_angles=16):
"""
Prepare validation dataset for the extrapolate experiment.
:param mnist_path:
:param digit:
:param N_t: how many angles do we observe for each image in test set
:param pred_angle_id: which angle to leave out for prediction
:param nr_angles: size of object dataset
:return:
"""
eval_data_dict = pickle.load(open(mnist_path + 'eval_data{}_not_shuffled.p'.format(digit), 'rb'))
eval_images, eval_aux_data = eval_data_dict["images"], eval_data_dict["aux_data"]
pred_angle_mask = [pred_angle_id + i * nr_angles for i in range(int(len(eval_aux_data) / nr_angles))]
not_pred_angle_mask = [i for i in range(len(eval_images)) if i not in pred_angle_mask]
observed_images = eval_images[not_pred_angle_mask]
observed_aux_data = eval_aux_data[not_pred_angle_mask]
# randomly drop some observed angles
if N_t < 15:
digit_mask = [True]*N_t + [False]*(15-N_t)
mask = [random.sample(digit_mask, len(digit_mask)) for _ in range(int(len(eval_aux_data)/nr_angles))]
flatten = lambda l: [item for sublist in l for item in sublist]
mask = flatten(mask)
observed_images = observed_images[mask]
observed_aux_data = observed_aux_data[mask]
test_images = eval_images[pred_angle_mask]
test_aux_data = eval_aux_data[pred_angle_mask]
return observed_images, observed_aux_data, test_images, test_aux_data
def latent_samples_FGPVAE(train_images, train_aux_data, vae, GP, N_t, clipping_qs=False):
"""
Get latent samples for training data. For t-SNE plots :)
:param train_images:
:param train_aux_data:
:param vae:
:param GP:
    :param N_t: number of images per object dataset
    :param clipping_qs: clip the VAE posterior variance if True
:return:
"""
train_aux_data = tf.reshape(train_aux_data, (-1, N_t, 10))
L = vae.L
L_w = GP.L_w
# ENCODER NETWORK
qnet_mu, qnet_var = vae.encode(train_images)
qnet_mu = tf.reshape(qnet_mu, (-1, N_t, L))
qnet_var = tf.reshape(qnet_var, (-1, N_t, L))
# clipping of VAE posterior variance
if clipping_qs:
qnet_var = tf.clip_by_value(qnet_var, 1e-3, 100)
# GP
p_m, p_v = [], []
for i in range(L_w): # fit local GPs
p_m_i, p_v_i, _, _ = GP.build_1d_gp_local(X=train_aux_data[:, :, 1], Y=qnet_mu[:, :, i],
varY=qnet_var[:, :, i], X_test=train_aux_data[:, :, 1])
p_m.append(p_m_i)
p_v.append(p_v_i)
for i in range(L_w, L): # fit global GPs
p_m_i, p_v_i, lhood_i = GP.build_1d_gp_global(means=qnet_mu[:, :, i], vars=qnet_var[:, :, i])
# repeat p_m_i and p_v_i N_t times, since those are shared across all images within one object dataset D_t
p_m_i = tf.tile(tf.expand_dims(p_m_i, 1), [1, N_t])
p_v_i = tf.tile(tf.expand_dims(p_v_i, 1), [1, N_t])
p_m.append(p_m_i)
p_v.append(p_v_i)
p_m = tf.stack(p_m, axis=2)
p_v = tf.stack(p_v, axis=2)
# SAMPLE
epsilon = tf.random.normal(shape=tf.shape(p_m), dtype=tf.float64)
latent_samples = p_m + epsilon * tf.sqrt(p_v)
return latent_samples
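# Illustrative follow-up (added; assumes scikit-learn is available and TF eager mode):
#   from sklearn.manifold import TSNE
#   samples = latent_samples_FGPVAE(train_images, train_aux_data, vae, GP, N_t=15)
#   embedding = TSNE(n_components=2).fit_transform(
#       tf.reshape(samples, (-1, vae.L)).numpy())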
| 42.456491 | 123 | 0.556145 |
4b81b5fe4aceb22e7a99ad217502d745e5a0019f | 1,069 | py | Python | spotty/commands/abstract_command.py | Inculus/spotty | 56863012668a6c13ad13c2a04f900047e229fbe6 | [
"MIT"
] | 246 | 2018-09-03T09:09:48.000Z | 2020-07-18T21:07:15.000Z | spotty/commands/abstract_command.py | Inculus/spotty | 56863012668a6c13ad13c2a04f900047e229fbe6 | [
"MIT"
] | 42 | 2018-10-09T19:41:56.000Z | 2020-06-15T22:55:58.000Z | spotty/commands/abstract_command.py | Inculus/spotty | 56863012668a6c13ad13c2a04f900047e229fbe6 | [
"MIT"
] | 27 | 2018-10-09T22:16:40.000Z | 2020-06-08T22:26:00.000Z | from abc import ABC, abstractmethod
from argparse import Namespace, ArgumentParser
from spotty.commands.writers.abstract_output_writrer import AbstractOutputWriter
| 30.542857 | 93 | 0.654818 |
4b849b209996da99ee667a5b45419939d4653d3a | 9,495 | py | Python | tests/test_protocols/test_generator.py | cyenyxe/agents-aea | c2aec9127028ae13def3f69fbc80d35400de1565 | [
"Apache-2.0"
] | null | null | null | tests/test_protocols/test_generator.py | cyenyxe/agents-aea | c2aec9127028ae13def3f69fbc80d35400de1565 | [
"Apache-2.0"
] | 1 | 2020-02-21T14:28:13.000Z | 2020-03-05T14:53:53.000Z | tests/test_protocols/test_generator.py | cyenyxe/agents-aea | c2aec9127028ae13def3f69fbc80d35400de1565 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the tests for the protocol generator."""
import inspect
import os
import shutil
import tempfile
import yaml
from aea.configurations.base import ProtocolSpecification
from aea.configurations.loader import ConfigLoader
from aea.protocols.generator import ProtocolGenerator
CUR_PATH = os.path.dirname(inspect.getfile(inspect.currentframe())) # type: ignore
# class TestCases(TestCase):
# """Test class for the light protocol generator."""
#
# def test_all_custom_data_types(self):
# """Test all custom data types."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "all_custom.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
# test_protocol_template.load()
# test_protocol_generator = ProtocolGenerator(test_protocol_template, 'tests')
# test_protocol_generator.generate()
#
# from two_party_negotiation_protocol.message import TwoPartyNegotiationMessage
# from two_party_negotiation_protocol.serialization import TwoPartyNegotiationSerializer
# from two_party_negotiation_protocol.message import DataModel
# from two_party_negotiation_protocol.message import Signature
#
# data_model = DataModel()
# signature = Signature()
# content_list = [data_model, signature]
#
# message = TwoPartyNegotiationMessage(message_id=5, target=4, performative="propose", contents=content_list)
# print(str.format("message is {}", message))
# message.check_consistency()
# serialized_message = TwoPartyNegotiationSerializer().encode(msg=message)
# print(str.format("serialized message is {}", serialized_message))
# deserialised_message = TwoPartyNegotiationSerializer().decode(obj=serialized_message)
# print(str.format("deserialized message is {}", deserialised_message))
#
# assert message == deserialised_message, "Failure"
#
# def test_correct_functionality(self):
# """End to end test of functionality."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "correct_spec.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
# test_protocol_template.load()
# test_protocol_generator = ProtocolGenerator(test_protocol_template, 'tests')
# test_protocol_generator.generate()
#
# from two_party_negotiation_protocol.message import TwoPartyNegotiationMessage
# from two_party_negotiation_protocol.serialization import TwoPartyNegotiationSerializer
# from two_party_negotiation_protocol.message import DataModel
#
# data_model = DataModel()
# content_list = [data_model, 10.5]
#
# message = TwoPartyNegotiationMessage(message_id=5, target=4, performative="propose", contents=content_list)
# print(str.format("message is {}", message))
# message.check_consistency()
# serialized_message = TwoPartyNegotiationSerializer().encode(msg=message)
# print(str.format("serialized message is {}", serialized_message))
# deserialised_message = TwoPartyNegotiationSerializer().decode(obj=serialized_message)
# print(str.format("deserialized message is {}", deserialised_message))
#
# assert message == deserialised_message, "Failure"
#
# def test_missing_name(self):
# """Test missing name handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "missing_name.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_missing_description(self):
# """Test missing description handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "missing_description.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# assert test_protocol_template.load(), "Failure"
#
# def test_missing_speech_acts(self):
# """Test missing speech acts handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "missing_speech_acts.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_extra_fields(self):
# """Test extra fields handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "extra_fields.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# assert test_protocol_template.load(), "Failure"
#
# def test_one_document(self):
# """Test one document handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "one_document.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_wrong_speech_act_type_sequence_performatives(self):
# """Test wrong speech act handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "wrong_speech_act_type_sequence_performatives.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
#
# def test_wrong_speech_act_type_dictionary_contents(self):
# """Test wrong speech act dictionary contents handling."""
# test_protocol_specification_path = os.path.join(CUR_PATH, "data", "wrong_speech_act_type_dictionary_contents.yaml")
# test_protocol_template = ProtocolTemplate(test_protocol_specification_path)
#
# self.assertRaises(ProtocolSpecificationParseError, test_protocol_template.load)
| 45.214286 | 128 | 0.685519 |
4b86ef7acd08f81f39f9fde4c5d2779a3995da3e | 6,981 | py | Python | tabfkioskgoogledrive/MyGDTest3.py | isalan06/myflaskapiserver | 2922f62c9b9ede2b6cba2db774e924b226a120f7 | [
"MIT"
] | null | null | null | tabfkioskgoogledrive/MyGDTest3.py | isalan06/myflaskapiserver | 2922f62c9b9ede2b6cba2db774e924b226a120f7 | [
"MIT"
] | null | null | null | tabfkioskgoogledrive/MyGDTest3.py | isalan06/myflaskapiserver | 2922f62c9b9ede2b6cba2db774e924b226a120f7 | [
"MIT"
] | null | null | null | import os.path
import os
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from datetime import datetime
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive']
def main():
"""Shows basic usage of the Drive v3 API.
Prints the names and ids of the first 10 files the user has access to.
"""
creds = None
# The file token.json stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.json'):
creds = Credentials.from_authorized_user_file('token.json', SCOPES)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
print("Refresh Creds")
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'client_secrets.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.json', 'w') as token:
token.write(creds.to_json())
service = build('drive', 'v3', credentials=creds)
# Call the Drive v3 API
results = service.files().list(
q="mimeType = 'application/vnd.google-apps.folder' and '0ALNhV0hP-QYDUk9PVA' in parents",
pageSize=100, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
pic_id = ''
if not items:
print('No files found.')
else:
print('1st Files:')
for item in items:
if item['name']=='KIOSK Picture':
pic_id = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
#print(pic_id)
# Check Machine ID
q_str = "mimeType = 'application/vnd.google-apps.folder' and '" + str(pic_id) +"' in parents"
#print(q_str)
results = service.files().list(
q=q_str, #"mimeType = 'application/vnd.google-apps.folder' and '" + str(pic_id) +"' in parents",
pageSize=10, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
bHasBaseFolder = False
sMachineID = 'Test_MachineID'
sMachineID_ID = ''
if not items:
print('No files found.')
else:
print('2nd Files:')
for item in items:
if item['name']==sMachineID:
bHasBaseFolder = True
sMachineID_ID = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
if bHasBaseFolder == False:
file_metadata = {
'name': sMachineID,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [str(pic_id)]
}
file = service.files().create(body=file_metadata,
fields='id').execute()
sMachineID_ID = str(file.get('id'))
print('Folder ID: %s' % file.get('id'))
#print(sMachineID_ID)
# Check Date Folder
sTodayDateString = datetime.now().strftime("%Y%m%d")
sTodayDate_ID = ''
bHasBaseFolder = False
q_str = "mimeType = 'application/vnd.google-apps.folder' and '" + str(sMachineID_ID) +"' in parents"
results = service.files().list(
q=q_str,
pageSize=10, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
if not items:
print('No files found.')
else:
        print('3rd Files:')
for item in items:
if item['name']==sTodayDateString:
bHasBaseFolder = True
sTodayDate_ID = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
if bHasBaseFolder == False:
file_metadata = {
'name': sTodayDateString,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [str(sMachineID_ID)]
}
file = service.files().create(body=file_metadata,
fields='id').execute()
sTodayDate_ID = str(file.get('id'))
print('Folder ID: %s' % file.get('id'))
# Check Test Location
sTestLocation='()'
sTestLocation_ID = ''
bHasBaseFolder = False
q_str = "mimeType = 'application/vnd.google-apps.folder' and '" + str(sTodayDate_ID) +"' in parents"
results = service.files().list(
q=q_str,
pageSize=10, fields="nextPageToken, files(id, name, parents)").execute()
items = results.get('files', [])
if not items:
print('No files found.')
else:
        print('4th Files:')
for item in items:
if item['name']==sTestLocation:
bHasBaseFolder = True
sTestLocation_ID = item['id']
print(u'{0} ({1}) - {2}'.format(item['name'], item['id'], item['parents']))
if bHasBaseFolder == False:
file_metadata = {
'name': sTestLocation,
'mimeType': 'application/vnd.google-apps.folder',
'parents': [str(sTodayDate_ID)]
}
file = service.files().create(body=file_metadata,
fields='id').execute()
sTestLocation_ID = str(file.get('id'))
print('Folder ID: %s' % file.get('id'))
sTestLocation_ID = CreateGoogleDriveFolder(service, sTestLocation, sTodayDate_ID)
print('Check Function')
print(sTestLocation_ID)
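# Reconstructed helper (assumption: the definition is missing from this dump and
# is inferred from the repeated lookup-or-create pattern inside main(); treat it
# as an illustrative sketch, not the author's exact code). Returns the id of
# `folder_name` under `parent_id`, creating the folder if it does not exist.
def CreateGoogleDriveFolder(service, folder_name, parent_id):
    q_str = ("mimeType = 'application/vnd.google-apps.folder' and '"
             + str(parent_id) + "' in parents")
    results = service.files().list(
        q=q_str, pageSize=100,
        fields="nextPageToken, files(id, name)").execute()
    # Reuse the folder if it already exists
    for item in results.get('files', []):
        if item['name'] == folder_name:
            return item['id']
    # Otherwise create it, mirroring the create() calls used in main()
    file_metadata = {
        'name': folder_name,
        'mimeType': 'application/vnd.google-apps.folder',
        'parents': [str(parent_id)],
    }
    file = service.files().create(body=file_metadata, fields='id').execute()
    return str(file.get('id'))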
if __name__ == '__main__':
main() | 37.331551 | 104 | 0.578284 |
4b8822ba30ebfce8e71617e36c3ea7b8cc487de1 | 46 | py | Python | qarithmetic/__init__.py | daniil-lyakhov/QArithmetic | 7a5df9504e17c1979107c119bbaf0c5b2750a619 | [
"Apache-2.0"
] | null | null | null | qarithmetic/__init__.py | daniil-lyakhov/QArithmetic | 7a5df9504e17c1979107c119bbaf0c5b2750a619 | [
"Apache-2.0"
] | null | null | null | qarithmetic/__init__.py | daniil-lyakhov/QArithmetic | 7a5df9504e17c1979107c119bbaf0c5b2750a619 | [
"Apache-2.0"
] | null | null | null | from .QArithmetic import *
from .qft import *
| 15.333333 | 26 | 0.73913 |
4b88bb3938cbed6bd9ddf6e52090c0d588399179 | 2,631 | py | Python | clustering/conditional_probs.py | griffij/QuakeRates | 70069bb271a1987e72fcbdf3aa0c0a8a79591580 | [
"Apache-2.0"
] | null | null | null | clustering/conditional_probs.py | griffij/QuakeRates | 70069bb271a1987e72fcbdf3aa0c0a8a79591580 | [
"Apache-2.0"
] | null | null | null | clustering/conditional_probs.py | griffij/QuakeRates | 70069bb271a1987e72fcbdf3aa0c0a8a79591580 | [
"Apache-2.0"
] | null | null | null | """Calculate conditional probability of a short interevent
time being followed by another short interevent time, compared
with the unconditional probability.
This is used to test whether fault records have memory.
"""
import os, sys
from glob import glob
import numpy as np
import matplotlib.pyplot as plt
from QuakeRates.dataman.parse_params import parse_param_file, \
get_event_sets
# Define parameter files
filepath = '../params'
param_file_list = glob(os.path.join(filepath, '*.txt'))
n_samples = 500 # Number of Monte Carlo samples of the eq chronologies
half_n = int(n_samples/2)
plot_dir = './plots_conditional_probs'
if not os.path.exists(plot_dir):
os.makedirs(plot_dir)
# Define subset to take
#faulting_styles = ['Reverse']
#faulting_styles = ['Normal']
#faulting_styles = ['Strike_slip']
faulting_styles = ['all']
tectonic_regions = ['all']
#tectonic_regions = ['Plate_boundary_master', 'Plate_boundary_network']
min_number_events = 10
names, event_sets, event_certainties, num_events = \
get_event_sets(param_file_list, tectonic_regions,
faulting_styles, min_number_events)
# Now loop over paleo-earthquake records
for i, event_set in enumerate(event_sets):
# Generate some chronologies
event_set.gen_chronologies(n_samples, observation_end=2019, min_separation=1)
print(num_events[i])
event_set.calculate_cov() # Calculate interevent times and mean as part of this
# Lists to store results
uncond_probs = []
cond_probs = []
for j, sample in enumerate(event_set.interevent_times.T):
num_less_mean = len(np.argwhere(sample < event_set.means[j]))
uncond_prob_less_mean = num_less_mean/event_set.num_events
count_short = 0
for k, ie_time in enumerate(sample):
if k==0:
ie_time_0 = ie_time
else:
                if ie_time < event_set.means[j] and \
                   ie_time_0 < event_set.means[j]:
count_short += 1
ie_time_0 = ie_time
cond_prob_less_mean = count_short/num_less_mean
uncond_probs.append(uncond_prob_less_mean)
cond_probs.append(cond_prob_less_mean)
print(uncond_probs)
print(cond_probs)
uncond_probs = np.array(uncond_probs)
cond_probs = np.array(cond_probs)
probs_ratio = cond_probs/uncond_probs
print(probs_ratio)
plt.clf()
plt.hist(probs_ratio, bins = 10, facecolor='0.6',
edgecolor='0.2', density=True)
figname = 'conditional_prob_ratio_histogram_%s.png' % names[i]
fig_filename = os.path.join(plot_dir, figname)
plt.savefig(fig_filename)
| 36.541667 | 84 | 0.707336 |
4b8d4301cde6fb6de24f8efb96ff5081761e33de | 643 | py | Python | Computer Science/Development/GUI/robo_controls.py | zbendt/ECE-Capstone-Project | 1bafc61f896191ccd5a843980500fb4b8bbeb8bd | [
"MIT"
] | null | null | null | Computer Science/Development/GUI/robo_controls.py | zbendt/ECE-Capstone-Project | 1bafc61f896191ccd5a843980500fb4b8bbeb8bd | [
"MIT"
] | null | null | null | Computer Science/Development/GUI/robo_controls.py | zbendt/ECE-Capstone-Project | 1bafc61f896191ccd5a843980500fb4b8bbeb8bd | [
"MIT"
] | null | null | null | import time
# Control functions for the delta
sleep_time = 0.5
| 20.09375 | 37 | 0.634526 |
4b8e4f10e68bbf6b6e9801bf943ec3cb8b4d1bf7 | 3,120 | py | Python | src/dynamic_programming/basic_scripts/value_iteration.py | johannesharmse/move_37_course | a2060129cbc6fb651113aa18f1a6ea2673845182 | [
"MIT"
] | 1 | 2019-03-13T06:29:54.000Z | 2019-03-13T06:29:54.000Z | src/dynamic_programming/basic_scripts/value_iteration.py | johannesharmse/move_37_course | a2060129cbc6fb651113aa18f1a6ea2673845182 | [
"MIT"
] | null | null | null | src/dynamic_programming/basic_scripts/value_iteration.py | johannesharmse/move_37_course | a2060129cbc6fb651113aa18f1a6ea2673845182 | [
"MIT"
] | null | null | null | # From The School of AI's Move 37 Course https://www.theschool.ai/courses/move-37-course/
# Coding demo by Colin Skow
# Forked from https://github.com/lazyprogrammer/machine_learning_examples/tree/master/rl
# Credit goes to LazyProgrammer
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
import numpy as np
from grid_world import standard_grid
from utils import print_values, print_policy
# SMALL_ENOUGH is referred to by the mathematical symbol theta in equations
SMALL_ENOUGH = 1e-3
GAMMA = 0.9
ALL_POSSIBLE_ACTIONS = ('U', 'D', 'L', 'R')
if __name__ == '__main__':
    # this grid gives you a reward of -0.5 for every non-terminal state
# we want to see if this will encourage finding a shorter path to the goal
grid = standard_grid(obey_prob=0.8, step_cost=-0.5)
# print rewards
print("rewards:")
print_values(grid.rewards, grid)
# calculate accurate values for each square
V = calculate_values(grid)
# calculate the optimum policy based on our values
policy = calculate_greedy_policy(grid, V)
# our goal here is to verify that we get the same answer as with policy iteration
print("values:")
print_values(V, grid)
print("policy:")
print_policy(policy, grid)
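# --- Illustrative sketch (added): the helpers `calculate_values` and
# `calculate_greedy_policy` used above are not shown in this dump. The function
# below demonstrates the same Bellman-backup loop on a tiny, self-contained,
# deterministic MDP; the state/transition names are hypothetical.
def _value_iteration_demo():
    # state -> {action: (next_state, reward)}; 'goal' is terminal
    transitions = {
        's0': {'R': ('s1', -0.5), 'S': ('s0', -0.5)},
        's1': {'R': ('goal', 1.0), 'L': ('s0', -0.5)},
        'goal': {},
    }
    V = {s: 0.0 for s in transitions}
    while True:
        biggest_change = 0.0
        for s, actions in transitions.items():
            if not actions:  # terminal state keeps value 0
                continue
            old_v = V[s]
            # Bellman optimality backup: V(s) = max_a [ r(s,a) + GAMMA * V(s') ]
            V[s] = max(r + GAMMA * V[s2] for s2, r in actions.values())
            biggest_change = max(biggest_change, abs(old_v - V[s]))
        if biggest_change < SMALL_ENOUGH:
            return V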
| 31.515152 | 89 | 0.709936 |
4b8f66af4fc844e8c289287b2a2bc4ba119f529e | 19,238 | py | Python | photoplace/addons/CSVImport/GTKcsvimport.py | jriguera/photoplace | 93674ef8531d0e5b8f26de9ba568ed8e115b27e1 | [
"Apache-2.0"
] | 10 | 2015-02-20T19:01:19.000Z | 2021-12-13T23:07:19.000Z | photoplace/addons/CSVImport/GTKcsvimport.py | jriguera/photoplace | 93674ef8531d0e5b8f26de9ba568ed8e115b27e1 | [
"Apache-2.0"
] | 1 | 2020-06-16T13:23:05.000Z | 2021-02-13T14:14:57.000Z | photoplace/addons/CSVImport/GTKcsvimport.py | jriguera/photoplace | 93674ef8531d0e5b8f26de9ba568ed8e115b27e1 | [
"Apache-2.0"
] | 4 | 2017-03-28T23:06:14.000Z | 2019-09-25T07:59:36.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GTKcsvimport.py
#
# Copyright 2010-2015 Jose Riguera Lopez <jriguera@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Parse a CSV to add variables or geolocate photos. GTK User Interface.
"""
__program__ = "photoplace.csvimport"
__author__ = "Jose Riguera Lopez <jriguera@gmail.com>"
__version__ = "0.1.2"
__date__ = "Dec 2014"
__license__ = "Apache 2.0"
__copyright__ = "(c) Jose Riguera"
import os.path
import csv
import sys
import codecs
import warnings
import gettext
import locale
warnings.filterwarnings('ignore', module='gtk')
try:
import pygtk
pygtk.require("2.0")
import gtk
import gobject
except Exception as e:
warnings.resetwarnings()
print("Warning: %s" % str(e))
print("You don't have the PyGTK 2.0 module installed")
raise
warnings.resetwarnings()
from csvimport import *
# I18N gettext support
__GETTEXT_DOMAIN__ = __program__
__PACKAGE_DIR__ = os.path.abspath(os.path.dirname(__file__))
__LOCALE_DIR__ = os.path.join(__PACKAGE_DIR__, u"locale")
try:
if not os.path.isdir(__LOCALE_DIR__):
print ("Error: Cannot locate default locale dir: '%s'." % (__LOCALE_DIR__))
__LOCALE_DIR__ = None
locale.setlocale(locale.LC_ALL,"")
#gettext.bindtextdomain(__GETTEXT_DOMAIN__, __LOCALE_DIR__)
t = gettext.translation(__GETTEXT_DOMAIN__, __LOCALE_DIR__, fallback=False)
_ = t.ugettext
except Exception as e:
print ("Error setting up the translations: %s" % (str(e)))
_ = lambda s: unicode(s)
#EOF
| 39.995842 | 218 | 0.615501 |
4b8fcf8f0fe4212ea52ae11e77f6cd66ebb3437f | 9,024 | py | Python | src/opt_utils.py | mateuszz0000/POSA | 1295065251dd22c89d923fbff7d8bf4c53339d95 | [
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | 71 | 2021-05-02T21:40:29.000Z | 2022-03-30T03:52:01.000Z | src/opt_utils.py | mateuszz0000/POSA | 1295065251dd22c89d923fbff7d8bf4c53339d95 | [
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | 4 | 2021-06-18T06:31:29.000Z | 2021-12-07T07:29:21.000Z | src/opt_utils.py | mateuszz0000/POSA | 1295065251dd22c89d923fbff7d8bf4c53339d95 | [
"CNRI-Python",
"Xnet",
"Info-ZIP",
"X11"
] | 10 | 2021-05-08T08:16:31.000Z | 2022-02-17T04:40:30.000Z | # -*- coding: utf-8 -*-
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright © 2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: ps-license@tuebingen.mpg.de
import torch
import torch.nn.functional as F
import numpy as np
import torchgeometry as tgm
from src import misc_utils, eulerangles
from tqdm import tqdm
| 47 | 120 | 0.625332 |
4b90f733d945576384389e3af5e8eb7b26b24785 | 137 | py | Python | gan_provider.py | jiameng1010/pointNet | 17d230f46f64136baba2c3d6cb7f05ab4bbb9f31 | [
"MIT"
] | null | null | null | gan_provider.py | jiameng1010/pointNet | 17d230f46f64136baba2c3d6cb7f05ab4bbb9f31 | [
"MIT"
] | null | null | null | gan_provider.py | jiameng1010/pointNet | 17d230f46f64136baba2c3d6cb7f05ab4bbb9f31 | [
"MIT"
] | 1 | 2019-02-03T12:19:36.000Z | 2019-02-03T12:19:36.000Z | from tensorflow.contrib.slim.python.slim.data import data_provider
from tensorflow.contrib.slim.python.slim.data import parallel_reader
| 34.25 | 68 | 0.861314 |
4b91ba97fda9b2ee93796afb30a9ecc697c21159 | 1,205 | py | Python | script.module.placenta/lib/resources/lib/modules/thexem.py | parser4life/tantrumrepo | 3b37145f4772409e538cbddb0b7aa23be525772a | [
"Beerware"
] | 1 | 2021-05-09T19:55:51.000Z | 2021-05-09T19:55:51.000Z | script.module.placenta/lib/resources/lib/modules/thexem.py | parser4life/tantrumrepo | 3b37145f4772409e538cbddb0b7aa23be525772a | [
"Beerware"
] | null | null | null | script.module.placenta/lib/resources/lib/modules/thexem.py | parser4life/tantrumrepo | 3b37145f4772409e538cbddb0b7aa23be525772a | [
"Beerware"
] | 2 | 2020-04-01T22:11:12.000Z | 2020-05-07T23:54:52.000Z | # -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: MuadDib
import json
from resources.lib.modules import client
URL_PATTERN = 'http://thexem.de/map/single?id=%s&origin=tvdb&season=%s&episode=%s&destination=scene'
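# Illustrative (added; the placeholder order -- TVDB id, season, episode -- is
# taken from the query string itself, while the example values are assumptions):
#   url = URL_PATTERN % ('81189', '5', '16')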
| 36.515152 | 100 | 0.480498 |
4b92d579f1edf869213b966f3c57e11cb659219d | 1,048 | py | Python | Day18/main.py | MHKomeili/100DaysofCode | a5799011a43f777ddc5ac9e649aa27291313b62b | [
"MIT"
] | null | null | null | Day18/main.py | MHKomeili/100DaysofCode | a5799011a43f777ddc5ac9e649aa27291313b62b | [
"MIT"
] | null | null | null | Day18/main.py | MHKomeili/100DaysofCode | a5799011a43f777ddc5ac9e649aa27291313b62b | [
"MIT"
] | null | null | null | # import colorgram
#
# colors = colorgram.extract('image.jpg', 30)
# rgb_colors = []
# for color in colors:
# rgb_colors.append((color.rgb.r, color.rgb.g, color.rgb.b))
#
# print(rgb_colors)
from turtle import Turtle, Screen
import random
color_list = [(238, 251, 245), (250, 228, 15), (213, 12, 8), (199, 11, 36), (10, 98, 61), (5, 39, 32), (232, 228, 5),
(64, 221, 157), (198, 68, 19), (32, 91, 189), (43, 212, 71), (235, 148, 38), (32, 30, 153),
(242, 247, 251), (15, 22, 54), (67, 9, 49), (245, 38, 148), (14, 206, 222), (65, 203, 230), (62, 20, 10),
(229, 164, 7), (226, 19, 111), (14, 154, 22), (246, 58, 14), (98, 75, 8), (248, 11, 9), (223, 140, 205),
(66, 241, 160),
]
tim = Turtle()
scr = Screen()
scr.colormode(255)
tim.penup()
tim.hideturtle()
tim.setposition(-300, -300)
for i in range(10):
tim.setposition(-300, tim.ycor() + 50)
for j in range(10):
tim.setx(tim.xcor() + 50)
tim.dot(20, random.choice(color_list))
scr.exitonclick() | 31.757576 | 119 | 0.540076 |
4b9490ebcc233667c0f331f949a3dfce27be8b1f | 8,723 | py | Python | hirebob/portal/forms.py | shantanub0/hirebob | 5a55e97c6e220059964fbb55439b0189abae1307 | [
"MIT"
] | null | null | null | hirebob/portal/forms.py | shantanub0/hirebob | 5a55e97c6e220059964fbb55439b0189abae1307 | [
"MIT"
] | 1 | 2018-06-23T01:20:26.000Z | 2018-06-25T21:49:17.000Z | hirebob/portal/forms.py | shantanub0/hirebob | 5a55e97c6e220059964fbb55439b0189abae1307 | [
"MIT"
] | 1 | 2018-06-14T12:11:59.000Z | 2018-06-14T12:11:59.000Z | from django import forms
from .models import UserAccount, JobPost, JobPostActivity, UserProfile
| 56.642857 | 129 | 0.458558 |
4b95d82a263834a4e169c435b74dfded71be2e85 | 5,538 | py | Python | siemstress/trigger.py | dogoncouch/siemstress | be7f60bb0228a886d48deb4f46309be7fb8aa0af | [
"MIT"
] | 28 | 2017-08-14T12:41:56.000Z | 2022-02-18T01:18:11.000Z | siemstress/trigger.py | dogoncouch/siemstress | be7f60bb0228a886d48deb4f46309be7fb8aa0af | [
"MIT"
] | 1 | 2017-08-23T10:47:16.000Z | 2017-08-24T18:52:48.000Z | siemstress/trigger.py | dogoncouch/siemstress | be7f60bb0228a886d48deb4f46309be7fb8aa0af | [
"MIT"
] | 6 | 2018-01-07T11:42:18.000Z | 2020-06-08T00:04:57.000Z | #!/usr/bin/env python
#_MIT License
#_
#_Copyright (c) 2017 Dan Persons (dpersonsdev@gmail.com)
#_
#_Permission is hereby granted, free of charge, to any person obtaining a copy
#_of this software and associated documentation files (the "Software"), to deal
#_in the Software without restriction, including without limitation the rights
#_to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#_copies of the Software, and to permit persons to whom the Software is
#_furnished to do so, subject to the following conditions:
#_
#_The above copyright notice and this permission notice shall be included in all
#_copies or substantial portions of the Software.
#_
#_THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#_IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#_FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#_AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#_LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#_OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#_SOFTWARE.
import time
from time import strftime
from time import sleep
from time import daylight
from time import timezone
from time import altzone
from random import randrange
from datetime import datetime
import MySQLdb as mdb
import json
import threading
import os
from sys import exit
import siemstress.manage
#import signal
def start_rule(db, rule, oneshot):
"""Initialize trigger object and start watching"""
# Make sure the table exists:
siemstress.manage.create_ruleevent_table(rule['out_table'])
sentry = SiemTrigger(db, rule)
if oneshot:
sentry.check_rule()
elif int(rule['time_int']) == 0:
pass
else:
# Before starting, sleep randomly up to rule interval to stagger
# database use:
sleep(randrange(0, int(rule['time_int']) * 60))
sentry.watch_rule()
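# Example rule (illustrative; 'out_table' and 'time_int' are the keys that
# start_rule() reads above -- the values here are assumptions):
#   rule = {'out_table': 'ssh_brute_events', 'time_int': '5'}
#   start_rule(db, rule, oneshot=False)  # watch the rule roughly every 5 minutes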
| 34.397516 | 81 | 0.548754 |
4b996a561c6739777af3fa1902cca7e146f0eeaf | 687 | py | Python | TianJiPlanBackend/authentication/migrations/0002_auto_20210912_0929.py | weridolin/tianji-plan | b98a49d92ee2a365095f9e15f4231f5178aca1c0 | [
"Apache-2.0"
] | null | null | null | TianJiPlanBackend/authentication/migrations/0002_auto_20210912_0929.py | weridolin/tianji-plan | b98a49d92ee2a365095f9e15f4231f5178aca1c0 | [
"Apache-2.0"
] | null | null | null | TianJiPlanBackend/authentication/migrations/0002_auto_20210912_0929.py | weridolin/tianji-plan | b98a49d92ee2a365095f9e15f4231f5178aca1c0 | [
"Apache-2.0"
] | 1 | 2021-12-07T11:45:13.000Z | 2021-12-07T11:45:13.000Z | # Generated by Django 3.2.7 on 2021-09-12 01:29
from django.db import migrations, models
| 28.625 | 122 | 0.615721 |
4b9aca9719a2480581a602385b8fda1e00bcfadc | 3,040 | py | Python | ooobuild/lo/util/time_with_timezone.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/util/time_with_timezone.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/util/time_with_timezone.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Struct Class
# this is an auto-generated file generated by Cheetah
# Namespace: com.sun.star.util
# Libre Office Version: 7.3
from ooo.oenv.env_const import UNO_NONE
import typing
from .time import Time as Time_604e0855
__all__ = ['TimeWithTimezone']
| 30.4 | 190 | 0.650329 |
4b9af91c0efeb81facf6d27474553a4bb9a6505d | 2,025 | py | Python | tests/unit_tests/tasks/fortran/test_fortran_compiler.py | bblay/fab | bbdac7bae20c5b8695a2d56945c9593b4fda9c74 | [
"BSD-3-Clause"
] | null | null | null | tests/unit_tests/tasks/fortran/test_fortran_compiler.py | bblay/fab | bbdac7bae20c5b8695a2d56945c9593b4fda9c74 | [
"BSD-3-Clause"
] | null | null | null | tests/unit_tests/tasks/fortran/test_fortran_compiler.py | bblay/fab | bbdac7bae20c5b8695a2d56945c9593b4fda9c74 | [
"BSD-3-Clause"
] | null | null | null | from pathlib import Path
from unittest import mock
import pytest
from fab.build_config import AddFlags
from fab.dep_tree import AnalysedFile
from fab.steps.compile_fortran import CompileFortran
# todo: we might have liked to reuse this from test_dep_tree
from fab.util import CompiledFile
| 38.942308 | 112 | 0.651852 |