hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3e572d40ef88a1ec3058d9cc94eb6dce557f2d6d | 4,728 | py | Python | src/voicemaker/voicemaker.py | IAL32/voicemaker | 66c9dd25749743d94bb9c3aac8ba2c858f327723 | [
"MIT"
] | null | null | null | src/voicemaker/voicemaker.py | IAL32/voicemaker | 66c9dd25749743d94bb9c3aac8ba2c858f327723 | [
"MIT"
] | 1 | 2022-03-04T14:52:16.000Z | 2022-03-08T08:00:59.000Z | src/voicemaker/voicemaker.py | IAL32/voicemaker | 66c9dd25749743d94bb9c3aac8ba2c858f327723 | [
"MIT"
] | null | null | null | import requests
# Language tags (BCP-47 style) accepted by the Voicemaker API; used by
# Voicemaker.list_voices() to validate the requested language.
# NOTE: the original list contained 'es-US' twice; the duplicate was removed.
LANGUAGES_LIST = [
    'en-US', 'en-GB', 'en-AU', 'en-HK', 'en-NZ', 'en-SG', 'en-ZA', 'de-DE',
    'ar-XA', 'ar-SA', 'bn-IN', 'bg-BG', 'ca-ES', 'cmn-CN', 'zh-HK', 'cmn-TW',
    'cy-GB', 'cs-CZ', 'da-DK', 'de-CH', 'es-AR', 'es-CO', 'es-US', 'ga-IE',
    'gu-IN', 'hr-HR', 'mr-IN', 'ms-MY', 'mt-MT', 'nl-NL', 'nl-BE', 'en-CA',
    'en-IN', 'en-IE', 'et-EE', 'en-PH', 'fil-PH', 'fi-FI', 'fr-BE', 'fr-FR',
    'fr-CA', 'fr-CH', 'el-GR', 'he-IL', 'hi-IN', 'hu-HU', 'id-ID', 'it-IT',
    'ja-JP', 'lv-LV', 'lt-LT', 'ko-KR', 'nb-NO', 'pl-PL', 'pt-PT', 'pt-BR',
    'ro-RO', 'ru-RU', 'sk-SK', 'sw-KE', 'es-ES', 'es-MX', 'es-LA',
    'sl-SI', 'sv-SE', 'tr-TR', 'ta-IN', 'te-IN', 'th-TH', 'uk-UA', 'ur-PK',
    'vi-VN'
]
class Voicemaker:
    """Thin client for the Voicemaker.in text-to-speech REST API.

    Typical usage:
        vm = Voicemaker(token)
        vm.generate_audio_to_file('out.mp3', 'Hello world')
    """

    # Class-level defaults; both are overwritten in __init__.
    token = None
    base_url = None

    def __init__(self, token=None) -> None:
        """Creates the client, optionally setting the API token immediately.

        Args:
            token (str, optional): API token. Defaults to None.
        """
        self.base_url = "https://developer.voicemaker.in/voice"
        self.token = None
        if token is not None:
            self.set_token(token)

    def set_token(self, token: str) -> None:
        """Sets the API token. You can get yours from https://developer.voicemaker.in/apidocs

        Args:
            token (str): API Token.
        """
        self.token = token

    def __headers__(self) -> dict:
        """Builds the HTTP headers, adding Bearer auth when a token is set."""
        headers = {'Content-Type': 'application/json'}
        if self.token is not None:
            headers['Authorization'] = 'Bearer ' + self.token
        return headers

    def __get__(self, api: str, params=None):
        """Issues a GET against base_url + api and returns the decoded JSON.

        Raises:
            requests.HTTPError: If the API responds with an error status.
        """
        # A shared mutable dict ({}) was the old default; use None instead.
        if params is None:
            params = {}
        result = requests.get(self.base_url + api, params=params,
                              headers=self.__headers__())
        result.raise_for_status()
        return result.json()

    def __post__(self, api: str, data=None):
        """Issues a POST against base_url + api and returns the decoded JSON.

        Raises:
            requests.HTTPError: If the API responds with an error status.
        """
        # A shared mutable dict ({}) was the old default; use None instead.
        if data is None:
            data = {}
        result = requests.post(self.base_url + api, json=data,
                               headers=self.__headers__())
        result.raise_for_status()
        return result.json()

    def generate_audio_url(self,
                           text: str,
                           engine='neural', voice_id='ai3-Jony', language_code='en-US',
                           output_format='mp3', sample_rate=48000, effect='default',
                           master_speed=0, master_volume=0,
                           master_pitch=0) -> str:
        """Generates an audio URL from the given text and using the selected options

        Args:
            text (str): Text to generate an audio from.
            engine (str, optional): Choose between 'standard' and 'neural'. Defaults to 'neural'.
            voice_id (str, optional): Uses the selected voice id from the available one for the selected language. Defaults to 'ai3-Jony'.
            language_code (str, optional): Language of the target voice. Defaults to 'en-US'.
            output_format (str, optional): Choose from 'mp3' and 'wav'. Defaults to 'mp3'.
            sample_rate (int, optional): Choose from 48000, 44100, 24000, 22050, 16000, 8000. Defaults to 48000.
            effect (str, optional): Effect to give to the voice. Defaults to 'default'.
            master_speed (int, optional): Speed from -100 to 100. Defaults to 0.
            master_volume (int, optional): Volume of the voice from -100 to 100. Defaults to 0.
            master_pitch (int, optional): Pitch of the voice, from -100 to 100. Defaults to 0.

        Returns:
            str: URL of the MP3 to download, hosted on Voicemaker.in
        """
        # The API expects numeric parameters serialized as strings.
        return self.__post__('/api', {
            'Text': text,
            'Engine': engine,
            'VoiceId': voice_id,
            'LanguageCode': language_code,
            'OutputFormat': output_format,
            'SampleRate': str(sample_rate),
            'Effect': effect,
            'MasterSpeed': str(master_speed),
            'MasterVolume': str(master_volume),
            'MasterPitch': str(master_pitch),
        })['path']

    def generate_audio_to_file(self, out_path: str, text: str, **kwargs) -> None:
        """Generates audio from text and saves it to a file

        Args:
            out_path (str): Path where the generated audio should be written
            text (str): Text to generate an audio from
            **kwargs: Forwarded to generate_audio_url (engine, voice_id, ...).
        """
        url = self.generate_audio_url(text, **kwargs)
        response = requests.get(url)
        # Fail loudly instead of silently writing an error page to out_path.
        response.raise_for_status()
        with open(out_path, 'wb') as file_handle:
            file_handle.write(response.content)

    def list_voices(self, language='en-US') -> list:
        """Lists all available voices for the selected language

        Args:
            language (str, optional): Language of choice. Defaults to 'en-US'.

        Raises:
            ValueError: When the selected language is not supported

        Returns:
            list: List of languages of the form { "Engine": "xxx", "VoiceId": "xxx", "VoiceGender": "xxx", "VoiceWebname": "xxx", "Country": "XX", "Language": "xx-XX" }
        """
        if language not in LANGUAGES_LIST:
            raise ValueError('Selected language is not supported')
        return self.__get__('/list', {'language': language})['data']['voices_list']
| 39.4 | 164 | 0.597716 | 3,988 | 0.843486 | 0 | 0 | 0 | 0 | 0 | 0 | 2,675 | 0.565778 |
3e5810f45ee6abfb855c478735026a678b651dd9 | 1,365 | py | Python | Lecture/Kapitel 9 - Seite 235 - Implementierung des Gradientenverfahrens.py | PhilippMatthes/tensorflow-playground | b5fee6e5f5044dc5cbcd54529d559388a3df7813 | [
"MIT"
] | null | null | null | Lecture/Kapitel 9 - Seite 235 - Implementierung des Gradientenverfahrens.py | PhilippMatthes/tensorflow-playground | b5fee6e5f5044dc5cbcd54529d559388a3df7813 | [
"MIT"
] | null | null | null | Lecture/Kapitel 9 - Seite 235 - Implementierung des Gradientenverfahrens.py | PhilippMatthes/tensorflow-playground | b5fee6e5f5044dc5cbcd54529d559388a3df7813 | [
"MIT"
] | null | null | null | import tensorflow as tf
import numpy as np
from sklearn.datasets import fetch_california_housing

# --- TF1-style graph construction for linear regression on the California
# --- housing data (Géron, ch. 9: implementing gradient descent by hand).
housing = fetch_california_housing()
m, n = housing.data.shape
# Prepend a column of ones so theta[0] acts as the bias/intercept term.
housing_data_plus_bias = np.c_[np.ones((m, 1)), housing.data]
# Full dataset as constant graph nodes -> every step is batch gradient descent.
X = tf.constant(housing_data_plus_bias, dtype=tf.float32, name="X")
y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name="y")
n_epochs = 1000
learning_rate = 0.01
# Weight vector (incl. bias), initialized uniformly in [-1, 1).
theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0), name="theta")
y_pred = tf.matmul(X, theta, name="predictions")
error = y_pred - y
mse = tf.reduce_mean(tf.square(error), name="mse")
# Manually derived gradient of the MSE w.r.t. theta: (2/m) * X^T * error.
gradients = 2 / m * tf.matmul(tf.transpose(X), error)
# One gradient-descent step: theta <- theta - learning_rate * gradients.
training_op = tf.assign(theta, theta - learning_rate * gradients)
def run():
    """Trains theta with the current global training_op, logging MSE every 100 epochs."""
    # Re-initializes all graph variables on each call, so successive
    # experiments with a rebuilt training_op start from a fresh theta.
    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        for step in range(n_epochs):
            if step % 100 == 0:
                print("Epoch", step, "MSE =", mse.eval())
            session.run(training_op)
        final_theta = theta.eval()
        print(final_theta)
# Variant 1: manually derived gradients (training_op built during setup).
run()

# Variant 2: let TF autodiff compute the gradient.
gradients = tf.gradients(mse, [theta])[0]
# Rebuild the assignment op so it actually uses the autodiff gradient.
# Rebinding the Python name 'gradients' does not modify the graph, so
# without this line run() would silently reuse the manual-gradient op.
training_op = tf.assign(theta, theta - learning_rate * gradients)
run()

# Variant 3: plain gradient-descent optimizer.
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
training_op = optimizer.minimize(mse)
run()

# Variant 4: momentum optimizer (converges faster on this problem).
optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=0.9)
training_op = optimizer.minimize(mse)
run()
3e582f1280b1545b27d8bb65ef57684f484bd7bc | 1,634 | py | Python | python/Fluoroseq/obsolete/scripts/intrinsic_pr_bounds.py | erisyon/whatprot | 176cd7e6ee99ea3f91794dcf1ec14f3578b7ee3c | [
"MIT"
] | null | null | null | python/Fluoroseq/obsolete/scripts/intrinsic_pr_bounds.py | erisyon/whatprot | 176cd7e6ee99ea3f91794dcf1ec14f3578b7ee3c | [
"MIT"
] | 1 | 2021-06-12T00:50:08.000Z | 2021-06-15T17:59:12.000Z | python/Fluoroseq/obsolete/scripts/intrinsic_pr_bounds.py | erisyon/whatprot | 176cd7e6ee99ea3f91794dcf1ec14f3578b7ee3c | [
"MIT"
] | 1 | 2021-06-11T19:34:43.000Z | 2021-06-11T19:34:43.000Z | # -*- coding: utf-8 -*-
"""
@author: Matthew Beauregard Smith (UT Austin)
"""
from common.peptide import Peptide
from plotting.plot_pr_curve import plot_pr_curve
from numpy import load
from simulate.label_peptides import label_peptides
# Path to the numpy array of true peptide class indices, one per read.
TRUE_Y_FILE = 'C:/Users/Matthew/ICES/MarcotteLab/data/classification/control_15_proteins/true_pep_i.npy'
# Number of peptide rows expected in PEPTIDE_FILE.
NUM_PEPTIDES = 705
# NOTE(review): NUM_CHANNELS is not referenced elsewhere in this script;
# presumably the number of fluorophore channels — confirm against the pipeline.
NUM_CHANNELS = 3
# Labelable amino-acid groups, one entry per dye (passed to label_peptides).
LABEL_SET = ['DE','Y','C']
# CSV of peptides: a header line, a null line, then one peptide per row.
PEPTIDE_FILE = 'C:/Users/Matthew/ICES/MarcotteLab/data/classification/control_15_proteins/peps.csv'
# Ground-truth class index for every classified read.
true_y = load(TRUE_Y_FILE)
class GroundTruth:
    """Minimal adapter exposing a sample's true class label.

    Wraps one integer entry of true_y so the PR-curve plotting code can
    query it through a class_index() accessor.
    """

    def __init__(self, value):
        # The integer class index this sample was truly generated from.
        self.value = value

    def class_index(self):
        """Returns the wrapped true class index."""
        return self.value
# Wrap every true label in a GroundTruth adapter for plot_pr_curve.
# (Comprehension replaces the original preallocate-and-index loop.)
ground_truth = [GroundTruth(label) for label in true_y]
# Parse the peptide CSV into Peptide objects. The context manager guarantees
# the file handle is closed even if a row fails to parse (the original code
# used a bare open()/close() pair and leaked the handle on exceptions).
peptides = [0] * NUM_PEPTIDES
with open(PEPTIDE_FILE, 'r') as f:
    f.readline()  # header
    f.readline()  # Zack's null line
    line = f.readline()
    i = 0
    # Rows end at a blank line ('\n') or at end-of-file ('').
    while line != '\n' and line != '':
        items = line.split(",")
        pep_id = items[0]      # first column: peptide identifier
        pep_str = items[-1]    # last column: peptide sequence string
        peptides[i] = Peptide(pep_str, pep_id=pep_id)
        line = f.readline()
        i += 1
# Label the peptides, then map every source peptide id to the prediction it
# would receive: the id of the first peptide sharing its dye sequence, with
# probability 1/(number of peptides sharing that dye sequence).
dye_seqs = label_peptides(peptides, LABEL_SET)
id_to_prediction = {}
for dye_seq in dye_seqs:
    sources = dye_seq.src_peptides
    # All peptides with the same dye sequence share one prediction tuple.
    shared_prediction = (int(sources[0].pep_id), 1 / len(sources))
    for peptide in sources:
        id_to_prediction[int(peptide.pep_id)] = shared_prediction
# Look up the (predicted id, probability) tuple for every ground-truth label
# and plot the precision-recall curve. (Comprehension replaces the original
# preallocate-and-index loop.)
predictions = [id_to_prediction[truth.value] for truth in ground_truth]
plot_pr_curve(predictions, ground_truth)
| 30.259259 | 105 | 0.676255 | 133 | 0.081395 | 0 | 0 | 0 | 0 | 0 | 0 | 303 | 0.185435 |
3e5961792d37ca4a7091e59c1c74180b0cb0ef47 | 337,498 | py | Python | fitparse/profile.py | vlcvboyer/python-fitparse | bef76231a1c8dddfafc23070b43684e7d0c6e916 | [
"MIT"
] | null | null | null | fitparse/profile.py | vlcvboyer/python-fitparse | bef76231a1c8dddfafc23070b43684e7d0c6e916 | [
"MIT"
] | 1 | 2019-01-03T08:56:11.000Z | 2019-06-05T09:24:13.000Z | fitparse/profile.py | vlcvboyer/python-fitparse | bef76231a1c8dddfafc23070b43684e7d0c6e916 | [
"MIT"
] | 1 | 2018-08-30T15:06:12.000Z | 2018-08-30T15:06:12.000Z | ################# BEGIN AUTOMATICALLY GENERATED FIT PROFILE ##################
########################### DO NOT EDIT THIS FILE ############################
####### EXPORTED PROFILE FROM SDK VERSION 20.33 AT 2017-05-17 22:36:12 #######
########## PARSED 118 TYPES (1699 VALUES), 76 MESSAGES (950 FIELDS) ##########
from fitparse.records import (
ComponentField,
Field,
FieldType,
MessageType,
ReferenceField,
SubField,
BASE_TYPES,
)
FIELD_TYPES = {
'activity': FieldType(
name='activity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'auto_multi_sport',
},
),
'activity_class': FieldType(
name='activity_class',
base_type=BASE_TYPES[0x00], # enum
values={
100: 'level_max',
0x7F: 'level', # 0 to 100
0x80: 'athlete',
},
),
'activity_level': FieldType(
name='activity_level',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'low',
1: 'medium',
2: 'high',
},
),
'activity_subtype': FieldType(
name='activity_subtype',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'treadmill', # Run
2: 'street', # Run
3: 'trail', # Run
4: 'track', # Run
5: 'spin', # Cycling
6: 'indoor_cycling', # Cycling
7: 'road', # Cycling
8: 'mountain', # Cycling
9: 'downhill', # Cycling
10: 'recumbent', # Cycling
11: 'cyclocross', # Cycling
12: 'hand_cycling', # Cycling
13: 'track_cycling', # Cycling
14: 'indoor_rowing', # Fitness Equipment
15: 'elliptical', # Fitness Equipment
16: 'stair_climbing', # Fitness Equipment
17: 'lap_swimming', # Swimming
18: 'open_water', # Swimming
254: 'all',
},
),
'activity_type': FieldType(
name='activity_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'running',
2: 'cycling',
3: 'transition', # Mulitsport transition
4: 'fitness_equipment',
5: 'swimming',
6: 'walking',
8: 'sedentary',
254: 'all', # All is for goals only to include all sports.
},
),
'analog_watchface_layout': FieldType(
name='analog_watchface_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'minimal',
1: 'traditional',
2: 'modern',
},
),
'ant_network': FieldType(
name='ant_network',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'public',
1: 'antplus',
2: 'antfs',
3: 'private',
},
),
'antplus_device_type': FieldType(
name='antplus_device_type',
base_type=BASE_TYPES[0x02], # uint8
values={
1: 'antfs',
11: 'bike_power',
12: 'environment_sensor_legacy',
15: 'multi_sport_speed_distance',
16: 'control',
17: 'fitness_equipment',
18: 'blood_pressure',
19: 'geocache_node',
20: 'light_electric_vehicle',
25: 'env_sensor',
26: 'racquet',
27: 'control_hub',
31: 'muscle_oxygen',
35: 'bike_light_main',
36: 'bike_light_shared',
38: 'exd',
40: 'bike_radar',
119: 'weight_scale',
120: 'heart_rate',
121: 'bike_speed_cadence',
122: 'bike_cadence',
123: 'bike_speed',
124: 'stride_speed_distance',
},
),
'attitude_stage': FieldType(
name='attitude_stage',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'failed',
1: 'aligning',
2: 'degraded',
3: 'valid',
},
),
'attitude_validity': FieldType(
name='attitude_validity',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0001: 'track_angle_heading_valid',
0x0002: 'pitch_valid',
0x0004: 'roll_valid',
0x0008: 'lateral_body_accel_valid',
0x0010: 'normal_body_accel_valid',
0x0020: 'turn_rate_valid',
0x0040: 'hw_fail',
0x0080: 'mag_invalid',
0x0100: 'no_gps',
0x0200: 'gps_invalid',
0x0400: 'solution_coasting',
0x0800: 'true_track_angle',
0x1000: 'magnetic_heading',
},
),
'auto_activity_detect': FieldType(
name='auto_activity_detect',
base_type=BASE_TYPES[0x86], # uint32
values={
0x00000000: 'none',
0x00000001: 'running',
0x00000002: 'cycling',
0x00000004: 'swimming',
0x00000008: 'walking',
0x00000020: 'elliptical',
0x00000400: 'sedentary',
},
),
'auto_sync_frequency': FieldType(
name='auto_sync_frequency',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'never',
1: 'occasionally',
2: 'frequent',
3: 'once_a_day',
4: 'remote',
},
),
'autolap_trigger': FieldType(
name='autolap_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'position_start',
3: 'position_lap',
4: 'position_waypoint',
5: 'position_marked',
6: 'off',
},
),
'autoscroll': FieldType(
name='autoscroll',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'none',
1: 'slow',
2: 'medium',
3: 'fast',
},
),
'backlight_mode': FieldType(
name='backlight_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'manual',
2: 'key_and_messages',
3: 'auto_brightness',
4: 'smart_notifications',
5: 'key_and_messages_night',
6: 'key_and_messages_and_smart_notifications',
},
),
'battery_status': FieldType(
name='battery_status',
base_type=BASE_TYPES[0x02], # uint8
values={
1: 'new',
2: 'good',
3: 'ok',
4: 'low',
5: 'critical',
6: 'charging',
7: 'unknown',
},
),
'bike_light_beam_angle_mode': FieldType(
name='bike_light_beam_angle_mode',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'manual',
1: 'auto',
},
),
'bike_light_network_config_type': FieldType(
name='bike_light_network_config_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto',
4: 'individual',
5: 'high_visibility',
6: 'trail',
},
),
'body_location': FieldType(
name='body_location',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'left_leg',
1: 'left_calf',
2: 'left_shin',
3: 'left_hamstring',
4: 'left_quad',
5: 'left_glute',
6: 'right_leg',
7: 'right_calf',
8: 'right_shin',
9: 'right_hamstring',
10: 'right_quad',
11: 'right_glute',
12: 'torso_back',
13: 'left_lower_back',
14: 'left_upper_back',
15: 'right_lower_back',
16: 'right_upper_back',
17: 'torso_front',
18: 'left_abdomen',
19: 'left_chest',
20: 'right_abdomen',
21: 'right_chest',
22: 'left_arm',
23: 'left_shoulder',
24: 'left_bicep',
25: 'left_tricep',
26: 'left_brachioradialis', # Left anterior forearm
27: 'left_forearm_extensors', # Left posterior forearm
28: 'right_arm',
29: 'right_shoulder',
30: 'right_bicep',
31: 'right_tricep',
32: 'right_brachioradialis', # Right anterior forearm
33: 'right_forearm_extensors', # Right posterior forearm
34: 'neck',
35: 'throat',
36: 'waist_mid_back',
37: 'waist_front',
38: 'waist_left',
39: 'waist_right',
},
),
'bool': FieldType(
name='bool',
base_type=BASE_TYPES[0x00], # enum
),
'bp_status': FieldType(
name='bp_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_error',
1: 'error_incomplete_data',
2: 'error_no_measurement',
3: 'error_data_out_of_range',
4: 'error_irregular_heart_rate',
},
),
'camera_event_type': FieldType(
name='camera_event_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'video_start', # Start of video recording
1: 'video_split', # Mark of video file split (end of one file, beginning of the other)
2: 'video_end', # End of video recording
3: 'photo_taken', # Still photo taken
4: 'video_second_stream_start',
5: 'video_second_stream_split',
6: 'video_second_stream_end',
7: 'video_split_start', # Mark of video file split start
8: 'video_second_stream_split_start',
11: 'video_pause', # Mark when a video recording has been paused
12: 'video_second_stream_pause',
13: 'video_resume', # Mark when a video recording has been resumed
14: 'video_second_stream_resume',
},
),
'camera_orientation_type': FieldType(
name='camera_orientation_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'camera_orientation_0',
1: 'camera_orientation_90',
2: 'camera_orientation_180',
3: 'camera_orientation_270',
},
),
'checksum': FieldType(
name='checksum',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'clear', # Allows clear of checksum for flash memory where can only write 1 to 0 without erasing sector.
1: 'ok', # Set to mark checksum as valid if computes to invalid values 0 or 0xFF. Checksum can also be set to ok to save encoding computation time.
},
),
'comm_timeout_type': FieldType(
name='comm_timeout_type',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'wildcard_pairing_timeout', # Timeout pairing to any device
1: 'pairing_timeout', # Timeout pairing to previously paired device
2: 'connection_lost', # Temporary loss of communications
3: 'connection_timeout', # Connection closed due to extended bad communications
},
),
'connectivity_capabilities': FieldType(
name='connectivity_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'bluetooth',
0x00000002: 'bluetooth_le',
0x00000004: 'ant',
0x00000008: 'activity_upload',
0x00000010: 'course_download',
0x00000020: 'workout_download',
0x00000040: 'live_track',
0x00000080: 'weather_conditions',
0x00000100: 'weather_alerts',
0x00000200: 'gps_ephemeris_download',
0x00000400: 'explicit_archive',
0x00000800: 'setup_incomplete',
0x00001000: 'continue_sync_after_software_update',
0x00002000: 'connect_iq_app_download',
0x00004000: 'golf_course_download',
0x00008000: 'device_initiates_sync', # Indicates device is in control of initiating all syncs
0x00010000: 'connect_iq_watch_app_download',
0x00020000: 'connect_iq_widget_download',
0x00040000: 'connect_iq_watch_face_download',
0x00080000: 'connect_iq_data_field_download',
0x00100000: 'connect_iq_app_managment', # Device supports delete and reorder of apps via GCM
0x00200000: 'swing_sensor',
0x00400000: 'swing_sensor_remote',
0x00800000: 'incident_detection', # Device supports incident detection
0x01000000: 'audio_prompts',
0x02000000: 'wifi_verification', # Device supports reporting wifi verification via GCM
0x04000000: 'true_up', # Device supports True Up
0x08000000: 'find_my_watch', # Device supports Find My Watch
0x10000000: 'remote_manual_sync',
0x20000000: 'live_track_auto_start', # Device supports LiveTrack auto start
0x40000000: 'live_track_messaging', # Device supports LiveTrack Messaging
0x80000000: 'instant_input', # Device supports instant input feature
},
),
'course_capabilities': FieldType(
name='course_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'processed',
0x00000002: 'valid',
0x00000004: 'time',
0x00000008: 'distance',
0x00000010: 'position',
0x00000020: 'heart_rate',
0x00000040: 'power',
0x00000080: 'cadence',
0x00000100: 'training',
0x00000200: 'navigation',
0x00000400: 'bikeway',
},
),
'course_point': FieldType(
name='course_point',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'summit',
2: 'valley',
3: 'water',
4: 'food',
5: 'danger',
6: 'left',
7: 'right',
8: 'straight',
9: 'first_aid',
10: 'fourth_category',
11: 'third_category',
12: 'second_category',
13: 'first_category',
14: 'hors_category',
15: 'sprint',
16: 'left_fork',
17: 'right_fork',
18: 'middle_fork',
19: 'slight_left',
20: 'sharp_left',
21: 'slight_right',
22: 'sharp_right',
23: 'u_turn',
24: 'segment_start',
25: 'segment_end',
},
),
'date_mode': FieldType(
name='date_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'day_month',
1: 'month_day',
},
),
'date_time': FieldType( # seconds since UTC 00:00 Dec 31 1989
name='date_time',
base_type=BASE_TYPES[0x86], # uint32
),
'day_of_week': FieldType(
name='day_of_week',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'sunday',
1: 'monday',
2: 'tuesday',
3: 'wednesday',
4: 'thursday',
5: 'friday',
6: 'saturday',
},
),
'device_index': FieldType(
name='device_index',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'creator', # Creator of the file is always device index 0.
},
),
'digital_watchface_layout': FieldType(
name='digital_watchface_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'traditional',
1: 'modern',
2: 'bold',
},
),
'display_heart': FieldType(
name='display_heart',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'bpm',
1: 'max',
2: 'reserve',
},
),
'display_measure': FieldType(
name='display_measure',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'metric',
1: 'statute',
2: 'nautical',
},
),
'display_orientation': FieldType(
name='display_orientation',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto', # automatic if the device supports it
1: 'portrait',
2: 'landscape',
3: 'portrait_flipped', # portrait mode but rotated 180 degrees
4: 'landscape_flipped', # landscape mode but rotated 180 degrees
},
),
'display_position': FieldType(
name='display_position',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'degree', # dd.dddddd
1: 'degree_minute', # dddmm.mmm
2: 'degree_minute_second', # dddmmss
3: 'austrian_grid', # Austrian Grid (BMN)
4: 'british_grid', # British National Grid
5: 'dutch_grid', # Dutch grid system
6: 'hungarian_grid', # Hungarian grid system
7: 'finnish_grid', # Finnish grid system Zone3 KKJ27
8: 'german_grid', # Gausss Krueger (German)
9: 'icelandic_grid', # Icelandic Grid
10: 'indonesian_equatorial', # Indonesian Equatorial LCO
11: 'indonesian_irian', # Indonesian Irian LCO
12: 'indonesian_southern', # Indonesian Southern LCO
13: 'india_zone_0', # India zone 0
14: 'india_zone_IA', # India zone IA
15: 'india_zone_IB', # India zone IB
16: 'india_zone_IIA', # India zone IIA
17: 'india_zone_IIB', # India zone IIB
18: 'india_zone_IIIA', # India zone IIIA
19: 'india_zone_IIIB', # India zone IIIB
20: 'india_zone_IVA', # India zone IVA
21: 'india_zone_IVB', # India zone IVB
22: 'irish_transverse', # Irish Transverse Mercator
23: 'irish_grid', # Irish Grid
24: 'loran', # Loran TD
25: 'maidenhead_grid', # Maidenhead grid system
26: 'mgrs_grid', # MGRS grid system
27: 'new_zealand_grid', # New Zealand grid system
28: 'new_zealand_transverse', # New Zealand Transverse Mercator
29: 'qatar_grid', # Qatar National Grid
30: 'modified_swedish_grid', # Modified RT-90 (Sweden)
31: 'swedish_grid', # RT-90 (Sweden)
32: 'south_african_grid', # South African Grid
33: 'swiss_grid', # Swiss CH-1903 grid
34: 'taiwan_grid', # Taiwan Grid
35: 'united_states_grid', # United States National Grid
36: 'utm_ups_grid', # UTM/UPS grid system
37: 'west_malayan', # West Malayan RSO
38: 'borneo_rso', # Borneo RSO
39: 'estonian_grid', # Estonian grid system
40: 'latvian_grid', # Latvian Transverse Mercator
41: 'swedish_ref_99_grid', # Reference Grid 99 TM (Swedish)
},
),
'display_power': FieldType(
name='display_power',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'watts',
1: 'percent_ftp',
},
),
'event': FieldType(
name='event',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'timer', # Group 0. Start / stop_all
3: 'workout', # start / stop
4: 'workout_step', # Start at beginning of workout. Stop at end of each step.
5: 'power_down', # stop_all group 0
6: 'power_up', # stop_all group 0
7: 'off_course', # start / stop group 0
8: 'session', # Stop at end of each session.
9: 'lap', # Stop at end of each lap.
10: 'course_point', # marker
11: 'battery', # marker
12: 'virtual_partner_pace', # Group 1. Start at beginning of activity if VP enabled, when VP pace is changed during activity or VP enabled mid activity. stop_disable when VP disabled.
13: 'hr_high_alert', # Group 0. Start / stop when in alert condition.
14: 'hr_low_alert', # Group 0. Start / stop when in alert condition.
15: 'speed_high_alert', # Group 0. Start / stop when in alert condition.
16: 'speed_low_alert', # Group 0. Start / stop when in alert condition.
17: 'cad_high_alert', # Group 0. Start / stop when in alert condition.
18: 'cad_low_alert', # Group 0. Start / stop when in alert condition.
19: 'power_high_alert', # Group 0. Start / stop when in alert condition.
20: 'power_low_alert', # Group 0. Start / stop when in alert condition.
21: 'recovery_hr', # marker
22: 'battery_low', # marker
23: 'time_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
24: 'distance_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
25: 'calorie_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
26: 'activity', # Group 1.. Stop at end of activity.
27: 'fitness_equipment', # marker
28: 'length', # Stop at end of each length.
32: 'user_marker', # marker
33: 'sport_point', # marker
36: 'calibration', # start/stop/marker
42: 'front_gear_change', # marker
43: 'rear_gear_change', # marker
44: 'rider_position_change', # marker
45: 'elev_high_alert', # Group 0. Start / stop when in alert condition.
46: 'elev_low_alert', # Group 0. Start / stop when in alert condition.
47: 'comm_timeout', # marker
},
),
'event_type': FieldType(
name='event_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'start',
1: 'stop',
2: 'consecutive_depreciated',
3: 'marker',
4: 'stop_all',
5: 'begin_depreciated',
6: 'end_depreciated',
7: 'end_all_depreciated',
8: 'stop_disable',
9: 'stop_disable_all',
},
),
'exd_data_units': FieldType(
name='exd_data_units',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_units',
1: 'laps',
2: 'miles_per_hour',
3: 'kilometers_per_hour',
4: 'feet_per_hour',
5: 'meters_per_hour',
6: 'degrees_celsius',
7: 'degrees_farenheit',
8: 'zone',
9: 'gear',
10: 'rpm',
11: 'bpm',
12: 'degrees',
13: 'millimeters',
14: 'meters',
15: 'kilometers',
16: 'feet',
17: 'yards',
18: 'kilofeet',
19: 'miles',
20: 'time',
21: 'enum_turn_type',
22: 'percent',
23: 'watts',
24: 'watts_per_kilogram',
25: 'enum_battery_status',
26: 'enum_bike_light_beam_angle_mode',
27: 'enum_bike_light_battery_status',
28: 'enum_bike_light_network_config_type',
29: 'lights',
30: 'seconds',
31: 'minutes',
32: 'hours',
33: 'calories',
34: 'kilojoules',
35: 'milliseconds',
36: 'second_per_mile',
37: 'second_per_kilometer',
38: 'centimeter',
39: 'enum_course_point',
40: 'bradians',
41: 'enum_sport',
42: 'inches_hg',
43: 'mm_hg',
44: 'mbars',
45: 'hecto_pascals',
46: 'feet_per_min',
47: 'meters_per_min',
48: 'meters_per_sec',
49: 'eight_cardinal',
},
),
'exd_descriptors': FieldType(
name='exd_descriptors',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'bike_light_battery_status',
1: 'beam_angle_status',
2: 'batery_level',
3: 'light_network_mode',
4: 'number_lights_connected',
5: 'cadence',
6: 'distance',
7: 'estimated_time_of_arrival',
8: 'heading',
9: 'time',
10: 'battery_level',
11: 'trainer_resistance',
12: 'trainer_target_power',
13: 'time_seated',
14: 'time_standing',
15: 'elevation',
16: 'grade',
17: 'ascent',
18: 'descent',
19: 'vertical_speed',
20: 'di2_battery_level',
21: 'front_gear',
22: 'rear_gear',
23: 'gear_ratio',
24: 'heart_rate',
25: 'heart_rate_zone',
26: 'time_in_heart_rate_zone',
27: 'heart_rate_reserve',
28: 'calories',
29: 'gps_accuracy',
30: 'gps_signal_strength',
31: 'temperature',
32: 'time_of_day',
33: 'balance',
34: 'pedal_smoothness',
35: 'power',
36: 'functional_threshold_power',
37: 'intensity_factor',
38: 'work',
39: 'power_ratio',
40: 'normalized_power',
41: 'training_stress_Score',
42: 'time_on_zone',
43: 'speed',
44: 'laps',
45: 'reps',
46: 'workout_step',
47: 'course_distance',
48: 'navigation_distance',
49: 'course_estimated_time_of_arrival',
50: 'navigation_estimated_time_of_arrival',
51: 'course_time',
52: 'navigation_time',
53: 'course_heading',
54: 'navigation_heading',
55: 'power_zone',
56: 'torque_effectiveness',
57: 'timer_time',
58: 'power_weight_ratio',
59: 'left_platform_center_offset',
60: 'right_platform_center_offset',
61: 'left_power_phase_start_angle',
62: 'right_power_phase_start_angle',
63: 'left_power_phase_finish_angle',
64: 'right_power_phase_finish_angle',
65: 'gears', # Combined gear information
66: 'pace',
67: 'training_effect',
68: 'vertical_oscillation',
69: 'vertical_ratio',
70: 'ground_contact_time',
71: 'left_ground_contact_time_balance',
72: 'right_ground_contact_time_balance',
73: 'stride_length',
74: 'running_cadence',
75: 'performance_condition',
76: 'course_type',
77: 'time_in_power_zone',
78: 'navigation_turn',
79: 'course_location',
80: 'navigation_location',
81: 'compass',
82: 'gear_combo',
83: 'muscle_oxygen',
84: 'icon',
85: 'compass_heading',
86: 'gps_heading',
87: 'gps_elevation',
88: 'anaerobic_training_effect',
89: 'course',
90: 'off_course',
91: 'glide_ratio',
92: 'vertical_distance',
93: 'vmg',
94: 'ambient_pressure',
95: 'pressure',
},
),
'exd_display_type': FieldType(
name='exd_display_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'numerical',
1: 'simple',
2: 'graph',
3: 'bar',
4: 'circle_graph',
5: 'virtual_partner',
6: 'balance',
7: 'string_list',
8: 'string',
9: 'simple_dynamic_icon',
10: 'gauge',
},
),
'exd_layout': FieldType(
name='exd_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'full_screen',
1: 'half_vertical',
2: 'half_horizontal',
3: 'half_vertical_right_split',
4: 'half_horizontal_bottom_split',
5: 'full_quarter_split',
6: 'half_vertical_left_split',
7: 'half_horizontal_top_split',
},
),
'exd_qualifiers': FieldType(
name='exd_qualifiers',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_qualifier',
1: 'instantaneous',
2: 'average',
3: 'lap',
4: 'maximum',
5: 'maximum_average',
6: 'maximum_lap',
7: 'last_lap',
8: 'average_lap',
9: 'to_destination',
10: 'to_go',
11: 'to_next',
12: 'next_course_point',
13: 'total',
14: 'three_second_average',
15: 'ten_second_average',
16: 'thirty_second_average',
17: 'percent_maximum',
18: 'percent_maximum_average',
19: 'lap_percent_maximum',
20: 'elapsed',
21: 'sunrise',
22: 'sunset',
23: 'compared_to_virtual_partner',
24: 'maximum_24h',
25: 'minimum_24h',
26: 'minimum',
27: 'first',
28: 'second',
29: 'third',
30: 'shifter',
31: 'last_sport',
32: 'moving',
33: 'stopped',
242: 'zone_9',
243: 'zone_8',
244: 'zone_7',
245: 'zone_6',
246: 'zone_5',
247: 'zone_4',
248: 'zone_3',
249: 'zone_2',
250: 'zone_1',
},
),
'file': FieldType(
name='file',
base_type=BASE_TYPES[0x00], # enum
values={
1: 'device', # Read only, single file. Must be in root directory.
2: 'settings', # Read/write, single file. Directory=Settings
3: 'sport', # Read/write, multiple files, file number = sport type. Directory=Sports
4: 'activity', # Read/erase, multiple files. Directory=Activities
5: 'workout', # Read/write/erase, multiple files. Directory=Workouts
6: 'course', # Read/write/erase, multiple files. Directory=Courses
7: 'schedules', # Read/write, single file. Directory=Schedules
9: 'weight', # Read only, single file. Circular buffer. All message definitions at start of file. Directory=Weight
10: 'totals', # Read only, single file. Directory=Totals
11: 'goals', # Read/write, single file. Directory=Goals
14: 'blood_pressure', # Read only. Directory=Blood Pressure
15: 'monitoring_a', # Read only. Directory=Monitoring. File number=sub type.
20: 'activity_summary', # Read/erase, multiple files. Directory=Activities
28: 'monitoring_daily',
32: 'monitoring_b', # Read only. Directory=Monitoring. File number=identifier
34: 'segment', # Read/write/erase. Multiple Files. Directory=Segments
35: 'segment_list', # Read/write/erase. Single File. Directory=Segments
40: 'exd_configuration', # Read/write/erase. Single File. Directory=Settings
0xF7: 'mfg_range_min', # 0xF7 - 0xFE reserved for manufacturer specific file types
0xFE: 'mfg_range_max', # 0xF7 - 0xFE reserved for manufacturer specific file types
},
),
'file_flags': FieldType(
name='file_flags',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x02: 'read',
0x04: 'write',
0x08: 'erase',
},
),
'fit_base_type': FieldType(
name='fit_base_type',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'enum',
1: 'sint8',
2: 'uint8',
7: 'string',
10: 'uint8z',
13: 'byte',
131: 'sint16',
132: 'uint16',
133: 'sint32',
134: 'uint32',
136: 'float32',
137: 'float64',
139: 'uint16z',
140: 'uint32z',
142: 'sint64',
143: 'uint64',
144: 'uint64z',
},
),
'fit_base_unit': FieldType(
name='fit_base_unit',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'other',
1: 'kilogram',
2: 'pound',
},
),
'fitness_equipment_state': FieldType( # fitness equipment event data
name='fitness_equipment_state',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'ready',
1: 'in_use',
2: 'paused',
3: 'unknown', # lost connection to fitness equipment
},
),
'garmin_product': FieldType(
name='garmin_product',
base_type=BASE_TYPES[0x84], # uint16
values={
1: 'hrm1',
2: 'axh01', # AXH01 HRM chipset
3: 'axb01',
4: 'axb02',
5: 'hrm2ss',
6: 'dsi_alf02',
7: 'hrm3ss',
8: 'hrm_run_single_byte_product_id', # hrm_run model for HRM ANT+ messaging
9: 'bsm', # BSM model for ANT+ messaging
10: 'bcm', # BCM model for ANT+ messaging
11: 'axs01', # AXS01 HRM Bike Chipset model for ANT+ messaging
12: 'hrm_tri_single_byte_product_id', # hrm_tri model for HRM ANT+ messaging
14: 'fr225_single_byte_product_id', # fr225 model for HRM ANT+ messaging
473: 'fr301_china',
474: 'fr301_japan',
475: 'fr301_korea',
494: 'fr301_taiwan',
717: 'fr405', # Forerunner 405
782: 'fr50', # Forerunner 50
987: 'fr405_japan',
988: 'fr60', # Forerunner 60
1011: 'dsi_alf01',
1018: 'fr310xt', # Forerunner 310
1036: 'edge500',
1124: 'fr110', # Forerunner 110
1169: 'edge800',
1199: 'edge500_taiwan',
1213: 'edge500_japan',
1253: 'chirp',
1274: 'fr110_japan',
1325: 'edge200',
1328: 'fr910xt',
1333: 'edge800_taiwan',
1334: 'edge800_japan',
1341: 'alf04',
1345: 'fr610',
1360: 'fr210_japan',
1380: 'vector_ss',
1381: 'vector_cp',
1386: 'edge800_china',
1387: 'edge500_china',
1410: 'fr610_japan',
1422: 'edge500_korea',
1436: 'fr70',
1446: 'fr310xt_4t',
1461: 'amx',
1482: 'fr10',
1497: 'edge800_korea',
1499: 'swim',
1537: 'fr910xt_china',
1551: 'fenix',
1555: 'edge200_taiwan',
1561: 'edge510',
1567: 'edge810',
1570: 'tempe',
1600: 'fr910xt_japan',
1623: 'fr620',
1632: 'fr220',
1664: 'fr910xt_korea',
1688: 'fr10_japan',
1721: 'edge810_japan',
1735: 'virb_elite',
1736: 'edge_touring', # Also Edge Touring Plus
1742: 'edge510_japan',
1743: 'hrm_tri',
1752: 'hrm_run',
1765: 'fr920xt',
1821: 'edge510_asia',
1822: 'edge810_china',
1823: 'edge810_taiwan',
1836: 'edge1000',
1837: 'vivo_fit',
1853: 'virb_remote',
1885: 'vivo_ki',
1903: 'fr15',
1907: 'vivo_active',
1918: 'edge510_korea',
1928: 'fr620_japan',
1929: 'fr620_china',
1930: 'fr220_japan',
1931: 'fr220_china',
1936: 'approach_s6',
1956: 'vivo_smart',
1967: 'fenix2',
1988: 'epix',
2050: 'fenix3',
2052: 'edge1000_taiwan',
2053: 'edge1000_japan',
2061: 'fr15_japan',
2067: 'edge520',
2070: 'edge1000_china',
2072: 'fr620_russia',
2073: 'fr220_russia',
2079: 'vector_s',
2100: 'edge1000_korea',
2130: 'fr920xt_taiwan',
2131: 'fr920xt_china',
2132: 'fr920xt_japan',
2134: 'virbx',
2135: 'vivo_smart_apac',
2140: 'etrex_touch',
2147: 'edge25',
2148: 'fr25',
2150: 'vivo_fit2',
2153: 'fr225',
2156: 'fr630',
2157: 'fr230',
2160: 'vivo_active_apac',
2161: 'vector_2',
2162: 'vector_2s',
2172: 'virbxe',
2173: 'fr620_taiwan',
2174: 'fr220_taiwan',
2175: 'truswing',
2188: 'fenix3_china',
2189: 'fenix3_twn',
2192: 'varia_headlight',
2193: 'varia_taillight_old',
2204: 'edge_explore_1000',
2219: 'fr225_asia',
2225: 'varia_radar_taillight',
2226: 'varia_radar_display',
2238: 'edge20',
2262: 'd2_bravo',
2266: 'approach_s20',
2276: 'varia_remote',
2327: 'hrm4_run',
2337: 'vivo_active_hr',
2347: 'vivo_smart_gps_hr',
2348: 'vivo_smart_hr',
2368: 'vivo_move',
2398: 'varia_vision',
2406: 'vivo_fit3',
2413: 'fenix3_hr',
2417: 'virb_ultra_30',
2429: 'index_smart_scale',
2431: 'fr235',
2432: 'fenix3_chronos',
2441: 'oregon7xx',
2444: 'rino7xx',
2496: 'nautix',
2530: 'edge_820',
2531: 'edge_explore_820',
2544: 'fenix5s',
2547: 'd2_bravo_titanium',
2593: 'running_dynamics_pod',
2604: 'fenix5x',
2606: 'vivo_fit_jr',
2691: 'fr935',
2697: 'fenix5',
10007: 'sdm4', # SDM4 footpod
10014: 'edge_remote',
20119: 'training_center',
65531: 'connectiq_simulator',
65532: 'android_antplus_plugin',
65534: 'connect', # Garmin Connect website
},
),
'gender': FieldType(
name='gender',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'female',
1: 'male',
},
),
'goal': FieldType(
name='goal',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'calories',
3: 'frequency',
4: 'steps',
5: 'ascent',
6: 'active_minutes',
},
),
'goal_recurrence': FieldType(
name='goal_recurrence',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'daily',
2: 'weekly',
3: 'monthly',
4: 'yearly',
5: 'custom',
},
),
'goal_source': FieldType(
name='goal_source',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto', # Device generated
1: 'community', # Social network sourced goal
2: 'user', # Manually generated
},
),
'hr_type': FieldType(
name='hr_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'normal',
1: 'irregular',
},
),
'hr_zone_calc': FieldType(
name='hr_zone_calc',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'custom',
1: 'percent_max_hr',
2: 'percent_hrr',
},
),
'intensity': FieldType(
name='intensity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'active',
1: 'rest',
2: 'warmup',
3: 'cooldown',
},
),
'language': FieldType(
name='language',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'english',
1: 'french',
2: 'italian',
3: 'german',
4: 'spanish',
5: 'croatian',
6: 'czech',
7: 'danish',
8: 'dutch',
9: 'finnish',
10: 'greek',
11: 'hungarian',
12: 'norwegian',
13: 'polish',
14: 'portuguese',
15: 'slovakian',
16: 'slovenian',
17: 'swedish',
18: 'russian',
19: 'turkish',
20: 'latvian',
21: 'ukrainian',
22: 'arabic',
23: 'farsi',
24: 'bulgarian',
25: 'romanian',
26: 'chinese',
27: 'japanese',
28: 'korean',
29: 'taiwanese',
30: 'thai',
31: 'hebrew',
32: 'brazilian_portuguese',
33: 'indonesian',
34: 'malaysian',
35: 'vietnamese',
36: 'burmese',
37: 'mongolian',
254: 'custom',
},
),
'language_bits_0': FieldType( # Bit field corresponding to language enum type (1 << language).
name='language_bits_0',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'english',
0x02: 'french',
0x04: 'italian',
0x08: 'german',
0x10: 'spanish',
0x20: 'croatian',
0x40: 'czech',
0x80: 'danish',
},
),
'language_bits_1': FieldType(
name='language_bits_1',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'dutch',
0x02: 'finnish',
0x04: 'greek',
0x08: 'hungarian',
0x10: 'norwegian',
0x20: 'polish',
0x40: 'portuguese',
0x80: 'slovakian',
},
),
'language_bits_2': FieldType(
name='language_bits_2',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'slovenian',
0x02: 'swedish',
0x04: 'russian',
0x08: 'turkish',
0x10: 'latvian',
0x20: 'ukrainian',
0x40: 'arabic',
0x80: 'farsi',
},
),
'language_bits_3': FieldType(
name='language_bits_3',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'bulgarian',
0x02: 'romanian',
0x04: 'chinese',
0x08: 'japanese',
0x10: 'korean',
0x20: 'taiwanese',
0x40: 'thai',
0x80: 'hebrew',
},
),
'language_bits_4': FieldType(
name='language_bits_4',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'brazilian_portuguese',
0x02: 'indonesian',
0x04: 'malaysian',
0x08: 'vietnamese',
0x10: 'burmese',
0x20: 'mongolian',
},
),
'lap_trigger': FieldType(
name='lap_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'time',
2: 'distance',
3: 'position_start',
4: 'position_lap',
5: 'position_waypoint',
6: 'position_marked',
7: 'session_end',
8: 'fitness_equipment',
},
),
'left_right_balance': FieldType(
name='left_right_balance',
base_type=BASE_TYPES[0x02], # uint8
values={
0x7F: 'mask', # % contribution
0x80: 'right', # data corresponds to right if set, otherwise unknown
},
),
'left_right_balance_100': FieldType(
name='left_right_balance_100',
base_type=BASE_TYPES[0x84], # uint16
values={
0x3FFF: 'mask', # % contribution scaled by 100
0x8000: 'right', # data corresponds to right if set, otherwise unknown
},
),
'length_type': FieldType(
name='length_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'idle', # Rest period. Length with no strokes
1: 'active', # Length with strokes.
},
),
'local_date_time': FieldType( # seconds since 00:00 Dec 31 1989 in local time zone
name='local_date_time',
base_type=BASE_TYPES[0x86], # uint32
values={
0x10000000: 'min', # if date_time is < 0x10000000 then it is system time (seconds from device power on)
},
),
'localtime_into_day': FieldType( # number of seconds into the day since local 00:00:00
name='localtime_into_day',
base_type=BASE_TYPES[0x86], # uint32
),
'manufacturer': FieldType(
name='manufacturer',
base_type=BASE_TYPES[0x84], # uint16
values={
1: 'garmin',
2: 'garmin_fr405_antfs', # Do not use. Used by FR405 for ANTFS man id.
3: 'zephyr',
4: 'dayton',
5: 'idt',
6: 'srm',
7: 'quarq',
8: 'ibike',
9: 'saris',
10: 'spark_hk',
11: 'tanita',
12: 'echowell',
13: 'dynastream_oem',
14: 'nautilus',
15: 'dynastream',
16: 'timex',
17: 'metrigear',
18: 'xelic',
19: 'beurer',
20: 'cardiosport',
21: 'a_and_d',
22: 'hmm',
23: 'suunto',
24: 'thita_elektronik',
25: 'gpulse',
26: 'clean_mobile',
27: 'pedal_brain',
28: 'peaksware',
29: 'saxonar',
30: 'lemond_fitness',
31: 'dexcom',
32: 'wahoo_fitness',
33: 'octane_fitness',
34: 'archinoetics',
35: 'the_hurt_box',
36: 'citizen_systems',
37: 'magellan',
38: 'osynce',
39: 'holux',
40: 'concept2',
42: 'one_giant_leap',
43: 'ace_sensor',
44: 'brim_brothers',
45: 'xplova',
46: 'perception_digital',
47: 'bf1systems',
48: 'pioneer',
49: 'spantec',
50: 'metalogics',
51: '4iiiis',
52: 'seiko_epson',
53: 'seiko_epson_oem',
54: 'ifor_powell',
55: 'maxwell_guider',
56: 'star_trac',
57: 'breakaway',
58: 'alatech_technology_ltd',
59: 'mio_technology_europe',
60: 'rotor',
61: 'geonaute',
62: 'id_bike',
63: 'specialized',
64: 'wtek',
65: 'physical_enterprises',
66: 'north_pole_engineering',
67: 'bkool',
68: 'cateye',
69: 'stages_cycling',
70: 'sigmasport',
71: 'tomtom',
72: 'peripedal',
73: 'wattbike',
76: 'moxy',
77: 'ciclosport',
78: 'powerbahn',
79: 'acorn_projects_aps',
80: 'lifebeam',
81: 'bontrager',
82: 'wellgo',
83: 'scosche',
84: 'magura',
85: 'woodway',
86: 'elite',
87: 'nielsen_kellerman',
88: 'dk_city',
89: 'tacx',
90: 'direction_technology',
91: 'magtonic',
92: '1partcarbon',
93: 'inside_ride_technologies',
94: 'sound_of_motion',
95: 'stryd',
96: 'icg', # Indoorcycling Group
97: 'MiPulse',
98: 'bsx_athletics',
99: 'look',
100: 'campagnolo_srl',
101: 'body_bike_smart',
102: 'praxisworks',
103: 'limits_technology', # Limits Technology Ltd.
104: 'topaction_technology', # TopAction Technology Inc.
105: 'cosinuss',
106: 'fitcare',
107: 'magene',
108: 'giant_manufacturing_co',
109: 'tigrasport', # Tigrasport
110: 'salutron',
111: 'technogym',
112: 'bryton_sensors',
255: 'development',
257: 'healthandlife',
258: 'lezyne',
259: 'scribe_labs',
260: 'zwift',
261: 'watteam',
262: 'recon',
263: 'favero_electronics',
264: 'dynovelo',
265: 'strava',
266: 'precor', # Amer Sports
267: 'bryton',
268: 'sram',
269: 'navman', # MiTAC Global Corporation (Mio Technology)
270: 'cobi', # COBI GmbH
271: 'spivi',
272: 'mio_magellan',
273: 'evesports',
274: 'sensitivus_gauge',
275: 'podoon',
276: 'life_time_fitness',
277: 'falco_e_motors', # Falco eMotors Inc.
5759: 'actigraphcorp',
},
),
'mesg_count': FieldType(
name='mesg_count',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'num_per_file',
1: 'max_per_file',
2: 'max_per_file_type',
},
),
'mesg_num': FieldType(
name='mesg_num',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'file_id',
1: 'capabilities',
2: 'device_settings',
3: 'user_profile',
4: 'hrm_profile',
5: 'sdm_profile',
6: 'bike_profile',
7: 'zones_target',
8: 'hr_zone',
9: 'power_zone',
10: 'met_zone',
12: 'sport',
15: 'goal',
18: 'session',
19: 'lap',
20: 'record',
21: 'event',
23: 'device_info',
26: 'workout',
27: 'workout_step',
28: 'schedule',
30: 'weight_scale',
31: 'course',
32: 'course_point',
33: 'totals',
34: 'activity',
35: 'software',
37: 'file_capabilities',
38: 'mesg_capabilities',
39: 'field_capabilities',
49: 'file_creator',
51: 'blood_pressure',
53: 'speed_zone',
55: 'monitoring',
72: 'training_file',
78: 'hrv',
80: 'ant_rx',
81: 'ant_tx',
82: 'ant_channel_id',
101: 'length',
103: 'monitoring_info',
105: 'pad',
106: 'slave_device',
127: 'connectivity',
128: 'weather_conditions',
129: 'weather_alert',
131: 'cadence_zone',
132: 'hr',
142: 'segment_lap',
145: 'memo_glob',
148: 'segment_id',
149: 'segment_leaderboard_entry',
150: 'segment_point',
151: 'segment_file',
158: 'workout_session',
159: 'watchface_settings',
160: 'gps_metadata',
161: 'camera_event',
162: 'timestamp_correlation',
164: 'gyroscope_data',
165: 'accelerometer_data',
167: 'three_d_sensor_calibration',
169: 'video_frame',
174: 'obdii_data',
177: 'nmea_sentence',
178: 'aviation_attitude',
184: 'video',
185: 'video_title',
186: 'video_description',
187: 'video_clip',
188: 'ohr_settings',
200: 'exd_screen_configuration',
201: 'exd_data_field_configuration',
202: 'exd_data_concept_configuration',
206: 'field_description',
207: 'developer_data_id',
208: 'magnetometer_data',
},
),
'message_index': FieldType(
name='message_index',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0FFF: 'mask', # index
0x7000: 'reserved', # reserved (default 0)
0x8000: 'selected', # message is selected if set
},
),
'power_phase_type': FieldType(
name='power_phase_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'power_phase_start_angle',
1: 'power_phase_end_angle',
2: 'power_phase_arc_length',
3: 'power_phase_center',
},
),
'pwr_zone_calc': FieldType(
name='pwr_zone_calc',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'custom',
1: 'percent_ftp',
},
),
'rider_position_type': FieldType(
name='rider_position_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'seated',
1: 'standing',
2: 'transition_to_seated',
3: 'transition_to_standing',
},
),
'schedule': FieldType(
name='schedule',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'workout',
1: 'course',
},
),
'segment_delete_status': FieldType(
name='segment_delete_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'do_not_delete',
1: 'delete_one',
2: 'delete_all',
},
),
'segment_lap_status': FieldType(
name='segment_lap_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'end',
1: 'fail',
},
),
'segment_leaderboard_type': FieldType(
name='segment_leaderboard_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'overall',
1: 'personal_best',
2: 'connections',
3: 'group',
4: 'challenger',
5: 'kom',
6: 'qom',
7: 'pr',
8: 'goal',
9: 'rival',
10: 'club_leader',
},
),
'segment_selection_type': FieldType(
name='segment_selection_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'starred',
1: 'suggested',
},
),
'sensor_type': FieldType(
name='sensor_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'accelerometer',
1: 'gyroscope',
2: 'compass', # Magnetometer
},
),
'session_trigger': FieldType(
name='session_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'activity_end',
1: 'manual', # User changed sport.
2: 'auto_multi_sport', # Auto multi-sport feature is enabled and user pressed lap button to advance session.
3: 'fitness_equipment', # Auto sport change caused by user linking to fitness equipment.
},
),
'side': FieldType(
name='side',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'right',
1: 'left',
},
),
'source_type': FieldType(
name='source_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'ant', # External device connected with ANT
1: 'antplus', # External device connected with ANT+
2: 'bluetooth', # External device connected with BT
3: 'bluetooth_low_energy', # External device connected with BLE
4: 'wifi', # External device connected with Wifi
5: 'local', # Onboard device
},
),
    'sport': FieldType(
        name='sport',
        base_type=BASE_TYPES[0x00], # enum
        values={
            0: 'generic',
            1: 'running',
            2: 'cycling',
            3: 'transition', # Multisport transition
            4: 'fitness_equipment',
            5: 'swimming',
            6: 'basketball',
            7: 'soccer',
            8: 'tennis',
            9: 'american_football',
            10: 'training',
            11: 'walking',
            12: 'cross_country_skiing',
            13: 'alpine_skiing',
            14: 'snowboarding',
            15: 'rowing',
            16: 'mountaineering',
            17: 'hiking',
            18: 'multisport',
            19: 'paddling',
            20: 'flying',
            21: 'e_biking',
            22: 'motorcycling',
            23: 'boating',
            24: 'driving',
            25: 'golf',
            26: 'hang_gliding',
            27: 'horseback_riding',
            28: 'hunting',
            29: 'fishing',
            30: 'inline_skating',
            31: 'rock_climbing',
            32: 'sailing',
            33: 'ice_skating',
            34: 'sky_diving',
            35: 'snowshoeing',
            36: 'snowmobiling',
            37: 'stand_up_paddleboarding',
            38: 'surfing',
            39: 'wakeboarding',
            40: 'water_skiing',
            41: 'kayaking',
            42: 'rafting',
            43: 'windsurfing',
            44: 'kitesurfing',
            45: 'tactical',
            46: 'jumpmaster',
            47: 'boxing',
            48: 'floor_climbing',
            254: 'all', # All is for goals only to include all sports.
        },
    ),
    'sport_bits_0': FieldType( # Bit field corresponding to sport enum type (1 << sport).
        name='sport_bits_0',
        base_type=BASE_TYPES[0x0A], # uint8z
        values={
            0x01: 'generic',
            0x02: 'running',
            0x04: 'cycling',
            0x08: 'transition', # Multisport transition
            0x10: 'fitness_equipment',
            0x20: 'swimming',
            0x40: 'basketball',
            0x80: 'soccer',
        },
    ),
'sport_bits_1': FieldType( # Bit field corresponding to sport enum type (1 << (sport-8)).
name='sport_bits_1',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'tennis',
0x02: 'american_football',
0x04: 'training',
0x08: 'walking',
0x10: 'cross_country_skiing',
0x20: 'alpine_skiing',
0x40: 'snowboarding',
0x80: 'rowing',
},
),
'sport_bits_2': FieldType( # Bit field corresponding to sport enum type (1 << (sport-16)).
name='sport_bits_2',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'mountaineering',
0x02: 'hiking',
0x04: 'multisport',
0x08: 'paddling',
0x10: 'flying',
0x20: 'e_biking',
0x40: 'motorcycling',
0x80: 'boating',
},
),
'sport_bits_3': FieldType( # Bit field corresponding to sport enum type (1 << (sport-24)).
name='sport_bits_3',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'driving',
0x02: 'golf',
0x04: 'hang_gliding',
0x08: 'horseback_riding',
0x10: 'hunting',
0x20: 'fishing',
0x40: 'inline_skating',
0x80: 'rock_climbing',
},
),
'sport_bits_4': FieldType( # Bit field corresponding to sport enum type (1 << (sport-32)).
name='sport_bits_4',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'sailing',
0x02: 'ice_skating',
0x04: 'sky_diving',
0x08: 'snowshoeing',
0x10: 'snowmobiling',
0x20: 'stand_up_paddleboarding',
0x40: 'surfing',
0x80: 'wakeboarding',
},
),
'sport_bits_5': FieldType( # Bit field corresponding to sport enum type (1 << (sport-40)).
name='sport_bits_5',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'water_skiing',
0x02: 'kayaking',
0x04: 'rafting',
0x08: 'windsurfing',
0x10: 'kitesurfing',
0x20: 'tactical',
0x40: 'jumpmaster',
0x80: 'boxing',
},
),
'sport_bits_6': FieldType( # Bit field corresponding to sport enum type (1 << (sport-48)).
name='sport_bits_6',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'floor_climbing',
},
),
'sport_event': FieldType(
name='sport_event',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'uncategorized',
1: 'geocaching',
2: 'fitness',
3: 'recreation',
4: 'race',
5: 'special_event',
6: 'training',
7: 'transportation',
8: 'touring',
},
),
'stroke_type': FieldType(
name='stroke_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_event',
1: 'other', # stroke was detected but cannot be identified
2: 'serve',
3: 'forehand',
4: 'backhand',
5: 'smash',
},
),
'sub_sport': FieldType(
name='sub_sport',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'treadmill', # Run/Fitness Equipment
2: 'street', # Run
3: 'trail', # Run
4: 'track', # Run
5: 'spin', # Cycling
6: 'indoor_cycling', # Cycling/Fitness Equipment
7: 'road', # Cycling
8: 'mountain', # Cycling
9: 'downhill', # Cycling
10: 'recumbent', # Cycling
11: 'cyclocross', # Cycling
12: 'hand_cycling', # Cycling
13: 'track_cycling', # Cycling
14: 'indoor_rowing', # Fitness Equipment
15: 'elliptical', # Fitness Equipment
16: 'stair_climbing', # Fitness Equipment
17: 'lap_swimming', # Swimming
18: 'open_water', # Swimming
19: 'flexibility_training', # Training
20: 'strength_training', # Training
21: 'warm_up', # Tennis
22: 'match', # Tennis
23: 'exercise', # Tennis
24: 'challenge', # Tennis
25: 'indoor_skiing', # Fitness Equipment
26: 'cardio_training', # Training
27: 'indoor_walking', # Walking/Fitness Equipment
28: 'e_bike_fitness', # E-Biking
29: 'bmx', # Cycling
30: 'casual_walking', # Walking
31: 'speed_walking', # Walking
32: 'bike_to_run_transition', # Transition
33: 'run_to_bike_transition', # Transition
34: 'swim_to_bike_transition', # Transition
35: 'atv', # Motorcycling
36: 'motocross', # Motorcycling
37: 'backcountry', # Alpine Skiing/Snowboarding
38: 'resort', # Alpine Skiing/Snowboarding
39: 'rc_drone', # Flying
40: 'wingsuit', # Flying
41: 'whitewater', # Kayaking/Rafting
42: 'skate_skiing', # Cross Country Skiing
43: 'yoga', # Training
44: 'pilates', # Training
45: 'indoor_running', # Run
46: 'gravel_cycling', # Cycling
47: 'e_bike_mountain', # Cycling
48: 'commuting', # Cycling
49: 'mixed_surface', # Cycling
50: 'navigate',
51: 'track_me',
52: 'map',
254: 'all',
},
),
'supported_exd_screen_layouts': FieldType(
name='supported_exd_screen_layouts',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'full_screen',
0x00000002: 'half_vertical',
0x00000004: 'half_horizontal',
0x00000008: 'half_vertical_right_split',
0x00000010: 'half_horizontal_bottom_split',
0x00000020: 'full_quarter_split',
0x00000040: 'half_vertical_left_split',
0x00000080: 'half_horizontal_top_split',
},
),
'swim_stroke': FieldType(
name='swim_stroke',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'freestyle',
1: 'backstroke',
2: 'breaststroke',
3: 'butterfly',
4: 'drill',
5: 'mixed',
6: 'im', # IM is a mixed interval containing the same number of lengths for each of: Butterfly, Backstroke, Breaststroke, Freestyle, swam in that order.
},
),
'switch': FieldType(
name='switch',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'on',
2: 'auto',
},
),
'time_into_day': FieldType( # number of seconds into the day since 00:00:00 UTC
name='time_into_day',
base_type=BASE_TYPES[0x86], # uint32
),
'time_mode': FieldType(
name='time_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'hour12',
1: 'hour24', # Does not use a leading zero and has a colon
2: 'military', # Uses a leading zero and does not have a colon
3: 'hour_12_with_seconds',
4: 'hour_24_with_seconds',
5: 'utc',
},
),
'time_zone': FieldType(
name='time_zone',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'almaty',
1: 'bangkok',
2: 'bombay',
3: 'brasilia',
4: 'cairo',
5: 'cape_verde_is',
6: 'darwin',
7: 'eniwetok',
8: 'fiji',
9: 'hong_kong',
10: 'islamabad',
11: 'kabul',
12: 'magadan',
13: 'mid_atlantic',
14: 'moscow',
15: 'muscat',
16: 'newfoundland',
17: 'samoa',
18: 'sydney',
19: 'tehran',
20: 'tokyo',
21: 'us_alaska',
22: 'us_atlantic',
23: 'us_central',
24: 'us_eastern',
25: 'us_hawaii',
26: 'us_mountain',
27: 'us_pacific',
28: 'other',
29: 'auckland',
30: 'kathmandu',
31: 'europe_western_wet',
32: 'europe_central_cet',
33: 'europe_eastern_eet',
34: 'jakarta',
35: 'perth',
36: 'adelaide',
37: 'brisbane',
38: 'tasmania',
39: 'iceland',
40: 'amsterdam',
41: 'athens',
42: 'barcelona',
43: 'berlin',
44: 'brussels',
45: 'budapest',
46: 'copenhagen',
47: 'dublin',
48: 'helsinki',
49: 'lisbon',
50: 'london',
51: 'madrid',
52: 'munich',
53: 'oslo',
54: 'paris',
55: 'prague',
56: 'reykjavik',
57: 'rome',
58: 'stockholm',
59: 'vienna',
60: 'warsaw',
61: 'zurich',
62: 'quebec',
63: 'ontario',
64: 'manitoba',
65: 'saskatchewan',
66: 'alberta',
67: 'british_columbia',
68: 'boise',
69: 'boston',
70: 'chicago',
71: 'dallas',
72: 'denver',
73: 'kansas_city',
74: 'las_vegas',
75: 'los_angeles',
76: 'miami',
77: 'minneapolis',
78: 'new_york',
79: 'new_orleans',
80: 'phoenix',
81: 'santa_fe',
82: 'seattle',
83: 'washington_dc',
84: 'us_arizona',
85: 'chita',
86: 'ekaterinburg',
87: 'irkutsk',
88: 'kaliningrad',
89: 'krasnoyarsk',
90: 'novosibirsk',
91: 'petropavlovsk_kamchatskiy',
92: 'samara',
93: 'vladivostok',
94: 'mexico_central',
95: 'mexico_mountain',
96: 'mexico_pacific',
97: 'cape_town',
98: 'winkhoek',
99: 'lagos',
100: 'riyahd',
101: 'venezuela',
102: 'australia_lh',
103: 'santiago',
253: 'manual',
254: 'automatic',
},
),
'timer_trigger': FieldType( # timer event data
name='timer_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'auto',
2: 'fitness_equipment',
},
),
'turn_type': FieldType(
name='turn_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'arriving_idx',
1: 'arriving_left_idx',
2: 'arriving_right_idx',
3: 'arriving_via_idx',
4: 'arriving_via_left_idx',
5: 'arriving_via_right_idx',
6: 'bear_keep_left_idx',
7: 'bear_keep_right_idx',
8: 'continue_idx',
9: 'exit_left_idx',
10: 'exit_right_idx',
11: 'ferry_idx',
12: 'roundabout_45_idx',
13: 'roundabout_90_idx',
14: 'roundabout_135_idx',
15: 'roundabout_180_idx',
16: 'roundabout_225_idx',
17: 'roundabout_270_idx',
18: 'roundabout_315_idx',
19: 'roundabout_360_idx',
20: 'roundabout_neg_45_idx',
21: 'roundabout_neg_90_idx',
22: 'roundabout_neg_135_idx',
23: 'roundabout_neg_180_idx',
24: 'roundabout_neg_225_idx',
25: 'roundabout_neg_270_idx',
26: 'roundabout_neg_315_idx',
27: 'roundabout_neg_360_idx',
28: 'roundabout_generic_idx',
29: 'roundabout_neg_generic_idx',
30: 'sharp_turn_left_idx',
31: 'sharp_turn_right_idx',
32: 'turn_left_idx',
33: 'turn_right_idx',
34: 'uturn_left_idx',
35: 'uturn_right_idx',
36: 'icon_inv_idx',
37: 'icon_idx_cnt',
},
),
'user_local_id': FieldType(
name='user_local_id',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0000: 'local_min',
0x000F: 'local_max',
0x0010: 'stationary_min',
0x00FF: 'stationary_max',
0x0100: 'portable_min',
0xFFFE: 'portable_max',
},
),
'watchface_mode': FieldType(
name='watchface_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'digital',
1: 'analog',
2: 'connect_iq',
3: 'disabled',
},
),
'weather_report': FieldType(
name='weather_report',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'current',
1: 'forecast', # Deprecated use hourly_forecast instead
1: 'hourly_forecast',
2: 'daily_forecast',
},
),
'weather_severe_type': FieldType(
name='weather_severe_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'unspecified',
1: 'tornado',
2: 'tsunami',
3: 'hurricane',
4: 'extreme_wind',
5: 'typhoon',
6: 'inland_hurricane',
7: 'hurricane_force_wind',
8: 'waterspout',
9: 'severe_thunderstorm',
10: 'wreckhouse_winds',
11: 'les_suetes_wind',
12: 'avalanche',
13: 'flash_flood',
14: 'tropical_storm',
15: 'inland_tropical_storm',
16: 'blizzard',
17: 'ice_storm',
18: 'freezing_rain',
19: 'debris_flow',
20: 'flash_freeze',
21: 'dust_storm',
22: 'high_wind',
23: 'winter_storm',
24: 'heavy_freezing_spray',
25: 'extreme_cold',
26: 'wind_chill',
27: 'cold_wave',
28: 'heavy_snow_alert',
29: 'lake_effect_blowing_snow',
30: 'snow_squall',
31: 'lake_effect_snow',
32: 'winter_weather',
33: 'sleet',
34: 'snowfall',
35: 'snow_and_blowing_snow',
36: 'blowing_snow',
37: 'snow_alert',
38: 'arctic_outflow',
39: 'freezing_drizzle',
40: 'storm',
41: 'storm_surge',
42: 'rainfall',
43: 'areal_flood',
44: 'coastal_flood',
45: 'lakeshore_flood',
46: 'excessive_heat',
47: 'heat',
48: 'weather',
49: 'high_heat_and_humidity',
50: 'humidex_and_health',
51: 'humidex',
52: 'gale',
53: 'freezing_spray',
54: 'special_marine',
55: 'squall',
56: 'strong_wind',
57: 'lake_wind',
58: 'marine_weather',
59: 'wind',
60: 'small_craft_hazardous_seas',
61: 'hazardous_seas',
62: 'small_craft',
63: 'small_craft_winds',
64: 'small_craft_rough_bar',
65: 'high_water_level',
66: 'ashfall',
67: 'freezing_fog',
68: 'dense_fog',
69: 'dense_smoke',
70: 'blowing_dust',
71: 'hard_freeze',
72: 'freeze',
73: 'frost',
74: 'fire_weather',
75: 'flood',
76: 'rip_tide',
77: 'high_surf',
78: 'smog',
79: 'air_quality',
80: 'brisk_wind',
81: 'air_stagnation',
82: 'low_water',
83: 'hydrological',
84: 'special_weather',
},
),
'weather_severity': FieldType(
name='weather_severity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'unknown',
1: 'warning',
2: 'watch',
3: 'advisory',
4: 'statement',
},
),
'weather_status': FieldType(
name='weather_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'clear',
1: 'partly_cloudy',
2: 'mostly_cloudy',
3: 'rain',
4: 'snow',
5: 'windy',
6: 'thunderstorms',
7: 'wintry_mix',
8: 'fog',
11: 'hazy',
12: 'hail',
13: 'scattered_showers',
14: 'scattered_thunderstorms',
15: 'unknown_precipitation',
16: 'light_rain',
17: 'heavy_rain',
18: 'light_snow',
19: 'heavy_snow',
20: 'light_rain_snow',
21: 'heavy_rain_snow',
22: 'cloudy',
},
),
'weight': FieldType(
name='weight',
base_type=BASE_TYPES[0x84], # uint16
values={
0xFFFE: 'calculating',
},
),
'wkt_step_duration': FieldType(
name='wkt_step_duration',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'hr_less_than',
3: 'hr_greater_than',
4: 'calories',
5: 'open',
6: 'repeat_until_steps_cmplt',
7: 'repeat_until_time',
8: 'repeat_until_distance',
9: 'repeat_until_calories',
10: 'repeat_until_hr_less_than',
11: 'repeat_until_hr_greater_than',
12: 'repeat_until_power_less_than',
13: 'repeat_until_power_greater_than',
14: 'power_less_than',
15: 'power_greater_than',
16: 'training_peaks_tss',
17: 'repeat_until_power_last_lap_less_than',
18: 'repeat_until_max_power_last_lap_less_than',
19: 'power_3s_less_than',
20: 'power_10s_less_than',
21: 'power_30s_less_than',
22: 'power_3s_greater_than',
23: 'power_10s_greater_than',
24: 'power_30s_greater_than',
25: 'power_lap_less_than',
26: 'power_lap_greater_than',
27: 'repeat_until_training_peaks_tss',
28: 'repetition_time',
},
),
'wkt_step_target': FieldType(
name='wkt_step_target',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'speed',
1: 'heart_rate',
2: 'open',
3: 'cadence',
4: 'power',
5: 'grade',
6: 'resistance',
7: 'power_3s',
8: 'power_10s',
9: 'power_30s',
10: 'power_lap',
11: 'swim_stroke',
12: 'speed_lap',
13: 'heart_rate_lap',
},
),
'workout_capabilities': FieldType(
name='workout_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'interval',
0x00000002: 'custom',
0x00000004: 'fitness_equipment',
0x00000008: 'firstbeat',
0x00000010: 'new_leaf',
0x00000020: 'tcx', # For backwards compatibility. Watch should add missing id fields then clear flag.
0x00000080: 'speed', # Speed source required for workout step.
0x00000100: 'heart_rate', # Heart rate source required for workout step.
0x00000200: 'distance', # Distance source required for workout step.
0x00000400: 'cadence', # Cadence source required for workout step.
0x00000800: 'power', # Power source required for workout step.
0x00001000: 'grade', # Grade source required for workout step.
0x00002000: 'resistance', # Resistance source required for workout step.
0x00004000: 'protected',
},
),
'workout_equipment': FieldType(
name='workout_equipment',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'none',
1: 'swim_fins',
2: 'swim_kickboard',
3: 'swim_paddles',
4: 'swim_pull_buoy',
5: 'swim_snorkel',
},
),
'workout_hr': FieldType( # 0 - 100 indicates% of max hr; >100 indicates bpm (255 max) plus 100
name='workout_hr',
base_type=BASE_TYPES[0x86], # uint32
values={
100: 'bpm_offset',
},
),
'workout_power': FieldType( # 0 - 1000 indicates % of functional threshold power; >1000 indicates watts plus 1000.
name='workout_power',
base_type=BASE_TYPES[0x86], # uint32
values={
1000: 'watts_offset',
},
),
}
# Shared Field instance for the common `timestamp` field (def_num=253, units of
# seconds, typed as FIELD_TYPES['date_time']).  Reused by reference inside many
# MESSAGE_TYPES field maps (e.g. the session message maps key 253 to this object)
# instead of constructing an identical Field per message type.
FIELD_TYPE_TIMESTAMP = Field(name='timestamp', type=FIELD_TYPES['date_time'], def_num=253, units='s')
MESSAGE_TYPES = {
############################ Common Messages #############################
0: MessageType( # Must be first message in file.
name='file_id',
mesg_num=0,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=1,
),
2: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=2,
subfields=(
SubField(
name='garmin_product',
def_num=2,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=1,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
4: Field( # Only set for files that are can be created/erased.
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=4,
),
5: Field( # Only set for files that are not created/erased.
name='number',
type=BASE_TYPES[0x84], # uint16
def_num=5,
),
8: Field( # Optional free form string to indicate the devices name or model
name='product_name',
type=BASE_TYPES[0x07], # string
def_num=8,
),
},
),
#################################### ####################################
1: MessageType(
name='capabilities',
mesg_num=1,
fields={
0: Field( # Use language_bits_x types where x is index of array.
name='languages',
type=BASE_TYPES[0x0A], # uint8z
def_num=0,
),
1: Field( # Use sport_bits_x types where x is index of array.
name='sports',
type=FIELD_TYPES['sport_bits_0'],
def_num=1,
),
21: Field(
name='workouts_supported',
type=FIELD_TYPES['workout_capabilities'],
def_num=21,
),
23: Field(
name='connectivity_supported',
type=FIELD_TYPES['connectivity_capabilities'],
def_num=23,
),
},
),
3: MessageType(
name='user_profile',
mesg_num=3,
fields={
0: Field(
name='friendly_name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='gender',
type=FIELD_TYPES['gender'],
def_num=1,
),
2: Field(
name='age',
type=BASE_TYPES[0x02], # uint8
def_num=2,
units='years',
),
3: Field(
name='height',
type=BASE_TYPES[0x02], # uint8
def_num=3,
scale=100,
units='m',
),
4: Field(
name='weight',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=10,
units='kg',
),
5: Field(
name='language',
type=FIELD_TYPES['language'],
def_num=5,
),
6: Field(
name='elev_setting',
type=FIELD_TYPES['display_measure'],
def_num=6,
),
7: Field(
name='weight_setting',
type=FIELD_TYPES['display_measure'],
def_num=7,
),
8: Field(
name='resting_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=8,
units='bpm',
),
9: Field(
name='default_max_running_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=9,
units='bpm',
),
10: Field(
name='default_max_biking_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=10,
units='bpm',
),
11: Field(
name='default_max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=11,
units='bpm',
),
12: Field(
name='hr_setting',
type=FIELD_TYPES['display_heart'],
def_num=12,
),
13: Field(
name='speed_setting',
type=FIELD_TYPES['display_measure'],
def_num=13,
),
14: Field(
name='dist_setting',
type=FIELD_TYPES['display_measure'],
def_num=14,
),
16: Field(
name='power_setting',
type=FIELD_TYPES['display_power'],
def_num=16,
),
17: Field(
name='activity_class',
type=FIELD_TYPES['activity_class'],
def_num=17,
),
18: Field(
name='position_setting',
type=FIELD_TYPES['display_position'],
def_num=18,
),
21: Field(
name='temperature_setting',
type=FIELD_TYPES['display_measure'],
def_num=21,
),
22: Field(
name='local_id',
type=FIELD_TYPES['user_local_id'],
def_num=22,
),
23: Field(
name='global_id',
type=BASE_TYPES[0x0D], # byte
def_num=23,
),
28: Field( # Typical wake time
name='wake_time',
type=FIELD_TYPES['localtime_into_day'],
def_num=28,
),
29: Field( # Typical bed time
name='sleep_time',
type=FIELD_TYPES['localtime_into_day'],
def_num=29,
),
30: Field(
name='height_setting',
type=FIELD_TYPES['display_measure'],
def_num=30,
),
31: Field( # User defined running step length set to 0 for auto length
name='user_running_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=31,
scale=1000,
units='m',
),
32: Field( # User defined walking step length set to 0 for auto length
name='user_walking_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=32,
scale=1000,
units='m',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
4: MessageType(
name='hrm_profile',
mesg_num=4,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field(
name='hrm_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=1,
),
2: Field(
name='log_hrv',
type=FIELD_TYPES['bool'],
def_num=2,
),
3: Field(
name='hrm_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=3,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
5: MessageType(
name='sdm_profile',
mesg_num=5,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field(
name='sdm_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=1,
),
2: Field(
name='sdm_cal_factor',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=10,
units='%',
),
3: Field(
name='odometer',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field( # Use footpod for speed source instead of GPS
name='speed_source',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='sdm_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=5,
),
7: Field( # Rollover counter that can be used to extend the odometer
name='odometer_rollover',
type=BASE_TYPES[0x02], # uint8
def_num=7,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
6: MessageType(
name='bike_profile',
mesg_num=6,
fields={
0: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=1,
),
2: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=2,
),
3: Field(
name='odometer',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field(
name='bike_spd_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=4,
),
5: Field(
name='bike_cad_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=5,
),
6: Field(
name='bike_spdcad_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=6,
),
7: Field(
name='bike_power_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=7,
),
8: Field(
name='custom_wheelsize',
type=BASE_TYPES[0x84], # uint16
def_num=8,
scale=1000,
units='m',
),
9: Field(
name='auto_wheelsize',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=1000,
units='m',
),
10: Field(
name='bike_weight',
type=BASE_TYPES[0x84], # uint16
def_num=10,
scale=10,
units='kg',
),
11: Field(
name='power_cal_factor',
type=BASE_TYPES[0x84], # uint16
def_num=11,
scale=10,
units='%',
),
12: Field(
name='auto_wheel_cal',
type=FIELD_TYPES['bool'],
def_num=12,
),
13: Field(
name='auto_power_zero',
type=FIELD_TYPES['bool'],
def_num=13,
),
14: Field(
name='id',
type=BASE_TYPES[0x02], # uint8
def_num=14,
),
15: Field(
name='spd_enabled',
type=FIELD_TYPES['bool'],
def_num=15,
),
16: Field(
name='cad_enabled',
type=FIELD_TYPES['bool'],
def_num=16,
),
17: Field(
name='spdcad_enabled',
type=FIELD_TYPES['bool'],
def_num=17,
),
18: Field(
name='power_enabled',
type=FIELD_TYPES['bool'],
def_num=18,
),
19: Field(
name='crank_length',
type=BASE_TYPES[0x02], # uint8
def_num=19,
scale=2,
offset=-110,
units='mm',
),
20: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=20,
),
21: Field(
name='bike_spd_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=21,
),
22: Field(
name='bike_cad_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=22,
),
23: Field(
name='bike_spdcad_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=23,
),
24: Field(
name='bike_power_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=24,
),
37: Field( # Rollover counter that can be used to extend the odometer
name='odometer_rollover',
type=BASE_TYPES[0x02], # uint8
def_num=37,
),
38: Field( # Number of front gears
name='front_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=38,
),
39: Field( # Number of teeth on each gear 0 is innermost
name='front_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=39,
),
40: Field( # Number of rear gears
name='rear_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=40,
),
41: Field( # Number of teeth on each gear 0 is innermost
name='rear_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=41,
),
44: Field(
name='shimano_di2_enabled',
type=FIELD_TYPES['bool'],
def_num=44,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
8: MessageType(
name='hr_zone',
mesg_num=8,
fields={
1: Field(
name='high_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=1,
units='bpm',
),
2: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=2,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
9: MessageType(
name='power_zone',
mesg_num=9,
fields={
1: Field(
name='high_value',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='watts',
),
2: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=2,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
10: MessageType(
name='met_zone',
mesg_num=10,
fields={
1: Field(
name='high_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=10,
units='kcal/min',
),
3: Field(
name='fat_calories',
type=BASE_TYPES[0x02], # uint8
def_num=3,
scale=10,
units='kcal/min',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
12: MessageType(
name='sport',
mesg_num=12,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
3: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
},
),
18: MessageType(
name='session',
mesg_num=18,
fields={
0: Field( # session
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field( # stop
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=5,
),
6: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=6,
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strides',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strides',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
ReferenceField(
name='sport',
def_num=5,
value='walking',
raw_value=11,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
13: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=13,
units='kcal',
),
14: Field( # total_distance / total_timer_time
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_avg_speed',
def_num=124,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
15: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=15,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_max_speed',
def_num=125,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
16: Field( # average heart rate (excludes pause time)
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='bpm',
),
18: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
subfields=(
SubField(
name='avg_running_cadence',
def_num=18,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
),
),
),
),
19: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=19,
units='rpm',
subfields=(
SubField(
name='max_running_cadence',
def_num=19,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
),
),
),
),
20: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='watts',
),
22: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=23,
units='m',
),
24: Field(
name='total_training_effect',
type=BASE_TYPES[0x02], # uint8
def_num=24,
scale=10,
),
25: Field(
name='first_lap_index',
type=BASE_TYPES[0x84], # uint16
def_num=25,
),
26: Field(
name='num_laps',
type=BASE_TYPES[0x84], # uint16
def_num=26,
),
27: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=27,
),
28: Field(
name='trigger',
type=FIELD_TYPES['session_trigger'],
def_num=28,
),
29: Field(
name='nec_lat',
type=BASE_TYPES[0x85], # sint32
def_num=29,
units='semicircles',
),
30: Field(
name='nec_long',
type=BASE_TYPES[0x85], # sint32
def_num=30,
units='semicircles',
),
31: Field(
name='swc_lat',
type=BASE_TYPES[0x85], # sint32
def_num=31,
units='semicircles',
),
32: Field(
name='swc_long',
type=BASE_TYPES[0x85], # sint32
def_num=32,
units='semicircles',
),
34: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=34,
units='watts',
),
35: Field(
name='training_stress_score',
type=BASE_TYPES[0x84], # uint16
def_num=35,
scale=10,
units='tss',
),
36: Field(
name='intensity_factor',
type=BASE_TYPES[0x84], # uint16
def_num=36,
scale=1000,
units='if',
),
37: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=37,
),
41: Field(
name='avg_stroke_count',
type=BASE_TYPES[0x86], # uint32
def_num=41,
scale=10,
units='strokes/lap',
),
42: Field(
name='avg_stroke_distance',
type=BASE_TYPES[0x84], # uint16
def_num=42,
scale=100,
units='m',
),
43: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=43,
units='swim_stroke',
),
44: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=44,
scale=100,
units='m',
),
45: Field(
name='threshold_power',
type=BASE_TYPES[0x84], # uint16
def_num=45,
units='watts',
),
46: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=46,
),
47: Field( # # of active lengths of swim pool
name='num_active_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=47,
units='lengths',
),
48: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=48,
units='J',
),
49: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=49,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_avg_altitude',
def_num=126,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
50: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=50,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_max_altitude',
def_num=128,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
51: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=51,
units='m',
),
52: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=52,
scale=100,
units='%',
),
53: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=53,
scale=100,
units='%',
),
54: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=54,
scale=100,
units='%',
),
55: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=55,
scale=100,
units='%',
),
56: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=56,
scale=100,
units='%',
),
57: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=57,
units='C',
),
58: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=58,
units='C',
),
59: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=59,
scale=1000,
units='s',
),
60: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=60,
scale=1000,
units='m/s',
),
61: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=61,
scale=1000,
units='m/s',
),
62: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=62,
scale=1000,
units='m/s',
),
63: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=63,
scale=1000,
units='m/s',
),
64: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=64,
units='bpm',
),
65: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=65,
scale=1000,
units='s',
),
66: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=66,
scale=1000,
units='s',
),
67: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=67,
scale=1000,
units='s',
),
68: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=68,
scale=1000,
units='s',
),
69: Field(
name='avg_lap_time',
type=BASE_TYPES[0x86], # uint32
def_num=69,
scale=1000,
units='s',
),
70: Field(
name='best_lap_index',
type=BASE_TYPES[0x84], # uint16
def_num=70,
),
71: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=71,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_min_altitude',
def_num=127,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
82: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=82,
),
83: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=83,
),
84: Field(
name='opponent_name',
type=BASE_TYPES[0x07], # string
def_num=84,
),
85: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=85,
units='counts',
),
86: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=86,
units='counts',
),
87: Field(
name='max_ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=87,
scale=100,
units='m/s',
),
88: Field(
name='avg_ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=88,
scale=100,
units='m/s',
),
89: Field(
name='avg_vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=89,
scale=10,
units='mm',
),
90: Field(
name='avg_stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=90,
scale=100,
units='percent',
),
91: Field(
name='avg_stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=91,
scale=10,
units='ms',
),
92: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=92,
scale=128,
units='rpm',
),
93: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=93,
scale=128,
units='rpm',
),
94: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=94,
scale=128,
units='cycles',
),
95: Field( # Avg saturated and unsaturated hemoglobin
name='avg_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=95,
scale=100,
units='g/dL',
),
96: Field( # Min saturated and unsaturated hemoglobin
name='min_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=96,
scale=100,
units='g/dL',
),
97: Field( # Max saturated and unsaturated hemoglobin
name='max_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=97,
scale=100,
units='g/dL',
),
98: Field( # Avg percentage of hemoglobin saturated with oxygen
name='avg_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=98,
scale=10,
units='%',
),
99: Field( # Min percentage of hemoglobin saturated with oxygen
name='min_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=99,
scale=10,
units='%',
),
100: Field( # Max percentage of hemoglobin saturated with oxygen
name='max_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=100,
scale=10,
units='%',
),
101: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=101,
scale=2,
units='percent',
),
102: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=102,
scale=2,
units='percent',
),
103: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=103,
scale=2,
units='percent',
),
104: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=104,
scale=2,
units='percent',
),
105: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=105,
scale=2,
units='percent',
),
111: Field(
name='sport_index',
type=BASE_TYPES[0x02], # uint8
def_num=111,
),
112: Field( # Total time spend in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=112,
scale=1000,
units='s',
),
113: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=113,
),
114: Field( # Average platform center offset Left
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=114,
units='mm',
),
115: Field( # Average platform center offset Right
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=115,
units='mm',
),
116: Field( # Average left power phase angles. Indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=116,
scale=0.7111111,
units='degrees',
),
117: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=117,
scale=0.7111111,
units='degrees',
),
118: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=118,
scale=0.7111111,
units='degrees',
),
119: Field( # Average right power phase peak angles data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=119,
scale=0.7111111,
units='degrees',
),
120: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=120,
units='watts',
),
121: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=121,
units='watts',
),
122: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=122,
units='rpm',
),
123: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=123,
units='rpm',
),
124: Field( # total_distance / total_timer_time
name='enhanced_avg_speed',
type=BASE_TYPES[0x86], # uint32
def_num=124,
scale=1000,
units='m/s',
),
125: Field(
name='enhanced_max_speed',
type=BASE_TYPES[0x86], # uint32
def_num=125,
scale=1000,
units='m/s',
),
126: Field(
name='enhanced_avg_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=126,
scale=5,
offset=500,
units='m',
),
127: Field(
name='enhanced_min_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=127,
scale=5,
offset=500,
units='m',
),
128: Field(
name='enhanced_max_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=128,
scale=5,
offset=500,
units='m',
),
129: Field( # lev average motor power during session
name='avg_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=129,
units='watts',
),
130: Field( # lev maximum motor power during session
name='max_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=130,
units='watts',
),
131: Field( # lev battery consumption during session
name='lev_battery_consumption',
type=BASE_TYPES[0x02], # uint8
def_num=131,
scale=2,
units='percent',
),
132: Field(
name='avg_vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=132,
scale=100,
units='percent',
),
133: Field(
name='avg_stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=133,
scale=100,
units='percent',
),
134: Field(
name='avg_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=134,
scale=10,
units='mm',
),
137: Field(
name='total_anaerobic_training_effect',
type=BASE_TYPES[0x02], # uint8
def_num=137,
scale=10,
),
139: Field(
name='avg_vam',
type=BASE_TYPES[0x84], # uint16
def_num=139,
scale=1000,
units='m/s',
),
            253: FIELD_TYPE_TIMESTAMP,  # Session end time.
254: Field( # Selected bit is set for the current session.
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
19: MessageType(
name='lap',
mesg_num=19,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='end_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=5,
units='semicircles',
),
6: Field(
name='end_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=6,
units='semicircles',
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strides',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strides',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
ReferenceField(
name='sport',
def_num=25,
value='walking',
raw_value=11,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=12,
units='kcal',
),
13: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=13,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_avg_speed',
def_num=110,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
14: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_max_speed',
def_num=111,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
15: Field(
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=15,
units='bpm',
),
16: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='rpm',
subfields=(
SubField(
name='avg_running_cadence',
def_num=17,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
),
),
),
),
18: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
subfields=(
SubField(
name='max_running_cadence',
def_num=18,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
),
),
),
),
19: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='watts',
),
20: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='m',
),
22: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='intensity',
type=FIELD_TYPES['intensity'],
def_num=23,
),
24: Field(
name='lap_trigger',
type=FIELD_TYPES['lap_trigger'],
def_num=24,
),
25: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=25,
),
26: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=26,
),
32: Field( # # of lengths of swim pool
name='num_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=32,
units='lengths',
),
33: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='watts',
),
34: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=34,
),
35: Field(
name='first_length_index',
type=BASE_TYPES[0x84], # uint16
def_num=35,
),
37: Field(
name='avg_stroke_distance',
type=BASE_TYPES[0x84], # uint16
def_num=37,
scale=100,
units='m',
),
38: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=38,
),
39: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=39,
),
40: Field( # # of active lengths of swim pool
name='num_active_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=40,
units='lengths',
),
41: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=41,
units='J',
),
42: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=42,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_avg_altitude',
def_num=112,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
43: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=43,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_max_altitude',
def_num=114,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
44: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=44,
units='m',
),
45: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=45,
scale=100,
units='%',
),
46: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=46,
scale=100,
units='%',
),
47: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=47,
scale=100,
units='%',
),
48: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=48,
scale=100,
units='%',
),
49: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=49,
scale=100,
units='%',
),
50: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=50,
units='C',
),
51: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=51,
units='C',
),
52: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=52,
scale=1000,
units='s',
),
53: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=53,
scale=1000,
units='m/s',
),
54: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=54,
scale=1000,
units='m/s',
),
55: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=55,
scale=1000,
units='m/s',
),
56: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=56,
scale=1000,
units='m/s',
),
57: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=57,
scale=1000,
units='s',
),
58: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=58,
scale=1000,
units='s',
),
59: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=59,
scale=1000,
units='s',
),
60: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=60,
scale=1000,
units='s',
),
61: Field(
name='repetition_num',
type=BASE_TYPES[0x84], # uint16
def_num=61,
),
62: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=62,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_min_altitude',
def_num=113,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
63: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=63,
units='bpm',
),
71: Field(
name='wkt_step_index',
type=FIELD_TYPES['message_index'],
def_num=71,
),
74: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=74,
),
75: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=75,
units='counts',
),
76: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=76,
units='counts',
),
77: Field(
name='avg_vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=77,
scale=10,
units='mm',
),
78: Field(
name='avg_stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=78,
scale=100,
units='percent',
),
79: Field(
name='avg_stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=79,
scale=10,
units='ms',
),
80: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=80,
scale=128,
units='rpm',
),
81: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=81,
scale=128,
units='rpm',
),
82: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=82,
scale=128,
units='cycles',
),
83: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=83,
),
84: Field( # Avg saturated and unsaturated hemoglobin
name='avg_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=84,
scale=100,
units='g/dL',
),
85: Field( # Min saturated and unsaturated hemoglobin
name='min_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=85,
scale=100,
units='g/dL',
),
86: Field( # Max saturated and unsaturated hemoglobin
name='max_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=86,
scale=100,
units='g/dL',
),
87: Field( # Avg percentage of hemoglobin saturated with oxygen
name='avg_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=87,
scale=10,
units='%',
),
88: Field( # Min percentage of hemoglobin saturated with oxygen
name='min_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=88,
scale=10,
units='%',
),
89: Field( # Max percentage of hemoglobin saturated with oxygen
name='max_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=89,
scale=10,
units='%',
),
91: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=91,
scale=2,
units='percent',
),
92: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=92,
scale=2,
units='percent',
),
93: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=93,
scale=2,
units='percent',
),
94: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=94,
scale=2,
units='percent',
),
95: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=95,
scale=2,
units='percent',
),
98: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=98,
scale=1000,
units='s',
),
99: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=99,
),
100: Field( # Average left platform center offset
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=100,
units='mm',
),
101: Field( # Average right platform center offset
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=101,
units='mm',
),
102: Field( # Average left power phase angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=102,
scale=0.7111111,
units='degrees',
),
103: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=103,
scale=0.7111111,
units='degrees',
),
104: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=104,
scale=0.7111111,
units='degrees',
),
105: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=105,
scale=0.7111111,
units='degrees',
),
106: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=106,
units='watts',
),
107: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=107,
units='watts',
),
108: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=108,
units='rpm',
),
109: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=109,
units='rpm',
),
110: Field(
name='enhanced_avg_speed',
type=BASE_TYPES[0x86], # uint32
def_num=110,
scale=1000,
units='m/s',
),
111: Field(
name='enhanced_max_speed',
type=BASE_TYPES[0x86], # uint32
def_num=111,
scale=1000,
units='m/s',
),
112: Field(
name='enhanced_avg_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=112,
scale=5,
offset=500,
units='m',
),
113: Field(
name='enhanced_min_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=113,
scale=5,
offset=500,
units='m',
),
114: Field(
name='enhanced_max_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=114,
scale=5,
offset=500,
units='m',
),
115: Field( # lev average motor power during lap
name='avg_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=115,
units='watts',
),
116: Field( # lev maximum motor power during lap
name='max_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=116,
units='watts',
),
117: Field( # lev battery consumption during lap
name='lev_battery_consumption',
type=BASE_TYPES[0x02], # uint8
def_num=117,
scale=2,
units='percent',
),
118: Field(
name='avg_vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=118,
scale=100,
units='percent',
),
119: Field(
name='avg_stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=119,
scale=100,
units='percent',
),
120: Field(
name='avg_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=120,
scale=10,
units='mm',
),
121: Field(
name='avg_vam',
type=BASE_TYPES[0x84], # uint16
def_num=121,
scale=1000,
units='m/s',
),
253: FIELD_TYPE_TIMESTAMP, # Lap end time.
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
20: MessageType(
name='record',
mesg_num=20,
fields={
0: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=0,
units='semicircles',
),
1: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='altitude',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_altitude',
def_num=78,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
3: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=3,
units='bpm',
),
4: Field(
name='cadence',
type=BASE_TYPES[0x02], # uint8
def_num=4,
units='rpm',
),
5: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=5,
scale=100,
units='m',
),
6: Field(
name='speed',
type=BASE_TYPES[0x84], # uint16
def_num=6,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_speed',
def_num=73,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
7: Field(
name='power',
type=BASE_TYPES[0x84], # uint16
def_num=7,
units='watts',
),
8: Field(
name='compressed_speed_distance',
type=BASE_TYPES[0x0D], # byte
def_num=8,
components=(
ComponentField(
name='speed',
def_num=6,
scale=100,
units='m/s',
accumulate=False,
bits=12,
bit_offset=0,
),
ComponentField(
name='distance',
def_num=5,
scale=16,
units='m',
accumulate=True,
bits=12,
bit_offset=12,
),
),
),
9: Field(
name='grade',
type=BASE_TYPES[0x83], # sint16
def_num=9,
scale=100,
units='%',
),
10: Field( # Relative. 0 is none 254 is Max.
name='resistance',
type=BASE_TYPES[0x02], # uint8
def_num=10,
),
11: Field(
name='time_from_course',
type=BASE_TYPES[0x85], # sint32
def_num=11,
scale=1000,
units='s',
),
12: Field(
name='cycle_length',
type=BASE_TYPES[0x02], # uint8
def_num=12,
scale=100,
units='m',
),
13: Field(
name='temperature',
type=BASE_TYPES[0x01], # sint8
def_num=13,
units='C',
),
17: Field( # Speed at 1s intervals. Timestamp field indicates time of last array element.
name='speed_1s',
type=BASE_TYPES[0x02], # uint8
def_num=17,
scale=16,
units='m/s',
),
18: Field(
name='cycles',
type=BASE_TYPES[0x02], # uint8
def_num=18,
components=(
ComponentField(
name='total_cycles',
def_num=19,
units='cycles',
accumulate=True,
bits=8,
bit_offset=0,
),
),
),
19: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=19,
units='cycles',
),
28: Field(
name='compressed_accumulated_power',
type=BASE_TYPES[0x84], # uint16
def_num=28,
components=(
ComponentField(
name='accumulated_power',
def_num=29,
units='watts',
accumulate=True,
bits=16,
bit_offset=0,
),
),
),
29: Field(
name='accumulated_power',
type=BASE_TYPES[0x86], # uint32
def_num=29,
units='watts',
),
30: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance'],
def_num=30,
),
31: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=31,
units='m',
),
32: Field(
name='vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=32,
scale=1000,
units='m/s',
),
33: Field(
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='kcal',
),
39: Field(
name='vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=39,
scale=10,
units='mm',
),
40: Field(
name='stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=40,
scale=100,
units='percent',
),
41: Field(
name='stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=41,
scale=10,
units='ms',
),
42: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=42,
),
43: Field(
name='left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=43,
scale=2,
units='percent',
),
44: Field(
name='right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=44,
scale=2,
units='percent',
),
45: Field(
name='left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=45,
scale=2,
units='percent',
),
46: Field(
name='right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=46,
scale=2,
units='percent',
),
47: Field(
name='combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=47,
scale=2,
units='percent',
),
48: Field(
name='time128',
type=BASE_TYPES[0x02], # uint8
def_num=48,
scale=128,
units='s',
),
49: Field(
name='stroke_type',
type=FIELD_TYPES['stroke_type'],
def_num=49,
),
50: Field(
name='zone',
type=BASE_TYPES[0x02], # uint8
def_num=50,
),
51: Field(
name='ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=51,
scale=100,
units='m/s',
),
52: Field( # Log cadence and fractional cadence for backwards compatability
name='cadence256',
type=BASE_TYPES[0x84], # uint16
def_num=52,
scale=256,
units='rpm',
),
53: Field(
name='fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=53,
scale=128,
units='rpm',
),
54: Field( # Total saturated and unsaturated hemoglobin
name='total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=54,
scale=100,
units='g/dL',
),
55: Field( # Min saturated and unsaturated hemoglobin
name='total_hemoglobin_conc_min',
type=BASE_TYPES[0x84], # uint16
def_num=55,
scale=100,
units='g/dL',
),
56: Field( # Max saturated and unsaturated hemoglobin
name='total_hemoglobin_conc_max',
type=BASE_TYPES[0x84], # uint16
def_num=56,
scale=100,
units='g/dL',
),
57: Field( # Percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=57,
scale=10,
units='%',
),
58: Field( # Min percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent_min',
type=BASE_TYPES[0x84], # uint16
def_num=58,
scale=10,
units='%',
),
59: Field( # Max percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent_max',
type=BASE_TYPES[0x84], # uint16
def_num=59,
scale=10,
units='%',
),
62: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=62,
),
67: Field( # Left platform center offset
name='left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=67,
units='mm',
),
68: Field( # Right platform center offset
name='right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=68,
units='mm',
),
69: Field( # Left power phase angles. Data value indexes defined by power_phase_type.
name='left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=69,
scale=0.7111111,
units='degrees',
),
70: Field( # Left power phase peak angles. Data value indexes defined by power_phase_type.
name='left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=70,
scale=0.7111111,
units='degrees',
),
71: Field( # Right power phase angles. Data value indexes defined by power_phase_type.
name='right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=71,
scale=0.7111111,
units='degrees',
),
72: Field( # Right power phase peak angles. Data value indexes defined by power_phase_type.
name='right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=72,
scale=0.7111111,
units='degrees',
),
73: Field(
name='enhanced_speed',
type=BASE_TYPES[0x86], # uint32
def_num=73,
scale=1000,
units='m/s',
),
78: Field(
name='enhanced_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=78,
scale=5,
offset=500,
units='m',
),
81: Field( # lev battery state of charge
name='battery_soc',
type=BASE_TYPES[0x02], # uint8
def_num=81,
scale=2,
units='percent',
),
82: Field( # lev motor power
name='motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=82,
units='watts',
),
83: Field(
name='vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=83,
scale=100,
units='percent',
),
84: Field(
name='stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=84,
scale=100,
units='percent',
),
85: Field(
name='step_length',
type=BASE_TYPES[0x84], # uint16
def_num=85,
scale=10,
units='mm',
),
253: FIELD_TYPE_TIMESTAMP,
},
),
21: MessageType(
name='event',
mesg_num=21,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='data16',
type=BASE_TYPES[0x84], # uint16
def_num=2,
components=(
ComponentField(
name='data',
def_num=3,
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
3: Field(
name='data',
type=BASE_TYPES[0x86], # uint32
def_num=3,
subfields=(
SubField(
name='battery_level',
def_num=3,
type=BASE_TYPES[0x84], # uint16
scale=1000,
units='V',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='battery',
raw_value=11,
),
),
),
SubField(
name='cad_high_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='rpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='cad_high_alert',
raw_value=17,
),
),
),
SubField(
name='cad_low_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='rpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='cad_low_alert',
raw_value=18,
),
),
),
SubField(
name='calorie_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='calorie_duration_alert',
raw_value=25,
),
),
),
SubField(
name='comm_timeout',
def_num=3,
type=FIELD_TYPES['comm_timeout_type'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='comm_timeout',
raw_value=47,
),
),
),
SubField(
name='course_point_index',
def_num=3,
type=FIELD_TYPES['message_index'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='course_point',
raw_value=10,
),
),
),
SubField(
name='distance_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='distance_duration_alert',
raw_value=24,
),
),
),
SubField(
name='fitness_equipment_state',
def_num=3,
type=FIELD_TYPES['fitness_equipment_state'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='fitness_equipment',
raw_value=27,
),
),
),
SubField(
name='gear_change_data',
def_num=3,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='front_gear_change',
raw_value=42,
),
ReferenceField(
name='event',
def_num=0,
value='rear_gear_change',
raw_value=43,
),
),
components=(
ComponentField(
name='rear_gear_num',
def_num=11,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='rear_gear',
def_num=12,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='front_gear_num',
def_num=9,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='front_gear',
def_num=10,
accumulate=False,
bits=8,
bit_offset=24,
),
),
),
SubField(
name='hr_high_alert',
def_num=3,
type=BASE_TYPES[0x02], # uint8
units='bpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='hr_high_alert',
raw_value=13,
),
),
),
SubField(
name='hr_low_alert',
def_num=3,
type=BASE_TYPES[0x02], # uint8
units='bpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='hr_low_alert',
raw_value=14,
),
),
),
SubField(
name='power_high_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='watts',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='power_high_alert',
raw_value=19,
),
),
),
SubField(
name='power_low_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='watts',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='power_low_alert',
raw_value=20,
),
),
),
SubField( # Indicates the rider position value.
name='rider_position',
def_num=3,
type=FIELD_TYPES['rider_position_type'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='rider_position_change',
raw_value=44,
),
),
),
SubField(
name='speed_high_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='speed_high_alert',
raw_value=15,
),
),
),
SubField(
name='speed_low_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='speed_low_alert',
raw_value=16,
),
),
),
SubField(
name='sport_point',
def_num=3,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='sport_point',
raw_value=33,
),
),
components=(
ComponentField(
name='score',
def_num=7,
accumulate=False,
bits=16,
bit_offset=0,
),
ComponentField(
name='opponent_score',
def_num=8,
accumulate=False,
bits=16,
bit_offset=16,
),
),
),
SubField(
name='time_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='time_duration_alert',
raw_value=23,
),
),
),
SubField(
name='timer_trigger',
def_num=3,
type=FIELD_TYPES['timer_trigger'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='timer',
raw_value=0,
),
),
),
SubField(
name='virtual_partner_speed',
def_num=3,
type=BASE_TYPES[0x84], # uint16
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='virtual_partner_pace',
raw_value=12,
),
),
),
),
),
4: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
7: Field( # Do not populate directly. Autogenerated by decoder for sport_point subfield components
name='score',
type=BASE_TYPES[0x84], # uint16
def_num=7,
),
8: Field( # Do not populate directly. Autogenerated by decoder for sport_point subfield components
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=8,
),
9: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Front gear number. 1 is innermost.
name='front_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=9,
),
10: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Number of front teeth.
name='front_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=10,
),
11: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Rear gear number. 1 is innermost.
name='rear_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=11,
),
12: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Number of rear teeth.
name='rear_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=12,
),
13: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=13,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
23: MessageType(
name='device_info',
mesg_num=23,
fields={
0: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=0,
),
1: Field(
name='device_type',
type=FIELD_TYPES['antplus_device_type'], # uint8
def_num=1,
subfields=(
SubField(
name='ant_device_type',
def_num=1,
type=BASE_TYPES[0x02], # uint8
ref_fields=(
ReferenceField(
name='source_type',
def_num=25,
value='ant',
raw_value=0,
),
),
),
SubField(
name='antplus_device_type',
def_num=1,
type=FIELD_TYPES['antplus_device_type'],
ref_fields=(
ReferenceField(
name='source_type',
def_num=25,
value='antplus',
raw_value=1,
),
),
),
),
),
2: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=2,
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
4: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=4,
subfields=(
SubField(
name='garmin_product',
def_num=4,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=2,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=2,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=2,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
5: Field(
name='software_version',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
),
6: Field(
name='hardware_version',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field( # Reset by new battery or charge.
name='cum_operating_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
units='s',
),
10: Field(
name='battery_voltage',
type=BASE_TYPES[0x84], # uint16
def_num=10,
scale=256,
units='V',
),
11: Field(
name='battery_status',
type=FIELD_TYPES['battery_status'],
def_num=11,
),
18: Field( # Indicates the location of the sensor
name='sensor_position',
type=FIELD_TYPES['body_location'],
def_num=18,
),
19: Field( # Used to describe the sensor or location
name='descriptor',
type=BASE_TYPES[0x07], # string
def_num=19,
),
20: Field(
name='ant_transmission_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=20,
),
21: Field(
name='ant_device_number',
type=BASE_TYPES[0x8B], # uint16z
def_num=21,
),
22: Field(
name='ant_network',
type=FIELD_TYPES['ant_network'],
def_num=22,
),
25: Field(
name='source_type',
type=FIELD_TYPES['source_type'],
def_num=25,
),
27: Field( # Optional free form string to indicate the devices name or model
name='product_name',
type=BASE_TYPES[0x07], # string
def_num=27,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
27: MessageType(
name='workout_step',
mesg_num=27,
fields={
0: Field(
name='wkt_step_name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='duration_type',
type=FIELD_TYPES['wkt_step_duration'],
def_num=1,
),
2: Field(
name='duration_value',
type=BASE_TYPES[0x86], # uint32
def_num=2,
subfields=(
SubField(
name='duration_calories',
def_num=2,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='calories',
raw_value=4,
),
),
),
SubField(
name='duration_distance',
def_num=2,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='distance',
raw_value=1,
),
),
),
SubField(
name='duration_hr',
def_num=2,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='hr_less_than',
raw_value=2,
),
ReferenceField(
name='duration_type',
def_num=1,
value='hr_greater_than',
raw_value=3,
),
),
),
SubField(
name='duration_power',
def_num=2,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='power_less_than',
raw_value=14,
),
ReferenceField(
name='duration_type',
def_num=1,
value='power_greater_than',
raw_value=15,
),
),
),
SubField( # message_index of step to loop back to. Steps are assumed to be in the order by message_index. custom_name and intensity members are undefined for this duration type.
name='duration_step',
def_num=2,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_steps_cmplt',
raw_value=6,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_time',
raw_value=7,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_distance',
raw_value=8,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_calories',
raw_value=9,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_less_than',
raw_value=10,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_greater_than',
raw_value=11,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_less_than',
raw_value=12,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_greater_than',
raw_value=13,
),
),
),
SubField(
name='duration_time',
def_num=2,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='time',
raw_value=0,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repetition_time',
raw_value=28,
),
),
),
),
),
3: Field(
name='target_type',
type=FIELD_TYPES['wkt_step_target'],
def_num=3,
),
4: Field(
name='target_value',
type=BASE_TYPES[0x86], # uint32
def_num=4,
subfields=(
SubField(
name='repeat_calories',
def_num=4,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_calories',
raw_value=9,
),
),
),
SubField(
name='repeat_distance',
def_num=4,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_distance',
raw_value=8,
),
),
),
SubField(
name='repeat_hr',
def_num=4,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_less_than',
raw_value=10,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_greater_than',
raw_value=11,
),
),
),
SubField(
name='repeat_power',
def_num=4,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_less_than',
raw_value=12,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_greater_than',
raw_value=13,
),
),
),
SubField( # # of repetitions
name='repeat_steps',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_steps_cmplt',
raw_value=6,
),
),
),
SubField(
name='repeat_time',
def_num=4,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_time',
raw_value=7,
),
),
),
SubField( # Zone (1-?); Custom = 0;
name='target_cadence_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField( # hr zone (1-5);Custom =0;
name='target_hr_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField( # Power Zone ( 1-7); Custom = 0;
name='target_power_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField( # speed zone (1-10);Custom =0;
name='target_speed_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
SubField(
name='target_stroke_type',
def_num=4,
type=FIELD_TYPES['swim_stroke'],
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='swim_stroke',
raw_value=11,
),
),
),
),
),
5: Field(
name='custom_target_value_low',
type=BASE_TYPES[0x86], # uint32
def_num=5,
subfields=(
SubField(
name='custom_target_cadence_low',
def_num=5,
type=BASE_TYPES[0x86], # uint32
units='rpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField(
name='custom_target_heart_rate_low',
def_num=5,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField(
name='custom_target_power_low',
def_num=5,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField(
name='custom_target_speed_low',
def_num=5,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
),
),
6: Field(
name='custom_target_value_high',
type=BASE_TYPES[0x86], # uint32
def_num=6,
subfields=(
SubField(
name='custom_target_cadence_high',
def_num=6,
type=BASE_TYPES[0x86], # uint32
units='rpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField(
name='custom_target_heart_rate_high',
def_num=6,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField(
name='custom_target_power_high',
def_num=6,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField(
name='custom_target_speed_high',
def_num=6,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
),
),
7: Field(
name='intensity',
type=FIELD_TYPES['intensity'],
def_num=7,
),
8: Field(
name='notes',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='equipment',
type=FIELD_TYPES['workout_equipment'],
def_num=9,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
32: MessageType(
name='course_point',
mesg_num=32,
fields={
1: Field(
name='timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=100,
units='m',
),
5: Field(
name='type',
type=FIELD_TYPES['course_point'],
def_num=5,
),
6: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=6,
),
8: Field(
name='favorite',
type=FIELD_TYPES['bool'],
def_num=8,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
37: MessageType(
name='file_capabilities',
mesg_num=37,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='flags',
type=FIELD_TYPES['file_flags'],
def_num=1,
),
2: Field(
name='directory',
type=BASE_TYPES[0x07], # string
def_num=2,
),
3: Field(
name='max_count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
4: Field(
name='max_size',
type=BASE_TYPES[0x86], # uint32
def_num=4,
units='bytes',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
38: MessageType(
name='mesg_capabilities',
mesg_num=38,
fields={
0: Field(
name='file',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=1,
),
2: Field(
name='count_type',
type=FIELD_TYPES['mesg_count'],
def_num=2,
),
3: Field(
name='count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
subfields=(
SubField(
name='max_per_file',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='max_per_file',
raw_value=1,
),
),
),
SubField(
name='max_per_file_type',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='max_per_file_type',
raw_value=2,
),
),
),
SubField(
name='num_per_file',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='num_per_file',
raw_value=0,
),
),
),
),
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
39: MessageType(
name='field_capabilities',
mesg_num=39,
fields={
0: Field(
name='file',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=1,
),
2: Field(
name='field_num',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
49: MessageType(
name='file_creator',
mesg_num=49,
fields={
0: Field(
name='software_version',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='hardware_version',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
},
),
53: MessageType(
name='speed_zone',
mesg_num=53,
fields={
0: Field(
name='high_value',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=1000,
units='m/s',
),
1: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
55: MessageType(
name='monitoring',
mesg_num=55,
fields={
0: Field( # Associates this data to device_info message. Not required for file with single device (sensor).
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=0,
),
1: Field( # Accumulated total calories. Maintained by MonitoringReader for each activity_type. See SDK documentation
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='kcal',
),
2: Field( # Accumulated distance. Maintained by MonitoringReader for each activity_type. See SDK documentation.
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=2,
scale=100,
units='m',
),
3: Field( # Accumulated cycles. Maintained by MonitoringReader for each activity_type. See SDK documentation.
name='cycles',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=2,
units='cycles',
subfields=(
SubField(
name='steps',
def_num=3,
type=BASE_TYPES[0x86], # uint32
units='steps',
ref_fields=(
ReferenceField(
name='activity_type',
def_num=5,
value='walking',
raw_value=6,
),
ReferenceField(
name='activity_type',
def_num=5,
value='running',
raw_value=1,
),
),
),
SubField(
name='strokes',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=2,
units='strokes',
ref_fields=(
ReferenceField(
name='activity_type',
def_num=5,
value='cycling',
raw_value=2,
),
ReferenceField(
name='activity_type',
def_num=5,
value='swimming',
raw_value=5,
),
),
),
),
),
4: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=5,
),
6: Field(
name='activity_subtype',
type=FIELD_TYPES['activity_subtype'],
def_num=6,
),
7: Field(
name='activity_level',
type=FIELD_TYPES['activity_level'],
def_num=7,
),
8: Field(
name='distance_16',
type=BASE_TYPES[0x84], # uint16
def_num=8,
units='100*m',
),
9: Field(
name='cycles_16',
type=BASE_TYPES[0x84], # uint16
def_num=9,
units='2*cycles or steps',
),
10: Field(
name='active_time_16',
type=BASE_TYPES[0x84], # uint16
def_num=10,
units='s',
),
11: Field( # Must align to logging interval, for example, time must be 00:00:00 for daily log.
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=11,
),
12: Field( # Avg temperature during the logging interval ended at timestamp
name='temperature',
type=BASE_TYPES[0x83], # sint16
def_num=12,
scale=100,
units='C',
),
14: Field( # Min temperature during the logging interval ended at timestamp
name='temperature_min',
type=BASE_TYPES[0x83], # sint16
def_num=14,
scale=100,
units='C',
),
15: Field( # Max temperature during the logging interval ended at timestamp
name='temperature_max',
type=BASE_TYPES[0x83], # sint16
def_num=15,
scale=100,
units='C',
),
16: Field( # Indexed using minute_activity_level enum
name='activity_time',
type=BASE_TYPES[0x84], # uint16
def_num=16,
units='minutes',
),
19: Field(
name='active_calories',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='kcal',
),
24: Field( # Indicates single type / intensity for duration since last monitoring message.
name='current_activity_type_intensity',
type=BASE_TYPES[0x0D], # byte
def_num=24,
components=(
ComponentField(
name='activity_type',
def_num=5,
accumulate=False,
bits=5,
bit_offset=0,
),
ComponentField(
name='intensity',
def_num=28,
accumulate=False,
bits=3,
bit_offset=5,
),
),
),
25: Field(
name='timestamp_min_8',
type=BASE_TYPES[0x02], # uint8
def_num=25,
units='min',
),
26: Field(
name='timestamp_16',
type=BASE_TYPES[0x84], # uint16
def_num=26,
units='s',
),
27: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=27,
units='bpm',
),
28: Field(
name='intensity',
type=BASE_TYPES[0x02], # uint8
def_num=28,
scale=10,
),
29: Field(
name='duration_min',
type=BASE_TYPES[0x84], # uint16
def_num=29,
units='min',
),
30: Field(
name='duration',
type=BASE_TYPES[0x86], # uint32
def_num=30,
units='s',
),
31: Field(
name='ascent',
type=BASE_TYPES[0x86], # uint32
def_num=31,
scale=1000,
units='m',
),
32: Field(
name='descent',
type=BASE_TYPES[0x86], # uint32
def_num=32,
scale=1000,
units='m',
),
33: Field(
name='moderate_activity_minutes',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='minutes',
),
34: Field(
name='vigorous_activity_minutes',
type=BASE_TYPES[0x84], # uint16
def_num=34,
units='minutes',
),
253: FIELD_TYPE_TIMESTAMP, # Must align to logging interval, for example, time must be 00:00:00 for daily log.
},
),
72: MessageType( # Corresponds to file_id of workout or course.
name='training_file',
mesg_num=72,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=1,
),
2: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=2,
subfields=(
SubField(
name='garmin_product',
def_num=2,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=1,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
4: Field(
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
78: MessageType( # Heart rate variability
name='hrv',
mesg_num=78,
fields={
0: Field( # Time between beats
name='time',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=1000,
units='s',
),
},
),
80: MessageType(
name='ant_rx',
mesg_num=80,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='mesg_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='mesg_data',
type=BASE_TYPES[0x0D], # byte
def_num=2,
components=(
ComponentField(
name='channel_number',
def_num=3,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=24,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=32,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=40,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=48,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=56,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=64,
),
),
),
3: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data',
type=BASE_TYPES[0x0D], # byte
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
81: MessageType(
name='ant_tx',
mesg_num=81,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='mesg_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='mesg_data',
type=BASE_TYPES[0x0D], # byte
def_num=2,
components=(
ComponentField(
name='channel_number',
def_num=3,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=24,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=32,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=40,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=48,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=56,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=64,
),
),
),
3: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data',
type=BASE_TYPES[0x0D], # byte
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
82: MessageType(
name='ant_channel_id',
mesg_num=82,
fields={
0: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='device_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=1,
),
2: Field(
name='device_number',
type=BASE_TYPES[0x8B], # uint16z
def_num=2,
),
3: Field(
name='transmission_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=3,
),
4: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=4,
),
},
),
101: MessageType(
name='length',
mesg_num=101,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=1000,
units='s',
),
4: Field(
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field(
name='total_strokes',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='strokes',
),
6: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=6,
scale=1000,
units='m/s',
),
7: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=7,
units='swim_stroke',
),
9: Field(
name='avg_swimming_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=9,
units='strokes/min',
),
10: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=10,
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field(
name='length_type',
type=FIELD_TYPES['length_type'],
def_num=12,
),
18: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=18,
),
19: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=19,
),
20: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='counts',
),
21: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='counts',
),
253: FIELD_TYPE_TIMESTAMP,
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
106: MessageType(
name='slave_device',
mesg_num=106,
fields={
0: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=0,
),
1: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=1,
subfields=(
SubField(
name='garmin_product',
def_num=1,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=0,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
},
),
127: MessageType(
name='connectivity',
mesg_num=127,
fields={
0: Field( # Use Bluetooth for connectivity features
name='bluetooth_enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field( # Use Bluetooth Low Energy for connectivity features
name='bluetooth_le_enabled',
type=FIELD_TYPES['bool'],
def_num=1,
),
2: Field( # Use ANT for connectivity features
name='ant_enabled',
type=FIELD_TYPES['bool'],
def_num=2,
),
3: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
4: Field(
name='live_tracking_enabled',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='weather_conditions_enabled',
type=FIELD_TYPES['bool'],
def_num=5,
),
6: Field(
name='weather_alerts_enabled',
type=FIELD_TYPES['bool'],
def_num=6,
),
7: Field(
name='auto_activity_upload_enabled',
type=FIELD_TYPES['bool'],
def_num=7,
),
8: Field(
name='course_download_enabled',
type=FIELD_TYPES['bool'],
def_num=8,
),
9: Field(
name='workout_download_enabled',
type=FIELD_TYPES['bool'],
def_num=9,
),
10: Field(
name='gps_ephemeris_download_enabled',
type=FIELD_TYPES['bool'],
def_num=10,
),
11: Field(
name='incident_detection_enabled',
type=FIELD_TYPES['bool'],
def_num=11,
),
12: Field(
name='grouptrack_enabled',
type=FIELD_TYPES['bool'],
def_num=12,
),
},
),
128: MessageType(
name='weather_conditions',
mesg_num=128,
fields={
0: Field( # Current or forecast
name='weather_report',
type=FIELD_TYPES['weather_report'],
def_num=0,
),
1: Field(
name='temperature',
type=BASE_TYPES[0x01], # sint8
def_num=1,
units='C',
),
2: Field( # Corresponds to GSC Response weatherIcon field
name='condition',
type=FIELD_TYPES['weather_status'],
def_num=2,
),
3: Field(
name='wind_direction',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='degrees',
),
4: Field(
name='wind_speed',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=1000,
units='m/s',
),
5: Field( # range 0-100
name='precipitation_probability',
type=BASE_TYPES[0x02], # uint8
def_num=5,
),
6: Field( # Heat Index if GCS heatIdx above or equal to 90F or wind chill if GCS windChill below or equal to 32F
name='temperature_feels_like',
type=BASE_TYPES[0x01], # sint8
def_num=6,
units='C',
),
7: Field(
name='relative_humidity',
type=BASE_TYPES[0x02], # uint8
def_num=7,
),
8: Field( # string corresponding to GCS response location string
name='location',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='observed_at_time',
type=FIELD_TYPES['date_time'],
def_num=9,
),
10: Field(
name='observed_location_lat',
type=BASE_TYPES[0x85], # sint32
def_num=10,
units='semicircles',
),
11: Field(
name='observed_location_long',
type=BASE_TYPES[0x85], # sint32
def_num=11,
units='semicircles',
),
12: Field(
name='day_of_week',
type=FIELD_TYPES['day_of_week'],
def_num=12,
),
13: Field(
name='high_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=13,
units='C',
),
14: Field(
name='low_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=14,
units='C',
),
253: FIELD_TYPE_TIMESTAMP, # time of update for current conditions, else forecast time
},
),
129: MessageType(
name='weather_alert',
mesg_num=129,
fields={
0: Field( # Unique identifier from GCS report ID string, length is 12
name='report_id',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # Time alert was issued
name='issue_time',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field( # Time alert expires
name='expire_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field( # Warning, Watch, Advisory, Statement
name='severity',
type=FIELD_TYPES['weather_severity'],
def_num=3,
),
4: Field( # Tornado, Severe Thunderstorm, etc.
name='type',
type=FIELD_TYPES['weather_severe_type'],
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
131: MessageType(
name='cadence_zone',
mesg_num=131,
fields={
0: Field(
name='high_value',
type=BASE_TYPES[0x02], # uint8
def_num=0,
units='rpm',
),
1: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
132: MessageType(
name='hr',
mesg_num=132,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='time256',
type=BASE_TYPES[0x02], # uint8
def_num=1,
components=(
ComponentField(
name='fractional_timestamp',
def_num=0,
scale=256,
units='s',
accumulate=False,
bits=8,
bit_offset=0,
),
),
),
6: Field(
name='filtered_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=6,
units='bpm',
),
9: Field(
name='event_timestamp',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=1024,
units='s',
),
10: Field(
name='event_timestamp_12',
type=BASE_TYPES[0x0D], # byte
def_num=10,
components=(
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=0,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=12,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=24,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=36,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=48,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=60,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=72,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=84,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=96,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=108,
),
),
),
253: FIELD_TYPE_TIMESTAMP,
},
),
142: MessageType(
name='segment_lap',
mesg_num=142,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='end_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=5,
units='semicircles',
),
6: Field(
name='end_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=6,
units='semicircles',
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strokes',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strokes',
ref_fields=(
ReferenceField(
name='sport',
def_num=23,
value='cycling',
raw_value=2,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=12,
units='kcal',
),
13: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=13,
scale=1000,
units='m/s',
),
14: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
),
15: Field(
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=15,
units='bpm',
),
16: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='rpm',
),
18: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
),
19: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='watts',
),
20: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='m',
),
22: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=23,
),
24: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=24,
),
25: Field( # North east corner latitude.
name='nec_lat',
type=BASE_TYPES[0x85], # sint32
def_num=25,
units='semicircles',
),
26: Field( # North east corner longitude.
name='nec_long',
type=BASE_TYPES[0x85], # sint32
def_num=26,
units='semicircles',
),
27: Field( # South west corner latitude.
name='swc_lat',
type=BASE_TYPES[0x85], # sint32
def_num=27,
units='semicircles',
),
            28: Field(  # South west corner longitude.
name='swc_long',
type=BASE_TYPES[0x85], # sint32
def_num=28,
units='semicircles',
),
29: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=29,
),
30: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=30,
units='watts',
),
31: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=31,
),
32: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=32,
),
33: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=33,
units='J',
),
34: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=34,
scale=5,
offset=500,
units='m',
),
35: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=35,
scale=5,
offset=500,
units='m',
),
36: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=36,
units='m',
),
37: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=37,
scale=100,
units='%',
),
38: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=38,
scale=100,
units='%',
),
39: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=39,
scale=100,
units='%',
),
40: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=40,
scale=100,
units='%',
),
41: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=41,
scale=100,
units='%',
),
42: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=42,
units='C',
),
43: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=43,
units='C',
),
44: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=44,
scale=1000,
units='s',
),
45: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=45,
scale=1000,
units='m/s',
),
46: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=46,
scale=1000,
units='m/s',
),
47: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=47,
scale=1000,
units='m/s',
),
48: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=48,
scale=1000,
units='m/s',
),
49: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=49,
scale=1000,
units='s',
),
50: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=50,
scale=1000,
units='s',
),
51: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=51,
scale=1000,
units='s',
),
52: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=52,
scale=1000,
units='s',
),
53: Field(
name='repetition_num',
type=BASE_TYPES[0x84], # uint16
def_num=53,
),
54: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=54,
scale=5,
offset=500,
units='m',
),
55: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=55,
units='bpm',
),
56: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=56,
scale=1000,
units='s',
),
57: Field(
name='wkt_step_index',
type=FIELD_TYPES['message_index'],
def_num=57,
),
58: Field(
name='sport_event',
type=FIELD_TYPES['sport_event'],
def_num=58,
),
59: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=59,
scale=2,
units='percent',
),
60: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=60,
scale=2,
units='percent',
),
61: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=61,
scale=2,
units='percent',
),
62: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=62,
scale=2,
units='percent',
),
63: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=63,
scale=2,
units='percent',
),
64: Field(
name='status',
type=FIELD_TYPES['segment_lap_status'],
def_num=64,
),
65: Field(
name='uuid',
type=BASE_TYPES[0x07], # string
def_num=65,
),
66: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=66,
scale=128,
units='rpm',
),
67: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=67,
scale=128,
units='rpm',
),
68: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=68,
scale=128,
units='cycles',
),
69: Field(
name='front_gear_shift_count',
type=BASE_TYPES[0x84], # uint16
def_num=69,
),
70: Field(
name='rear_gear_shift_count',
type=BASE_TYPES[0x84], # uint16
def_num=70,
),
71: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=71,
scale=1000,
units='s',
),
72: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=72,
),
73: Field( # Average left platform center offset
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=73,
units='mm',
),
74: Field( # Average right platform center offset
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=74,
units='mm',
),
75: Field( # Average left power phase angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=75,
scale=0.7111111,
units='degrees',
),
76: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=76,
scale=0.7111111,
units='degrees',
),
77: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=77,
scale=0.7111111,
units='degrees',
),
78: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=78,
scale=0.7111111,
units='degrees',
),
79: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=79,
units='watts',
),
80: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=80,
units='watts',
),
81: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=81,
units='rpm',
),
82: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=82,
units='rpm',
),
83: Field( # Manufacturer that produced the segment
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=83,
),
253: FIELD_TYPE_TIMESTAMP, # Lap end time.
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
149: MessageType( # Unique Identification data for an individual segment leader within a segment file
name='segment_leaderboard_entry',
mesg_num=149,
fields={
0: Field( # Friendly name assigned to leader
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # Leader classification
name='type',
type=FIELD_TYPES['segment_leaderboard_type'],
def_num=1,
),
2: Field( # Primary user ID of this leader
name='group_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=2,
),
3: Field( # ID of the activity associated with this leader time
name='activity_id',
type=BASE_TYPES[0x86], # uint32
def_num=3,
),
4: Field( # Segment Time (includes pauses)
name='segment_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field( # String version of the activity_id. 21 characters long, express in decimal
name='activity_id_string',
type=BASE_TYPES[0x07], # string
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
    150: MessageType(  # Navigation and race evaluation point for a segment describing a point along the segment path and time it took each segment leader to reach that point
name='segment_point',
mesg_num=150,
fields={
1: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field( # Accumulated distance along the segment at the described point
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field( # Accumulated altitude along the segment at the described point
name='altitude',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=5,
offset=500,
units='m',
),
5: Field( # Accumualted time each leader board member required to reach the described point. This value is zero for all leader board members at the starting point of the segment.
name='leader_time',
type=BASE_TYPES[0x86], # uint32
def_num=5,
scale=1000,
units='s',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
158: MessageType(
name='workout_session',
mesg_num=158,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
2: Field(
name='num_valid_steps',
type=BASE_TYPES[0x84], # uint16
def_num=2,
),
3: Field(
name='first_step_index',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
4: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=100,
units='m',
),
5: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
159: MessageType(
name='watchface_settings',
mesg_num=159,
fields={
0: Field(
name='mode',
type=FIELD_TYPES['watchface_mode'],
def_num=0,
),
1: Field(
name='layout',
type=BASE_TYPES[0x0D], # byte
def_num=1,
subfields=(
SubField(
name='analog_layout',
def_num=1,
type=FIELD_TYPES['analog_watchface_layout'],
ref_fields=(
ReferenceField(
name='mode',
def_num=0,
value='analog',
raw_value=1,
),
),
),
SubField(
name='digital_layout',
def_num=1,
type=FIELD_TYPES['digital_watchface_layout'],
ref_fields=(
ReferenceField(
name='mode',
def_num=0,
value='digital',
raw_value=0,
),
),
),
),
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
160: MessageType(
name='gps_metadata',
mesg_num=160,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field(
name='enhanced_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=5,
offset=500,
units='m',
),
4: Field(
name='enhanced_speed',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='m/s',
),
5: Field(
name='heading',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
units='degrees',
),
6: Field( # Used to correlate UTC to system time if the timestamp of the message is in system time. This UTC time is derived from the GPS data.
name='utc_timestamp',
type=FIELD_TYPES['date_time'],
def_num=6,
units='s',
),
7: Field( # velocity[0] is lon velocity. Velocity[1] is lat velocity. Velocity[2] is altitude velocity.
name='velocity',
type=BASE_TYPES[0x83], # sint16
def_num=7,
scale=100,
units='m/s',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp.
},
),
161: MessageType(
name='camera_event',
mesg_num=161,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field(
name='camera_event_type',
type=FIELD_TYPES['camera_event_type'],
def_num=1,
),
2: Field(
name='camera_file_uuid',
type=BASE_TYPES[0x07], # string
def_num=2,
),
3: Field(
name='camera_orientation',
type=FIELD_TYPES['camera_orientation_type'],
def_num=3,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp.
},
),
162: MessageType(
name='timestamp_correlation',
mesg_num=162,
fields={
0: Field( # Fractional part of the UTC timestamp at the time the system timestamp was recorded.
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field( # Whole second part of the system timestamp
name='system_timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
units='s',
),
2: Field( # Fractional part of the system timestamp
name='fractional_system_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=32768,
units='s',
),
3: Field( # timestamp epoch expressed in local time used to convert timestamps to local time
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=3,
units='s',
),
4: Field( # Millisecond part of the UTC timestamp at the time the system timestamp was recorded.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='ms',
),
5: Field( # Millisecond part of the system timestamp
name='system_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='ms',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of UTC timestamp at the time the system timestamp was recorded.
},
),
    164: MessageType(
        name='gyroscope_data',
        mesg_num=164,
        fields={
            0: Field(  # Millisecond part of the timestamp.
                name='timestamp_ms',
                type=BASE_TYPES[0x84],  # uint16
                def_num=0,
                units='ms',
            ),
            1: Field(  # Each time in the array describes the time at which the gyro sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in gyro_x and gyro_y and gyro_z
                name='sample_time_offset',
                type=BASE_TYPES[0x84],  # uint16
                def_num=1,
                units='ms',
            ),
            2: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='gyro_x',
                type=BASE_TYPES[0x84],  # uint16
                def_num=2,
                units='counts',
            ),
            3: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='gyro_y',
                type=BASE_TYPES[0x84],  # uint16
                def_num=3,
                units='counts',
            ),
            4: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='gyro_z',
                type=BASE_TYPES[0x84],  # uint16
                def_num=4,
                units='counts',
            ),
            5: Field(  # Calibrated gyro reading
                name='calibrated_gyro_x',
                type=BASE_TYPES[0x88],  # float32
                def_num=5,
                units='deg/s',
            ),
            6: Field(  # Calibrated gyro reading
                name='calibrated_gyro_y',
                type=BASE_TYPES[0x88],  # float32
                def_num=6,
                units='deg/s',
            ),
            7: Field(  # Calibrated gyro reading
                name='calibrated_gyro_z',
                type=BASE_TYPES[0x88],  # float32
                def_num=7,
                units='deg/s',
            ),
            253: FIELD_TYPE_TIMESTAMP,  # Whole second part of the timestamp
        },
    ),
    165: MessageType(
        name='accelerometer_data',
        mesg_num=165,
        fields={
            0: Field(  # Millisecond part of the timestamp.
                name='timestamp_ms',
                type=BASE_TYPES[0x84],  # uint16
                def_num=0,
                units='ms',
            ),
            1: Field(  # Each time in the array describes the time at which the accelerometer sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in accel_x and accel_y and accel_z
                name='sample_time_offset',
                type=BASE_TYPES[0x84],  # uint16
                def_num=1,
                units='ms',
            ),
            2: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='accel_x',
                type=BASE_TYPES[0x84],  # uint16
                def_num=2,
                units='counts',
            ),
            3: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='accel_y',
                type=BASE_TYPES[0x84],  # uint16
                def_num=3,
                units='counts',
            ),
            4: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='accel_z',
                type=BASE_TYPES[0x84],  # uint16
                def_num=4,
                units='counts',
            ),
            5: Field(  # Calibrated accel reading
                name='calibrated_accel_x',
                type=BASE_TYPES[0x88],  # float32
                def_num=5,
                units='g',
            ),
            6: Field(  # Calibrated accel reading
                name='calibrated_accel_y',
                type=BASE_TYPES[0x88],  # float32
                def_num=6,
                units='g',
            ),
            7: Field(  # Calibrated accel reading
                name='calibrated_accel_z',
                type=BASE_TYPES[0x88],  # float32
                def_num=7,
                units='g',
            ),
            8: Field(  # Calibrated accel reading
                name='compressed_calibrated_accel_x',
                type=BASE_TYPES[0x83],  # sint16
                def_num=8,
                units='mG',
            ),
            9: Field(  # Calibrated accel reading
                name='compressed_calibrated_accel_y',
                type=BASE_TYPES[0x83],  # sint16
                def_num=9,
                units='mG',
            ),
            10: Field(  # Calibrated accel reading
                name='compressed_calibrated_accel_z',
                type=BASE_TYPES[0x83],  # sint16
                def_num=10,
                units='mG',
            ),
            253: FIELD_TYPE_TIMESTAMP,  # Whole second part of the timestamp
        },
    ),
167: MessageType(
name='three_d_sensor_calibration',
mesg_num=167,
fields={
0: Field( # Indicates which sensor the calibration is for
name='sensor_type',
type=FIELD_TYPES['sensor_type'],
def_num=0,
),
1: Field( # Calibration factor used to convert from raw ADC value to degrees, g, etc.
name='calibration_factor',
type=BASE_TYPES[0x86], # uint32
def_num=1,
subfields=(
SubField( # Accelerometer calibration factor
name='accel_cal_factor',
def_num=1,
type=BASE_TYPES[0x86], # uint32
units='g',
ref_fields=(
ReferenceField(
name='sensor_type',
def_num=0,
value='accelerometer',
raw_value=0,
),
),
),
SubField( # Gyro calibration factor
name='gyro_cal_factor',
def_num=1,
type=BASE_TYPES[0x86], # uint32
units='deg/s',
ref_fields=(
ReferenceField(
name='sensor_type',
def_num=0,
value='gyroscope',
raw_value=1,
),
),
),
),
),
2: Field( # Calibration factor divisor
name='calibration_divisor',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='counts',
),
3: Field( # Level shift value used to shift the ADC value back into range
name='level_shift',
type=BASE_TYPES[0x86], # uint32
def_num=3,
),
4: Field( # Internal calibration factors, one for each: xy, yx, zx
name='offset_cal',
type=BASE_TYPES[0x85], # sint32
def_num=4,
),
5: Field( # 3 x 3 rotation matrix (row major)
name='orientation_matrix',
type=BASE_TYPES[0x85], # sint32
def_num=5,
scale=65535,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
169: MessageType(
name='video_frame',
mesg_num=169,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # Number of the frame that the timestamp and timestamp_ms correlate to
name='frame_number',
type=BASE_TYPES[0x86], # uint32
def_num=1,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
    174: MessageType(
        name='obdii_data',
        mesg_num=174,
        fields={
            0: Field(  # Fractional part of timestamp, added to timestamp
                name='timestamp_ms',
                type=BASE_TYPES[0x84],  # uint16
                def_num=0,
                units='ms',
            ),
            1: Field(  # Offset of PID reading [i] from start_timestamp+start_timestamp_ms. Readings may span across seconds.
                name='time_offset',
                type=BASE_TYPES[0x84],  # uint16
                def_num=1,
                units='ms',
            ),
            2: Field(  # Parameter ID
                name='pid',
                type=BASE_TYPES[0x0D],  # byte
                def_num=2,
            ),
            3: Field(  # Raw parameter data
                name='raw_data',
                type=BASE_TYPES[0x0D],  # byte
                def_num=3,
            ),
            4: Field(  # Optional, data size of PID[i].  If not specified refer to SAE J1979.
                name='pid_data_size',
                type=BASE_TYPES[0x02],  # uint8
                def_num=4,
            ),
            5: Field(  # System time associated with sample expressed in ms, can be used instead of time_offset.  There will be a system_time value for each raw_data element.  For multibyte pids the system_time is repeated.
                name='system_time',
                type=BASE_TYPES[0x86],  # uint32
                def_num=5,
            ),
            6: Field(  # Timestamp of first sample recorded in the message.  Used with time_offset to generate time of each sample
                name='start_timestamp',
                type=FIELD_TYPES['date_time'],
                def_num=6,
            ),
            7: Field(  # Fractional part of start_timestamp
                name='start_timestamp_ms',
                type=BASE_TYPES[0x84],  # uint16
                def_num=7,
                units='ms',
            ),
            253: FIELD_TYPE_TIMESTAMP,  # Timestamp message was output
        },
    ),
177: MessageType(
name='nmea_sentence',
mesg_num=177,
fields={
0: Field( # Fractional part of timestamp, added to timestamp
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # NMEA sentence
name='sentence',
type=BASE_TYPES[0x07], # string
def_num=1,
),
253: FIELD_TYPE_TIMESTAMP, # Timestamp message was output
},
),
    178: MessageType(
        name='aviation_attitude',
        mesg_num=178,
        fields={
            0: Field(  # Fractional part of timestamp, added to timestamp
                name='timestamp_ms',
                type=BASE_TYPES[0x84],  # uint16
                def_num=0,
                units='ms',
            ),
            1: Field(  # System time associated with sample expressed in ms.
                name='system_time',
                type=BASE_TYPES[0x86],  # uint32
                def_num=1,
                units='ms',
            ),
            2: Field(  # Range -PI/2 to +PI/2
                name='pitch',
                type=BASE_TYPES[0x83],  # sint16
                def_num=2,
                scale=10430.38,
                units='radians',
            ),
            3: Field(  # Range -PI to +PI
                name='roll',
                type=BASE_TYPES[0x83],  # sint16
                def_num=3,
                scale=10430.38,
                units='radians',
            ),
            4: Field(  # Range -78.4 to +78.4 (-8 Gs to 8 Gs)
                name='accel_lateral',
                type=BASE_TYPES[0x83],  # sint16
                def_num=4,
                scale=100,
                units='m/s^2',
            ),
            5: Field(  # Range -78.4 to +78.4 (-8 Gs to 8 Gs)
                name='accel_normal',
                type=BASE_TYPES[0x83],  # sint16
                def_num=5,
                scale=100,
                units='m/s^2',
            ),
            6: Field(  # Range -8.727 to +8.727 (-500 degs/sec to +500 degs/sec)
                name='turn_rate',
                type=BASE_TYPES[0x83],  # sint16
                def_num=6,
                scale=1024,
                units='radians/second',
            ),
            7: Field(
                name='stage',
                type=FIELD_TYPES['attitude_stage'],
                def_num=7,
            ),
            8: Field(  # The percent complete of the current attitude stage.  Set to 0 for attitude stages 0, 1 and 2 and to 100 for attitude stage 3 by AHRS modules that do not support it.  Range 0 - 100
                name='attitude_stage_complete',
                type=BASE_TYPES[0x02],  # uint8
                def_num=8,
                units='%',
            ),
            9: Field(  # Track Angle/Heading Range 0 - 2pi
                name='track',
                type=BASE_TYPES[0x84],  # uint16
                def_num=9,
                scale=10430.38,
                units='radians',
            ),
            10: Field(
                name='validity',
                type=FIELD_TYPES['attitude_validity'],
                def_num=10,
            ),
            253: FIELD_TYPE_TIMESTAMP,  # Timestamp message was output
        },
    ),
184: MessageType(
name='video',
mesg_num=184,
fields={
0: Field(
name='url',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='hosting_provider',
type=BASE_TYPES[0x07], # string
def_num=1,
),
2: Field( # Playback time of video
name='duration',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='ms',
),
},
),
185: MessageType(
name='video_title',
mesg_num=185,
fields={
0: Field( # Total number of title parts
name='message_count',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='text',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field( # Long titles will be split into multiple parts
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
186: MessageType(
name='video_description',
mesg_num=186,
fields={
0: Field( # Total number of description parts
name='message_count',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='text',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field( # Long descriptions will be split into multiple parts
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
187: MessageType(
name='video_clip',
mesg_num=187,
fields={
0: Field(
name='clip_number',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='start_timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field(
name='start_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=2,
),
3: Field(
name='end_timestamp',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field(
name='end_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=4,
),
6: Field( # Start of clip in video time
name='clip_start',
type=BASE_TYPES[0x86], # uint32
def_num=6,
units='ms',
),
7: Field( # End of clip in video time
name='clip_end',
type=BASE_TYPES[0x86], # uint32
def_num=7,
units='ms',
),
},
),
188: MessageType(
name='ohr_settings',
mesg_num=188,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['switch'],
def_num=0,
),
},
),
200: MessageType(
name='exd_screen_configuration',
mesg_num=200,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field( # number of fields in screen
name='field_count',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='layout',
type=FIELD_TYPES['exd_layout'],
def_num=2,
),
3: Field(
name='screen_enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
},
),
201: MessageType(
name='exd_data_field_configuration',
mesg_num=201,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='concept_field',
type=BASE_TYPES[0x0D], # byte
def_num=1,
components=(
ComponentField(
name='field_id',
def_num=2,
accumulate=False,
bits=4,
bit_offset=0,
),
ComponentField(
name='concept_count',
def_num=3,
accumulate=False,
bits=4,
bit_offset=4,
),
),
),
2: Field(
name='field_id',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='concept_count',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='display_type',
type=FIELD_TYPES['exd_display_type'],
def_num=4,
),
5: Field(
name='title',
type=BASE_TYPES[0x07], # string
def_num=5,
),
},
),
202: MessageType(
name='exd_data_concept_configuration',
mesg_num=202,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='concept_field',
type=BASE_TYPES[0x0D], # byte
def_num=1,
components=(
ComponentField(
name='field_id',
def_num=2,
accumulate=False,
bits=4,
bit_offset=0,
),
ComponentField(
name='concept_index',
def_num=3,
accumulate=False,
bits=4,
bit_offset=4,
),
),
),
2: Field(
name='field_id',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='concept_index',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data_page',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field(
name='concept_key',
type=BASE_TYPES[0x02], # uint8
def_num=5,
),
6: Field(
name='scaling',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
8: Field(
name='data_units',
type=FIELD_TYPES['exd_data_units'],
def_num=8,
),
9: Field(
name='qualifier',
type=FIELD_TYPES['exd_qualifiers'],
def_num=9,
),
10: Field(
name='descriptor',
type=FIELD_TYPES['exd_descriptors'],
def_num=10,
),
11: Field(
name='is_signed',
type=FIELD_TYPES['bool'],
def_num=11,
),
},
),
206: MessageType( # Must be logged before developer field is used
name='field_description',
mesg_num=206,
fields={
0: Field(
name='developer_data_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='field_definition_number',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='fit_base_type_id',
type=FIELD_TYPES['fit_base_type'],
def_num=2,
),
3: Field(
name='field_name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
4: Field(
name='array',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field(
name='components',
type=BASE_TYPES[0x07], # string
def_num=5,
),
6: Field(
name='scale',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field(
name='offset',
type=BASE_TYPES[0x01], # sint8
def_num=7,
),
8: Field(
name='units',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='bits',
type=BASE_TYPES[0x07], # string
def_num=9,
),
10: Field(
name='accumulate',
type=BASE_TYPES[0x07], # string
def_num=10,
),
13: Field(
name='fit_base_unit_id',
type=FIELD_TYPES['fit_base_unit'],
def_num=13,
),
14: Field(
name='native_mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=14,
),
15: Field(
name='native_field_num',
type=BASE_TYPES[0x02], # uint8
def_num=15,
),
},
),
207: MessageType( # Must be logged before field description
name='developer_data_id',
mesg_num=207,
fields={
0: Field(
name='developer_id',
type=BASE_TYPES[0x0D], # byte
def_num=0,
),
1: Field(
name='application_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='manufacturer_id',
type=FIELD_TYPES['manufacturer'],
def_num=2,
),
3: Field(
name='developer_data_index',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='application_version',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
},
),
    208: MessageType(
        name='magnetometer_data',
        mesg_num=208,
        fields={
            0: Field(  # Millisecond part of the timestamp.
                name='timestamp_ms',
                type=BASE_TYPES[0x84],  # uint16
                def_num=0,
                units='ms',
            ),
            1: Field(  # Each time in the array describes the time at which the compass sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in cmps_x and cmps_y and cmps_z
                name='sample_time_offset',
                type=BASE_TYPES[0x84],  # uint16
                def_num=1,
                units='ms',
            ),
            2: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='mag_x',
                type=BASE_TYPES[0x84],  # uint16
                def_num=2,
                units='counts',
            ),
            3: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='mag_y',
                type=BASE_TYPES[0x84],  # uint16
                def_num=3,
                units='counts',
            ),
            4: Field(  # These are the raw ADC reading. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
                name='mag_z',
                type=BASE_TYPES[0x84],  # uint16
                def_num=4,
                units='counts',
            ),
            5: Field(  # Calibrated Magnetometer reading
                name='calibrated_mag_x',
                type=BASE_TYPES[0x88],  # float32
                def_num=5,
                units='G',
            ),
            6: Field(  # Calibrated Magnetometer reading
                name='calibrated_mag_y',
                type=BASE_TYPES[0x88],  # float32
                def_num=6,
                units='G',
            ),
            7: Field(  # Calibrated Magnetometer reading
                name='calibrated_mag_z',
                type=BASE_TYPES[0x88],  # float32
                def_num=7,
                units='G',
            ),
            253: FIELD_TYPE_TIMESTAMP,  # Whole second part of the timestamp
        },
    ),
######################### Activity File Messages #########################
34: MessageType(
name='activity',
mesg_num=34,
fields={
0: Field( # Exclude pauses
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=0,
scale=1000,
units='s',
),
1: Field(
name='num_sessions',
type=BASE_TYPES[0x84], # uint16
def_num=1,
),
2: Field(
name='type',
type=FIELD_TYPES['activity'],
def_num=2,
),
3: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=3,
),
4: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=4,
),
5: Field( # timestamp epoch expressed in local time, used to convert activity timestamps to local time
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=5,
),
6: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
###################### Blood Pressure File Messages ######################
51: MessageType(
name='blood_pressure',
mesg_num=51,
fields={
0: Field(
name='systolic_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='mmHg',
),
1: Field(
name='diastolic_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='mmHg',
),
2: Field(
name='mean_arterial_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='mmHg',
),
3: Field(
name='map_3_sample_mean',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='mmHg',
),
4: Field(
name='map_morning_values',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='mmHg',
),
5: Field(
name='map_evening_values',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='mmHg',
),
6: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=6,
units='bpm',
),
7: Field(
name='heart_rate_type',
type=FIELD_TYPES['hr_type'],
def_num=7,
),
8: Field(
name='status',
type=FIELD_TYPES['bp_status'],
def_num=8,
),
9: Field( # Associates this blood pressure message to a user. This corresponds to the index of the user profile message in the blood pressure file.
name='user_profile_index',
type=FIELD_TYPES['message_index'],
def_num=9,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
########################## Course File Messages ##########################
31: MessageType(
name='course',
mesg_num=31,
fields={
4: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=4,
),
5: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=5,
),
6: Field(
name='capabilities',
type=FIELD_TYPES['course_capabilities'],
def_num=6,
),
7: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=7,
),
},
),
########################## Device File Messages ##########################
35: MessageType(
name='software',
mesg_num=35,
fields={
3: Field(
name='version',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=100,
),
5: Field(
name='part_number',
type=BASE_TYPES[0x07], # string
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
########################## Goals File Messages ###########################
15: MessageType(
name='goal',
mesg_num=15,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
2: Field(
name='start_date',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='end_date',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field(
name='type',
type=FIELD_TYPES['goal'],
def_num=4,
),
5: Field(
name='value',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field(
name='repeat',
type=FIELD_TYPES['bool'],
def_num=6,
),
7: Field(
name='target_value',
type=BASE_TYPES[0x86], # uint32
def_num=7,
),
8: Field(
name='recurrence',
type=FIELD_TYPES['goal_recurrence'],
def_num=8,
),
9: Field(
name='recurrence_value',
type=BASE_TYPES[0x84], # uint16
def_num=9,
),
10: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=10,
),
11: Field(
name='source',
type=FIELD_TYPES['goal_source'],
def_num=11,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
######################## Monitoring File Messages ########################
103: MessageType(
name='monitoring_info',
mesg_num=103,
fields={
0: Field( # Use to convert activity timestamps to local time if device does not support time zone and daylight savings time correction.
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=0,
units='s',
),
1: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=1,
),
3: Field( # Indexed by activity_type
name='cycles_to_distance',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=5000,
units='m/cycle',
),
4: Field( # Indexed by activity_type
name='cycles_to_calories',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=5000,
units='kcal/cycle',
),
5: Field(
name='resting_metabolic_rate',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='kcal/day',
),
253: FIELD_TYPE_TIMESTAMP,
},
),
############################# Other Messages #############################
145: MessageType(
name='memo_glob',
mesg_num=145,
fields={
0: Field( # Block of utf8 bytes
name='memo',
type=BASE_TYPES[0x0D], # byte
def_num=0,
),
1: Field( # Allows relating glob to another mesg If used only required for first part of each memo_glob
name='message_number',
type=BASE_TYPES[0x84], # uint16
def_num=1,
),
2: Field( # Index of external mesg
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=2,
),
250: Field( # Sequence number of memo blocks
name='part_index',
type=BASE_TYPES[0x86], # uint32
def_num=250,
),
},
),
######################### Schedule File Messages #########################
28: MessageType(
name='schedule',
mesg_num=28,
fields={
0: Field( # Corresponds to file_id of scheduled workout / course.
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=0,
),
1: Field( # Corresponds to file_id of scheduled workout / course.
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=1,
subfields=(
SubField(
name='garmin_product',
def_num=1,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=0,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
2: Field( # Corresponds to file_id of scheduled workout / course.
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=2,
),
3: Field( # Corresponds to file_id of scheduled workout / course.
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field( # TRUE if this activity has been started
name='completed',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='type',
type=FIELD_TYPES['schedule'],
def_num=5,
),
6: Field(
name='scheduled_time',
type=FIELD_TYPES['local_date_time'],
def_num=6,
),
},
),
######################### Segment File Messages ##########################
148: MessageType( # Unique Identification data for a segment file
name='segment_id',
mesg_num=148,
fields={
0: Field( # Friendly name assigned to segment
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # UUID of the segment
name='uuid',
type=BASE_TYPES[0x07], # string
def_num=1,
),
2: Field( # Sport associated with the segment
name='sport',
type=FIELD_TYPES['sport'],
def_num=2,
),
3: Field( # Segment enabled for evaluation
name='enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
4: Field( # Primary key of the user that created the segment
name='user_profile_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
5: Field( # ID of the device that created the segment
name='device_id',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field( # Index for the Leader Board entry selected as the default race participant
name='default_race_leader',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field( # Indicates if any segments should be deleted
name='delete_status',
type=FIELD_TYPES['segment_delete_status'],
def_num=7,
),
8: Field( # Indicates how the segment was selected to be sent to the device
name='selection_type',
type=FIELD_TYPES['segment_selection_type'],
def_num=8,
),
},
),
####################### Segment List File Messages #######################
    151: MessageType(  # Summary of the unique segment and leaderboard information associated with a segment file. This message is used to compile a segment list file describing all segment files on a device. The segment list file is used when refreshing the contents of a segment file with the latest available leaderboard information.
        name='segment_file',
        mesg_num=151,
        fields={
            1: Field(  # UUID of the segment file
                name='file_uuid',
                type=BASE_TYPES[0x07],  # string
                def_num=1,
            ),
            3: Field(  # Enabled state of the segment file
                name='enabled',
                type=FIELD_TYPES['bool'],
                def_num=3,
            ),
            4: Field(  # Primary key of the user that created the segment file
                name='user_profile_primary_key',
                type=BASE_TYPES[0x86],  # uint32
                def_num=4,
            ),
            7: Field(  # Leader type of each leader in the segment file
                name='leader_type',
                type=FIELD_TYPES['segment_leaderboard_type'],
                def_num=7,
            ),
            8: Field(  # Group primary key of each leader in the segment file
                name='leader_group_primary_key',
                type=BASE_TYPES[0x86],  # uint32
                def_num=8,
            ),
            9: Field(  # Activity ID of each leader in the segment file
                name='leader_activity_id',
                type=BASE_TYPES[0x86],  # uint32
                def_num=9,
            ),
            10: Field(  # String version of the activity ID of each leader in the segment file. 21 characters long for each ID, expressed in decimal
                name='leader_activity_id_string',
                type=BASE_TYPES[0x07],  # string
                def_num=10,
            ),
            11: Field(  # Index for the Leader Board entry selected as the default race participant
                name='default_race_leader',
                type=BASE_TYPES[0x02],  # uint8
                def_num=11,
            ),
            254: Field(
                name='message_index',
                type=FIELD_TYPES['message_index'],
                def_num=254,
            ),
        },
    ),
######################### Settings File Messages #########################
2: MessageType(
name='device_settings',
mesg_num=2,
fields={
0: Field( # Index into time zone arrays.
name='active_time_zone',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field( # Offset from system time. Required to convert timestamp from system time to UTC.
name='utc_offset',
type=BASE_TYPES[0x86], # uint32
def_num=1,
),
2: Field( # Offset from system time.
name='time_offset',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='s',
),
4: Field( # Display mode for the time
name='time_mode',
type=FIELD_TYPES['time_mode'],
def_num=4,
),
5: Field( # timezone offset in 1/4 hour increments
name='time_zone_offset',
type=BASE_TYPES[0x01], # sint8
def_num=5,
scale=4,
units='hr',
),
12: Field( # Mode for backlight
name='backlight_mode',
type=FIELD_TYPES['backlight_mode'],
def_num=12,
),
36: Field( # Enabled state of the activity tracker functionality
name='activity_tracker_enabled',
type=FIELD_TYPES['bool'],
def_num=36,
),
39: Field( # UTC timestamp used to set the devices clock and date
name='clock_time',
type=FIELD_TYPES['date_time'],
def_num=39,
),
40: Field( # Bitfield to configure enabled screens for each supported loop
name='pages_enabled',
type=BASE_TYPES[0x84], # uint16
def_num=40,
),
46: Field( # Enabled state of the move alert
name='move_alert_enabled',
type=FIELD_TYPES['bool'],
def_num=46,
),
47: Field( # Display mode for the date
name='date_mode',
type=FIELD_TYPES['date_mode'],
def_num=47,
),
55: Field(
name='display_orientation',
type=FIELD_TYPES['display_orientation'],
def_num=55,
),
56: Field(
name='mounting_side',
type=FIELD_TYPES['side'],
def_num=56,
),
57: Field( # Bitfield to indicate one page as default for each supported loop
name='default_page',
type=BASE_TYPES[0x84], # uint16
def_num=57,
),
58: Field( # Minimum steps before an autosync can occur
name='autosync_min_steps',
type=BASE_TYPES[0x84], # uint16
def_num=58,
units='steps',
),
59: Field( # Minimum minutes before an autosync can occur
name='autosync_min_time',
type=BASE_TYPES[0x84], # uint16
def_num=59,
units='minutes',
),
80: Field( # Enable auto-detect setting for the lactate threshold feature.
name='lactate_threshold_autodetect_enabled',
type=FIELD_TYPES['bool'],
def_num=80,
),
86: Field( # Automatically upload using BLE
name='ble_auto_upload_enabled',
type=FIELD_TYPES['bool'],
def_num=86,
),
89: Field( # Helps to conserve battery by changing modes
name='auto_sync_frequency',
type=FIELD_TYPES['auto_sync_frequency'],
def_num=89,
),
90: Field( # Allows setting specific activities auto-activity detect enabled/disabled settings
name='auto_activity_detect',
type=FIELD_TYPES['auto_activity_detect'],
def_num=90,
),
94: Field( # Number of screens configured to display
name='number_of_screens',
type=BASE_TYPES[0x02], # uint8
def_num=94,
),
95: Field( # Smart Notification display orientation
name='smart_notification_display_orientation',
type=FIELD_TYPES['display_orientation'],
def_num=95,
),
},
),
###################### Sport Settings File Messages ######################
7: MessageType(
name='zones_target',
mesg_num=7,
fields={
1: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='threshold_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='functional_threshold_power',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
5: Field(
name='hr_calc_type',
type=FIELD_TYPES['hr_zone_calc'],
def_num=5,
),
7: Field(
name='pwr_calc_type',
type=FIELD_TYPES['pwr_zone_calc'],
def_num=7,
),
},
),
########################## Totals File Messages ##########################
33: MessageType(
name='totals',
mesg_num=33,
fields={
0: Field( # Excludes pauses
name='timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=0,
units='s',
),
1: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=1,
units='m',
),
2: Field(
name='calories',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='kcal',
),
3: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=3,
),
4: Field( # Includes pauses
name='elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
units='s',
),
5: Field(
name='sessions',
type=BASE_TYPES[0x84], # uint16
def_num=5,
),
6: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=6,
units='s',
),
9: Field(
name='sport_index',
type=BASE_TYPES[0x02], # uint8
def_num=9,
),
253: FIELD_TYPE_TIMESTAMP,
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
####################### Weight Scale File Messages #######################
30: MessageType(
name='weight_scale',
mesg_num=30,
fields={
0: Field(
name='weight',
type=FIELD_TYPES['weight'],
def_num=0,
scale=100,
units='kg',
),
1: Field(
name='percent_fat',
type=BASE_TYPES[0x84], # uint16
def_num=1,
scale=100,
units='%',
),
2: Field(
name='percent_hydration',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=100,
units='%',
),
3: Field(
name='visceral_fat_mass',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=100,
units='kg',
),
4: Field(
name='bone_mass',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=100,
units='kg',
),
5: Field(
name='muscle_mass',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
units='kg',
),
7: Field(
name='basal_met',
type=BASE_TYPES[0x84], # uint16
def_num=7,
scale=4,
units='kcal/day',
),
8: Field(
name='physique_rating',
type=BASE_TYPES[0x02], # uint8
def_num=8,
),
9: Field( # ~4kJ per kcal, 0.25 allows max 16384 kcal
name='active_met',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=4,
units='kcal/day',
),
10: Field(
name='metabolic_age',
type=BASE_TYPES[0x02], # uint8
def_num=10,
units='years',
),
11: Field(
name='visceral_fat_rating',
type=BASE_TYPES[0x02], # uint8
def_num=11,
),
12: Field( # Associates this weight scale message to a user. This corresponds to the index of the user profile message in the weight scale file.
name='user_profile_index',
type=FIELD_TYPES['message_index'],
def_num=12,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
######################### Workout File Messages ##########################
26: MessageType(
name='workout',
mesg_num=26,
fields={
4: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=4,
),
5: Field(
name='capabilities',
type=FIELD_TYPES['workout_capabilities'],
def_num=5,
),
6: Field( # number of valid steps
name='num_valid_steps',
type=BASE_TYPES[0x84], # uint16
def_num=6,
),
8: Field(
name='wkt_name',
type=BASE_TYPES[0x07], # string
def_num=8,
),
11: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=11,
),
14: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=100,
units='m',
),
15: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=15,
),
},
),
}
| 33.844565 | 336 | 0.397916 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 88,433 | 0.262025 |
3e5b857f8383e340919c32b08170a5b4cd5f70b7 | 820 | py | Python | python-basic-project/unit08/myfinance.py | sharebook-kr/learningspoons-bootcamp-finance | 0288f3f3b39f54420e4e9987f1de12892dc680ea | [
"MIT"
] | 9 | 2020-10-25T15:13:32.000Z | 2022-03-26T11:27:21.000Z | python-basic-project/unit08/myfinance.py | sharebook-kr/learningspoons-bootcamp-finance | 0288f3f3b39f54420e4e9987f1de12892dc680ea | [
"MIT"
] | null | null | null | python-basic-project/unit08/myfinance.py | sharebook-kr/learningspoons-bootcamp-finance | 0288f3f3b39f54420e4e9987f1de12892dc680ea | [
"MIT"
] | 7 | 2021-03-01T11:06:45.000Z | 2022-03-14T07:06:04.000Z | import requests
from bs4 import BeautifulSoup
def get_tickers(market=2):
url = f"http://comp.fnguide.com/SVO2/common/lookup_data.asp?mkt_gb={market}&comp_gb=1"
resp = requests.get(url)
data = resp.json()
codes = []
for comp in data:
code = comp['cd'][-6:]
codes.append(code)
return codes
def get_dvr(code):
try:
url = f"https://finance.naver.com/item/main.nhn?code={code}"
resp = requests.get(url)
html = resp.text
soup = BeautifulSoup(html, "html5lib")
tags = soup.select("#_dvr")
dvr = float(tags[0].text)
except:
dvr = 0
return dvr
if __name__ == "__main__":
kospi = get_tickers(market=2)
kosdaq = get_tickers(market=3)
print(len(kospi))
print(len(kosdaq))
print(get_dvr("005930")) | 24.117647 | 90 | 0.603659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 173 | 0.210976 |
3e5c8076b3c080597643c7f2efec1d74b5c8f190 | 1,882 | py | Python | elsie/draw.py | Kobzol/elsie | b7b784d8d04c9e0d545e18504cf4ad23b9e7e8c4 | [
"MIT"
] | null | null | null | elsie/draw.py | Kobzol/elsie | b7b784d8d04c9e0d545e18504cf4ad23b9e7e8c4 | [
"MIT"
] | null | null | null | elsie/draw.py | Kobzol/elsie | b7b784d8d04c9e0d545e18504cf4ad23b9e7e8c4 | [
"MIT"
] | null | null | null | def set_font_from_style(xml, style):
if "font" in style:
xml.set("font-family", style["font"])
if "size" in style:
xml.set("font-size", style["size"])
s = ""
if "color" in style:
s += "fill:{};".format(style["color"])
if style.get("bold", False):
s += "font-weight: bold;"
if style.get("italic", False):
s += "font-style: italic;"
if s:
xml.set("style", s)
def draw_text(xml, x, y, parsed_text, style, styles, id=None):
xml.element("text")
if id is not None:
xml.set("id", id)
xml.set("x", x)
xml.set("y", y)
anchor = {
"left": "start",
"middle": "middle",
"right": "end"
}
xml.set("text-anchor", anchor[style["align"]])
set_font_from_style(xml, style)
line_size = style["size"] * style["line_spacing"]
active_styles = [style]
xml.element("tspan")
for token_type, value in parsed_text:
if token_type == "text":
xml.text(value)
elif token_type == "newline":
for s in active_styles:
xml.close("tspan") # tspan
for i, s in enumerate(active_styles):
xml.element("tspan")
xml.set("xml:space", "preserve")
if i == 0:
xml.set("x", x)
xml.set("dy", line_size * value)
set_font_from_style(xml, s)
elif token_type == "begin":
s = styles[value]
active_styles.append(s)
xml.element("tspan")
xml.set("xml:space", "preserve")
set_font_from_style(xml, s)
elif token_type == "end":
xml.close("tspan")
active_styles.pop()
else:
raise Exception("Invalid token")
for s in active_styles:
xml.close("tspan")
xml.close("text")
| 26.885714 | 62 | 0.508502 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 379 | 0.201382 |
3e5d6df95ec953ae6051ebc8540af72617f83181 | 7,209 | py | Python | TravelWebsite/travello/admin.py | DSAnup/Django | 76025d181bafbb41783912577f80ec728884549d | [
"MIT"
] | 1 | 2020-03-15T05:22:30.000Z | 2020-03-15T05:22:30.000Z | TravelWebsite/travello/admin.py | DSAnup/Django | 76025d181bafbb41783912577f80ec728884549d | [
"MIT"
] | null | null | null | TravelWebsite/travello/admin.py | DSAnup/Django | 76025d181bafbb41783912577f80ec728884549d | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.utils.html import format_html
from django.shortcuts import redirect
from .models import *
# Register your models here.
admin.site.register(CategoryDes)
class DestinationAdmin(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn-primary" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn-danger" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def description(self, obj):
str_slice = obj.desc[:40]+'...'
return format_html(str_slice)
list_display = ('name', 'category_id', 'description', 'price', 'edit', 'delete')
admin.site.register(Destination, DestinationAdmin)
class BesttripAdmin(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
list_display = ('title', 'date', 'desc', 'edit', 'delete')
admin.site.register(Besttrip, BesttripAdmin)
class TestomonialAdmin(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
list_display = ('quote', 'author', 'designation', 'edit', 'delete')
admin.site.register(Testominal, TestomonialAdmin)
class SubcriptionAdmin(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
list_display = ('name', 'email', 'edit', 'delete')
admin.site.register(Subscibtion, SubcriptionAdmin)
class HomesliderAdmin(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
list_display = ('title', 'edit', 'delete')
admin.site.register(Homeslider, HomesliderAdmin)
class IntroAdmin(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
list_display = ('title', 'subtitle', 'edit', 'delete')
def has_add_permission(self, request):
count = Intro.objects.all().count()
if count <= 2:
return True
return False
admin.site.register(Intro, IntroAdmin)
class FooterContent(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
list_display = ('title', 'contact_info', 'edit', 'delete')
def has_add_permission(self, request):
count = FooterContact.objects.all().count()
if count <= 2:
return True
return False
admin.site.register(FooterContact, FooterContent)
class HomeFixedContent(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
list_display = ('testiBackground', 'bestSide', 'footerBack', 'edit', 'delete')
def has_add_permission(self, request):
count = HomeStatic.objects.all().count()
if count == 0:
return True
return False
admin.site.register(HomeStatic, HomeFixedContent)
class AboutUs(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def description(self, obj):
str_slice = obj.brief[:40]+'...'
return format_html(str_slice)
list_display = ('description', 'aboutimg', 'whybackground', 'edit', 'delete')
def has_add_permission(self, request):
count = about_us_fixed.objects.all().count()
if count == 0:
return True
return False
admin.site.register(about_us_fixed, AboutUs)
class WhyChoose(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def description(self, obj):
str_slice = obj.shortdesc[:40]+'...'
return format_html(str_slice)
list_display = ( 'title', 'description', 'img', 'edit', 'delete')
def has_add_permission(self, request):
count = why_choose_us.objects.all().count()
if count <= 2:
return True
return False
admin.site.register(why_choose_us, WhyChoose)
class OurTeam(admin.ModelAdmin):
def edit(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/change/">Change</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def delete(self, obj):
return format_html('<a class="btn" href="/admin/{0}/{1}/{2}/delete/">Delete</a>'.format(obj._meta.app_label, obj._meta.object_name, obj.id).lower())
def description(self, obj):
str_slice = obj.shortdesc[:40]+'...'
return format_html(str_slice)
list_display = ( 'name', 'description', 'img', 'date', 'edit', 'delete')
admin.site.register(team, OurTeam) | 49.717241 | 164 | 0.659315 | 6,478 | 0.898599 | 0 | 0 | 0 | 0 | 0 | 0 | 1,835 | 0.254543 |
3e5e4207adc8922463d0a98148721a7ee4e6e6eb | 1,428 | py | Python | demos/cookie-clicker/cookie-clicker.py | Coding-Kakis/Automating-Shenanigans-in-Python | c8e00231468668fbe231e0b35e32b9e99d5bd458 | [
"MIT"
] | 1 | 2021-09-11T13:05:17.000Z | 2021-09-11T13:05:17.000Z | demos/cookie-clicker/cookie-clicker.py | Coding-Kakis/Automating-Shenanigans-in-Python | c8e00231468668fbe231e0b35e32b9e99d5bd458 | [
"MIT"
] | null | null | null | demos/cookie-clicker/cookie-clicker.py | Coding-Kakis/Automating-Shenanigans-in-Python | c8e00231468668fbe231e0b35e32b9e99d5bd458 | [
"MIT"
] | null | null | null | # Cookie clicker auto-clicker
# Works for the classic version here: https://orteil.dashnet.org/experiments/cookie/
import pyautogui
def locate_cookie():
"""
Returns the locations of the Big Cookie
Does not return until the cookie is found
"""
loc = None
while loc == None:
loc = pyautogui.locateCenterOnScreen('rsrc/bigcookie.png')
return loc
def click_cookie(loc, ntimes):
"""
Moves mouse to `loc` and clicks `ntimes`
"""
x,y = loc
pyautogui.moveTo(x,y)
for _ in range(ntimes):
pyautogui.click()
def round():
"""
Does 1 round.
Returns `Yes` if user wants to continue
Returns `No` otherwise.
"""
loc = locate_cookie()
pyautogui.alert(
title = "Found cookie!",
text = str(loc))
while True:
number_of_times = pyautogui.prompt(
title = "Continue?",
text = "Click how many times?")
if not number_of_times.isdigit():
pyautogui.alert(
title = "Error!",
text = "Input isn't an integer!")
continue
break
number_of_times = int(number_of_times)
click_cookie(loc, number_of_times)
reply = pyautogui.confirm(
title = "Done!",
text = "Another round?",
buttons = ["Yes", "No"])
return reply
while True:
reply = round()
if reply == "No":
break
| 19.833333 | 84 | 0.573529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 510 | 0.357143 |
3e5e941943139ba0623e31d497e78bf7beb9106d | 1,485 | py | Python | esupa/templatetags/esupa.py | Abando/esupa | 84888ff7d7879437659fd06a8707ac033f25b8ab | [
"Apache-2.0"
] | null | null | null | esupa/templatetags/esupa.py | Abando/esupa | 84888ff7d7879437659fd06a8707ac033f25b8ab | [
"Apache-2.0"
] | 4 | 2015-11-09T02:01:15.000Z | 2016-01-20T14:51:13.000Z | esupa/templatetags/esupa.py | ekevoo/esupa | 84888ff7d7879437659fd06a8707ac033f25b8ab | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright 2015, Ekevoo.com.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
#
from datetime import datetime
from django.template import Library
from django.template.defaultfilters import date
from django.utils.safestring import mark_safe
from django.utils.timesince import timesince, timeuntil
from django.utils.translation import ugettext
register = Library()
@register.filter(expects_localtime=True)
def relative(when, include_span_tag=True):
if not when:
return ''
delta = (when - datetime.now(tz=when.tzinfo)).total_seconds()
if abs(delta) < 10: # 10 seconds threshold
text = ugettext(u"just now")
elif delta < 0:
text = ugettext(u"%s ago") % timesince(when)
else:
text = ugettext(u"in %s") % timeuntil(when)
if include_span_tag:
text = mark_safe(u"<span title='%(absolute)s'>%(relative)s</span>"
% {'relative': text, 'absolute': date(when, 'r')})
return text
| 37.125 | 107 | 0.703704 | 0 | 0 | 0 | 0 | 596 | 0.401347 | 0 | 0 | 712 | 0.479461 |
3e62b645957319fa784b6eef70fbe8c8812a5575 | 3,305 | py | Python | ivy/pages.py | swsch/ivy | 4932cf7541acff13815be613b0f3335b21c86670 | [
"Unlicense"
] | null | null | null | ivy/pages.py | swsch/ivy | 4932cf7541acff13815be613b0f3335b21c86670 | [
"Unlicense"
] | null | null | null | ivy/pages.py | swsch/ivy | 4932cf7541acff13815be613b0f3335b21c86670 | [
"Unlicense"
] | null | null | null | # ------------------------------------------------------------------------------
# This module renders and writes HTML pages to disk.
# ------------------------------------------------------------------------------
import re
import os
from . import site
from . import events
from . import filters
from . import utils
from . import templates
from . import hashes
from typing import List
from .nodes import Node
# A Page instance represents a single HTML page in the rendered site.
class Page(dict):
# Each Page is initialized with an associated Node instance. This node's
# location in the parse tree determines the output filepath for the page.
def __init__(self, node: Node):
self['node'] = node
self['site'] = site.config
self['inc'] = site.includes()
self['is_homepage'] = node.parent is None
# Render the page into HTML and write the HTML to disk.
def write(self):
self['filepath'] = self.get_filepath()
self['classes'] = self.get_class_list()
self['templates'] = self.get_template_list()
# Render the page into HTML.
events.fire('render_page', self)
html = templates.render(self)
site.rendered(1)
# Filter the HTML before writing it to disk.
html = filters.apply('page_html', html, self)
# Rewrite all @root/ urls.
html = utils.rewrite_urls(html, self['filepath'])
# Write the page to disk. Avoid overwriting identical files.
if not hashes.match(self['filepath'], html):
utils.writefile(self['filepath'], html)
site.written(1)
# Determine the output filepath for the page.
def get_filepath(self) -> str:
slugs = self['node'].path or ['index']
suffix = site.config['extension']
if suffix == '/':
if slugs[-1] == 'index':
slugs[-1] += '.html'
else:
slugs.append('index.html')
else:
slugs[-1] += suffix
filepath = site.out(*slugs)
return filters.apply('page_path', filepath, self)
# Assemble an ordered list of hyphenated slugs for generating CSS classes
# and running template lookups.
# E.g. <Node @root/foo/bar//> -> ['node-foo-bar', 'node-foo', 'node'].
def get_slug_list(self) -> List[str]:
slugs = []
stack = ['node'] + self['node'].path
while stack:
slugs.append('-'.join(stack))
stack.pop()
return filters.apply('page_slugs', slugs, self)
# Assemble a list of potential template names for the page.
def get_template_list(self) -> List[str]:
template_list = self.get_slug_list()
if 'template' in self['node']:
template_list.insert(0, self['node']['template'])
return filters.apply('page_templates', template_list, self)
# Assemble a list of CSS classes for the page's <body> element.
def get_class_list(self) -> List[str]:
class_list = self.get_slug_list()
if self['is_homepage']:
class_list.append('homepage')
if 'classes' in self['node']:
for item in str(self['node']['classes']).split(','):
class_list.append(item.strip())
return filters.apply('page_classes', class_list, self)
| 35.537634 | 80 | 0.579728 | 2,819 | 0.85295 | 0 | 0 | 0 | 0 | 0 | 0 | 1,303 | 0.394251 |
3e64ce743607e76cfc572cc4ea2cfe77fba2b173 | 5,646 | py | Python | mvyaml/mvyaml.py | gchiesa/mvyaml | 6d4c580bc596d220b45e6a6ccf9b2c3ef582f554 | [
"MIT"
] | null | null | null | mvyaml/mvyaml.py | gchiesa/mvyaml | 6d4c580bc596d220b45e6a6ccf9b2c3ef582f554 | [
"MIT"
] | null | null | null | mvyaml/mvyaml.py | gchiesa/mvyaml | 6d4c580bc596d220b45e6a6ccf9b2c3ef582f554 | [
"MIT"
] | null | null | null | """Main module."""
from copy import deepcopy
from datetime import datetime
from difflib import Differ
from io import StringIO
from typing import IO, Iterable, AnyStr
from datadiff.tools import assert_equal
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
class MVYamlVersionNotFoundException(Exception):
pass
class MVYamlFileException(Exception):
pass
def as_yaml(data: Iterable) -> AnyStr:
yaml = YAML()
output = StringIO()
yaml.dump(data, output)
return output.getvalue()
class MVYaml(object):
protected_keys = ('__current', '__type', )
def __init__(self, base64=False):
self._b64 = base64
self._raw = CommentedMap()
self._yaml = YAML()
self._curr_version = None
self._curr_data = None
self._create()
def _create(self):
tag = self._make_tag()
self._raw[tag] = CommentedMap()
self._raw.insert(0, '__current', tag, 'current version')
self._raw.insert(1, '__type', None, 'base64 if value are base64')
self._commit(tag=tag, comment='Initial version')
def import_yaml(self, file: AnyStr = None, stream: AnyStr = None):
data = None
if file:
with open(file, 'r') as fp:
data = fp.read()
imported_data = self._yaml.load(data or stream)
self.override(imported_data)
return self
def load(self, file_handler: AnyStr = None, stream_data: AnyStr = None):
data = None
if file_handler:
with open(file_handler, 'r') as fp:
data = fp.read()
self._raw = self._yaml.load(data or stream_data)
if self.protected_keys not in self._raw.keys():
raise MVYamlFileException(f'Not a valid mvyaml file. Perhaps is a yaml you want to import with '
f'import_yaml()?')
return self
def write(self, file_handler: IO = None, comment: AnyStr = None) -> [AnyStr, None]:
if not self._raw:
return
if self._has_changes():
self._commit(comment=comment)
output = file_handler or StringIO()
self._yaml.dump(self._raw, output)
return output.getvalue() if not file_handler else None
@property
def versions(self):
if not self._raw:
return []
return [k for k in self._raw.keys() if k not in self.protected_keys]
@property
def current(self):
return self._raw['__current']
@property
def data(self):
if not self._curr_data:
self._curr_data = deepcopy(self._raw[self._curr_version or self.current])
return self._curr_data
def with_version(self, version: str = '__current'):
if version not in self.versions:
raise MVYamlVersionNotFoundException(f'version {version} not found')
self._curr_version = version
self._curr_data = None
return self
@staticmethod
def _make_tag() -> str:
d = datetime.utcnow().isoformat()
return d
def override(self, data: [Iterable]):
self._curr_data = CommentedMap()
self._curr_data.update(data)
self._commit(comment='Overridden')
return self
def _commit(self, *args, **kwargs):
return self._commit_head(*args, **kwargs)
def _commit_head(self, tag: AnyStr = None, comment: AnyStr = None):
"""
apply the modifications on curr_data to the underling opened version
and create a new tag
"""
commented_map = CommentedMap()
commented_map.update(self._curr_data or self.data)
if tag:
self._raw[tag] = commented_map
self._raw['__current'] = tag
else:
new_tag = self._make_tag()
self._raw.insert(2, new_tag, commented_map, comment=comment)
self._raw['__current'] = new_tag
self._curr_version = None
self._curr_data = None
return self
def _commit_tail(self, tag: AnyStr = None, comment: AnyStr = None):
"""
apply the modifications on curr_data to the underling opened version
and create a new tag
"""
commented_map = CommentedMap()
commented_map.update(self._curr_data or self.data)
if tag:
self._raw[tag] = commented_map
self._raw['__current'] = tag
else:
new_tag = self._make_tag()
self._raw.insert(len(self._raw.keys()), new_tag, commented_map, comment=comment)
self._raw['__current'] = new_tag
self._curr_version = None
self._curr_data = None
return self
def _has_changes(self):
orig = self._raw[self._curr_version or self.current]
current = self._curr_data or self.data
try:
assert_equal(orig, current)
except AssertionError:
return True
return False
@property
def changes(self) -> AnyStr:
if not self._has_changes():
return ''
yaml_orig = as_yaml(self._raw[self._curr_version or self.current])
yaml_curr = as_yaml(self._curr_data)
differ = Differ()
result = list(differ.compare(
yaml_orig.splitlines(),
yaml_curr.splitlines()
))
return '\n'.join(result)
def set_current(self, version_label: AnyStr):
if version_label not in self.versions:
raise MVYamlVersionNotFoundException(f'request version [{version_label}] not found')
self._raw['__current'] = version_label
self.with_version(version_label)
return self
| 32.079545 | 108 | 0.613355 | 5,215 | 0.923663 | 0 | 0 | 905 | 0.16029 | 0 | 0 | 624 | 0.110521 |
3e667e1d0cd277296f1a4241baaa4af12a192a1e | 1,889 | py | Python | tests/test_monkeypatch.py | marcdemers/py_vollib_vectorized | 0c2519ff58e3caf2caee37ca37d878e6e5e1eefd | [
"MIT"
] | 40 | 2020-12-17T16:36:32.000Z | 2022-02-07T20:11:26.000Z | tests/test_monkeypatch.py | marcdemers/py_vollib_vectorized | 0c2519ff58e3caf2caee37ca37d878e6e5e1eefd | [
"MIT"
] | 8 | 2021-01-20T04:17:50.000Z | 2022-02-18T07:02:27.000Z | tests/test_monkeypatch.py | marcdemers/py_vollib_vectorized | 0c2519ff58e3caf2caee37ca37d878e6e5e1eefd | [
"MIT"
] | 13 | 2020-12-30T21:05:10.000Z | 2022-03-27T12:30:26.000Z | import unittest
from unittest import TestCase
class Test(TestCase):
def test_02_vectorized_imports(self):
print("test02")
import py_vollib.black.implied_volatility
import py_vollib.black_scholes.implied_volatility
import py_vollib.black_scholes_merton.implied_volatility
import py_vollib.black
import py_vollib.black_scholes
import py_vollib.black_scholes_merton
import py_vollib.black.greeks.numerical
import py_vollib.black_scholes.greeks.numerical
import py_vollib.black_scholes_merton.greeks.numerical
import py_vollib_vectorized
#IVs
self.assertTrue(
py_vollib.black.implied_volatility.implied_volatility.__module__ == "py_vollib_vectorized.implied_volatility")
self.assertTrue(
py_vollib.black_scholes.implied_volatility.implied_volatility.__module__ == "py_vollib_vectorized.implied_volatility")
self.assertTrue(
py_vollib.black_scholes_merton.implied_volatility.implied_volatility.__module__ == "py_vollib_vectorized.implied_volatility")
#Models
self.assertTrue(
py_vollib.black.black.__module__ == "py_vollib_vectorized.models")
self.assertTrue(
py_vollib.black_scholes.black_scholes.__module__ == "py_vollib_vectorized.models")
self.assertTrue(
py_vollib.black_scholes_merton.black_scholes_merton.__module__ == "py_vollib_vectorized.models")
#Greeks
self.assertTrue(
py_vollib.black.greeks.numerical.delta.__module__ == "py_vollib_vectorized.greeks")
self.assertTrue(
py_vollib.black_scholes.greeks.numerical.delta.__module__ == "py_vollib_vectorized.greeks")
self.assertTrue(
py_vollib.black_scholes_merton.greeks.numerical.delta.__module__ == "py_vollib_vectorized.greeks")
| 42.931818 | 137 | 0.728957 | 1,840 | 0.97406 | 0 | 0 | 0 | 0 | 0 | 0 | 323 | 0.17099 |
3e6846fed01d2e5081085a1f9b9ca2203cbb1dad | 1,137 | py | Python | b2share/modules/deposit/search.py | hjhsalo/b2share-new | 2a2a961f7cc3a5353850e9a409fd7e879c715b0b | [
"MIT"
] | null | null | null | b2share/modules/deposit/search.py | hjhsalo/b2share-new | 2a2a961f7cc3a5353850e9a409fd7e879c715b0b | [
"MIT"
] | null | null | null | b2share/modules/deposit/search.py | hjhsalo/b2share-new | 2a2a961f7cc3a5353850e9a409fd7e879c715b0b | [
"MIT"
] | 1 | 2020-09-29T10:56:03.000Z | 2020-09-29T10:56:03.000Z |
from elasticsearch_dsl import Q, TermsFacet
from flask import has_request_context
from flask_login import current_user
from invenio_search import RecordsSearch
from invenio_search.api import DefaultFilter
from .permissions import admin_permission_factory
def deposits_filter():
"""Filter list of deposits.
Permit to the user to see all if:
* The user is an admin (see
func:`invenio_deposit.permissions:admin_permission_factory`).
* It's called outside of a request.
Otherwise, it filters out any deposit where user is not the owner.
"""
if not has_request_context() or admin_permission_factory().can():
return Q()
else:
return Q(
'match', **{'_deposit.owners': getattr(current_user, 'id', 0)}
)
class DepositSearch(RecordsSearch):
    """Default search class."""
    class Meta:
        """Configuration for deposit search."""
        # Elasticsearch index queried by this search class.
        index = 'deposits'
        # None lets invenio-search include every document type of the index.
        doc_types = None
        # Return all stored fields for each hit.
        fields = ('*', )
        # Aggregations exposed through the REST facet interface.
        facets = {
            'status': TermsFacet(field='_deposit.status'),
        }
        # Applied when no explicit filter is given; hides other users' deposits
        # unless the requester is an admin (see deposits_filter above).
        default_filter = DefaultFilter(deposits_filter)
3e69d58aa5e27029fd5fb9a2126945c9c542b4c9 | 1,586 | py | Python | code/find_nconfsources.py | fornax-navo/fornax-demo-notebooks | 49525d5bed3440d0d1903c29b9a1af8e0ff7e975 | [
"BSD-3-Clause"
] | 1 | 2022-02-03T18:12:59.000Z | 2022-02-03T18:12:59.000Z | code/find_nconfsources.py | fornax-navo/fornax-demo-notebooks | 49525d5bed3440d0d1903c29b9a1af8e0ff7e975 | [
"BSD-3-Clause"
] | 1 | 2022-03-11T21:17:35.000Z | 2022-03-11T22:28:46.000Z | code/find_nconfsources.py | fornax-navo/fornax-demo-notebooks | 49525d5bed3440d0d1903c29b9a1af8e0ff7e975 | [
"BSD-3-Clause"
] | 2 | 2022-02-01T00:57:35.000Z | 2022-02-13T22:20:55.000Z | import numpy as np
from determine_source_type import determine_source_type
#function to figure out how many sources are in cutout
#and set up necessary tractor input for those sources
def find_nconfsources(raval, decval, gal_type, fluxval, x1, y1, cutout_width, subimage_wcs, df):
#setup to collect sources
objsrc = []
#keep the main source
objsrc.append(determine_source_type(raval, decval, gal_type, fluxval, x1, y1))
#find confusing sources with real fluxes
radiff = (df.ra-raval)*np.cos(decval)
decdiff= df.dec-decval
posdiff= np.sqrt(radiff**2+decdiff**2)*3600.
det = df.ks_flux_aper2 > 0 #make sure they have fluxes
#make an index into the dataframe for those objects within the same cutout
good = (abs(radiff*3600.) < cutout_width/2) & (abs(decdiff*3600.) < cutout_width/2) & (posdiff > 0.2) & det
nconfsrcs = np.size(posdiff[good])
#add confusing sources
#if there are any confusing sources
if nconfsrcs > 0:
ra_conf = df.ra[good].values
dec_conf = df.dec[good].values
flux_conf = df.ks_flux_aper2[good].values #should all be real fluxes
type_conf = df.type[good].values
for n in range(nconfsrcs):
#now need to set the values of x1, y1 at the location of the target *in the cutout*
xn, yn = subimage_wcs.all_world2pix(ra_conf[n], dec_conf[n],1)
objsrc.append(determine_source_type(ra_conf[n], dec_conf[n], type_conf[n], flux_conf[n], xn, yn))
return objsrc, nconfsrcs
| 38.682927 | 111 | 0.663934 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 470 | 0.296343 |
3e6ad0d35aefd868861d6a14144cf80665b8e7ea | 274 | py | Python | setup.py | dalejung/earthdragon | 6fc9308288361bbe54d1d0107b4a77e3f27cd9be | [
"MIT"
] | 1 | 2019-12-02T15:10:49.000Z | 2019-12-02T15:10:49.000Z | setup.py | dalejung/earthdragon | 6fc9308288361bbe54d1d0107b4a77e3f27cd9be | [
"MIT"
] | 5 | 2015-08-13T16:00:04.000Z | 2016-03-14T18:43:11.000Z | setup.py | dalejung/earthdragon | 6fc9308288361bbe54d1d0107b4a77e3f27cd9be | [
"MIT"
] | null | null | null | from distutils.core import setup
DISTNAME='earthdragon'
FULLVERSION='0.1'
setup(
name=DISTNAME,
version=FULLVERSION,
packages=['earthdragon'],
install_requires = [
'asttools',
'toolz',
'typeguard',
'more_itertools',
]
)
| 16.117647 | 32 | 0.605839 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 0.273723 |
3e6ad6e1c6ce978983b60511c62b60c613bacb9a | 92 | py | Python | script.py | juand1809/JuanVelasquez_Ejercicio23 | 40b20dc3db6e3a9a884265a950dd3ccac1f7a615 | [
"MIT"
] | null | null | null | script.py | juand1809/JuanVelasquez_Ejercicio23 | 40b20dc3db6e3a9a884265a950dd3ccac1f7a615 | [
"MIT"
] | null | null | null | script.py | juand1809/JuanVelasquez_Ejercicio23 | 40b20dc3db6e3a9a884265a950dd3ccac1f7a615 | [
"MIT"
] | null | null | null | import os
a = os.system("g++ sumatoria.cpp -o sumatoria.x")
# Compile the C++ program, then run the binary only when compilation succeeded.
# The original overwrote the compile status without checking it, so a failed
# build would still (re)run a stale ./sumatoria.x.
a = os.system("g++ sumatoria.cpp -o sumatoria.x")
if a == 0:
    a = os.system("./sumatoria.x")
3e6b0a9948d6ab9ae3bf82cdb88963f7746825d0 | 334 | py | Python | consultas/urls.py | Valarr/django-app | 2faac602ce5f36dc9007d4af7a3acd38504f4f95 | [
"MIT"
] | null | null | null | consultas/urls.py | Valarr/django-app | 2faac602ce5f36dc9007d4af7a3acd38504f4f95 | [
"MIT"
] | null | null | null | consultas/urls.py | Valarr/django-app | 2faac602ce5f36dc9007d4af7a3acd38504f4f95 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('consultaticket', views.consultaticket, name='consultaticket'),
path('consultadecredito', views.consultadecredito, name='consultadecredito'),
path('mostrarticket', views.mostrarticket, name='mostrarticket'),
] | 33.4 | 81 | 0.730539 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 109 | 0.326347 |
3e6c1c6b5fbe5a4ffcca63260b56292216d80f44 | 1,973 | py | Python | order_history.py | zylizy/DBMS_Project | d6ff25d566a362495e3b4eb68d48d8400f2f20e6 | [
"MIT"
] | null | null | null | order_history.py | zylizy/DBMS_Project | d6ff25d566a362495e3b4eb68d48d8400f2f20e6 | [
"MIT"
] | null | null | null | order_history.py | zylizy/DBMS_Project | d6ff25d566a362495e3b4eb68d48d8400f2f20e6 | [
"MIT"
] | null | null | null | import streamlit as st
from db_functions import *
def order_history():
st.title("Order History")
sorts = ['None','category','time']
sql_userids = f"select pk_user_id from Users"
user_ids = query_db(sql_userids)['pk_user_id'].tolist()
user_id = st.selectbox("Please select your userid", user_ids)
# get user info
sql_user_info = f"select * from Users where pk_user_id={user_id}"
df_user_info = query_db(sql_user_info)
username = df_user_info['username'].tolist()[0]
st.header(username+ '\'s Basic Information')
st.write(df_user_info)
sort = st.selectbox('How would you like to sort them?',sorts)
if sort == 'None':
# display order history
sql_order_history = f"select o.pk_order_id as oid, i.name as item, o.create_date as date, p.quantity " \
f"from orders as o, purchased_items as p, items as i " \
f"where o.user_id={user_id} and o.pk_order_id=p.order_id and i.pk_item_id=p.item_id " \
f"order by o.pk_order_id;"
if sort == 'category':
sql_order_history = f"select count(*) as count, cat.name as category " \
f"from orders as o, purchased_items as p, items as i, categories as cat " \
f"where o.user_id={user_id} and o.pk_order_id=p.order_id and i.pk_item_id=p.item_id " \
f"and i.category_id = cat.pk_category_id group by cat.name;"
if sort == 'time':
sql_order_history = f"select count(*) as count, o.create_date as time " \
f"from orders as o, purchased_items as p, items as i " \
f"where o.user_id={user_id} and o.pk_order_id=p.order_id and i.pk_item_id=p.item_id " \
f"group by o.create_date;"
df_order_history = query_db(sql_order_history)
st.header(username + "\'s Order History:")
st.write(df_order_history) | 51.921053 | 115 | 0.611759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,034 | 0.524075 |
3e6d175a2c46fd4c086a5aa6dbda506eabe35fd4 | 1,415 | py | Python | cogs/commands/utility/8ball.py | teSill/temflix | 31d40265fa71695966c6178145a1057cd2aeda27 | [
"MIT"
] | 3 | 2020-12-21T20:51:56.000Z | 2022-01-04T11:55:45.000Z | cogs/commands/utility/8ball.py | teSill/temflix | 31d40265fa71695966c6178145a1057cd2aeda27 | [
"MIT"
] | null | null | null | cogs/commands/utility/8ball.py | teSill/temflix | 31d40265fa71695966c6178145a1057cd2aeda27 | [
"MIT"
] | null | null | null | import discord
from discord.ext import commands
import random
class EightBall(commands.Cog):
def __init__(self, client):
self.client = client
@commands.command(aliases=["8ball", "8-ball"], description="Have the magic 8-ball answer your most burning questions.")
async def eight_ball(self, ctx):
responses = ["It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes - definitely.",
"I'm the most certain I've ever been that the answer is yes.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Definitely, maybe.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good",
"Very doubtful",
"Certainly not.",
"How could you even suggest otherwise?",
"I'm the most certain I've ever been that the answer is no.",
]
await ctx.send(random.choice(responses))
def setup(client):
client.add_cog(EightBall(client))
| 36.282051 | 123 | 0.466431 | 1,291 | 0.912367 | 0 | 0 | 1,194 | 0.843816 | 1,070 | 0.756184 | 557 | 0.39364 |
3e70d3317c13de0952315e701a55b920df03ec85 | 1,441 | py | Python | 20210607_fizzbuzz.py | sayloren/codechallenges | b31b64c176a1c03c937e915f3b60657669495681 | [
"Apache-2.0"
] | null | null | null | 20210607_fizzbuzz.py | sayloren/codechallenges | b31b64c176a1c03c937e915f3b60657669495681 | [
"Apache-2.0"
] | null | null | null | 20210607_fizzbuzz.py | sayloren/codechallenges | b31b64c176a1c03c937e915f3b60657669495681 | [
"Apache-2.0"
] | null | null | null | # code golf challenge
# https://code.golf/fizz-buzz#python
# wren 20210607
# Print the numbers from 1 to 100 inclusive, each on their own line.
# If, however, the number is a multiple of three then print Fizz instead,
# and if the number is a multiple of five then print Buzz.
# For numbers which are multiples of both three and five then print FizzBuzz.
# iterate through reange of required numbers - plus one because range is
# zero based if 3 print fizz, if 5 print buzz, and if both fizzbuzz
# i feel like there should be a way to have the 15/fizzbuzz not be a
# seperate condition
for number in range(1,101):
# if there are no remainders for the number in the range divied by 15
# condition for both 5 and 3 - 15 is lowest divisor, where both fizz and buzz
if number%15==0):print('FizzBuzz')
# if there are no remainders for the number in the range divied by 3
# elif condition for just 3 - fizz
elif number%3 ==0:print('Fizz')
# if there are no remainders for the number in the range divied by 5
# elif condition for just 5, buzz
elif number%5==0:print('Buzz')
# else none of the conditions, just print the number
else:print(number)
# alternative approach in one liner
# for loop the same, but conditions where there are no remainders for the
# divisability are all wrapped in a print statement
for number in range(1, 101): print("Fizz"*(number%3==0)+"Buzz"*(number%5==0) or str(number))
| 46.483871 | 92 | 0.725191 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,169 | 0.811242 |
3e730fa82d3520ad13dc948a854e1cd1df0331d4 | 275 | py | Python | setup.py | grro/install-raspberry | f6db2d451c1277127a77fdc6b00ea55708f0bd17 | [
"Apache-2.0"
] | null | null | null | setup.py | grro/install-raspberry | f6db2d451c1277127a77fdc6b00ea55708f0bd17 | [
"Apache-2.0"
] | null | null | null | setup.py | grro/install-raspberry | f6db2d451c1277127a77fdc6b00ea55708f0bd17 | [
"Apache-2.0"
] | null | null | null | from setuptools import setup
setup(
name='install-raspberry',
version='',
packages=[''],
url='https://github.com/grro/httpstreamproxy',
license='Apache Software License',
author='grro',
author_email='gregor.roth@web.de',
description='test'
)
| 21.153846 | 50 | 0.658182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 121 | 0.44 |
3e73f6fed18b11f2933d0b20530ca1d6b4de649e | 2,701 | py | Python | py_privatekonomi/tests/swedbank/test_swedbank_db.py | nilsFK/py-privatekonomi | 9172dfa85e439e18558a60fdb3b69e956e70e783 | [
"MIT"
] | 2 | 2015-01-04T21:27:45.000Z | 2015-01-05T13:31:52.000Z | py_privatekonomi/tests/swedbank/test_swedbank_db.py | nilsFK/py-privatekonomi | 9172dfa85e439e18558a60fdb3b69e956e70e783 | [
"MIT"
] | 28 | 2015-01-04T22:13:24.000Z | 2019-11-29T13:41:01.000Z | py_privatekonomi/tests/swedbank/test_swedbank_db.py | nilsFK/py-privatekonomi | 9172dfa85e439e18558a60fdb3b69e956e70e783 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import inspect
from py_privatekonomi.utilities import common
from py_privatekonomi.tests.test_base import TestBase
from py_privatekonomi.tests.dataset.swedbank.sample1 import test_data as test_data_1
from py_privatekonomi.tests.dataset.swedbank.sample2 import test_data as test_data_2
from py_privatekonomi.tests.dataset.swedbank.sample3 import test_data as test_data_3
from py_privatekonomi.tests.dataset.swedbank.sample5 import test_data as test_data_5
class TestSwedbankDB(TestBase):
def setUp(self):
pass
def test_sample1_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample1',
'swedbank',
'swedbank',
persist=True,
config=self.get_default_config())
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_1, format_as_mapper=True)
self.assertPersisted(test_data_1)
def test_sample2_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample2',
'swedbank',
'swedbank',
persist=True,
config=self.get_default_config())
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_2, format_as_mapper=True)
self.assertPersisted(test_data_2)
def test_sample3_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample3',
'swedbank',
'swedbank',
config=self.get_default_config(),
persist=True)
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_3, format_as_mapper=True)
self.assertPersisted(test_data_3)
def test_sample5_db(self):
results = self.executeApp('py_privatekonomi.core.apps.example3',
'samples/swedbank/sample5',
'swedbank',
'swedbank',
config=self.get_default_config(),
persist=True)
if results is False:
print(("Skipping:", inspect.stack()[0][3]))
else:
self.assertFormatted(results, test_data_5, format_as_mapper=True)
self.assertPersisted(test_data_5)
if __name__ == '__main__':
unittest.main() | 37 | 84 | 0.652721 | 2,017 | 0.74676 | 0 | 0 | 0 | 0 | 0 | 0 | 430 | 0.1592 |
3e74eb605f50a2789671592734f1dea5fd163012 | 918 | py | Python | gharchive/parse_json.py | IAMABOY/Mining-Github | cf11c94e72b11f3ce9d638b562df438c8e56d149 | [
"MIT"
] | 8 | 2019-12-08T11:57:59.000Z | 2022-01-24T06:26:56.000Z | gharchive/parse_json.py | IAMABOY/Mining-Github | cf11c94e72b11f3ce9d638b562df438c8e56d149 | [
"MIT"
] | null | null | null | gharchive/parse_json.py | IAMABOY/Mining-Github | cf11c94e72b11f3ce9d638b562df438c8e56d149 | [
"MIT"
] | 2 | 2019-12-17T02:38:55.000Z | 2021-12-16T01:53:11.000Z | import sys
import os
import json
import gzip
def jsonReader(inputJsonFilePath,pos):
flag = False
with gzip.open(inputJsonFilePath, 'r') as jsonContent:
for rowNumber, line in enumerate(jsonContent, start=1):
try:
#此处加上flag的目的在于,当程序挂掉时候,可以根据域名从指定位置开始,不必重头开始跑
if rowNumber == pos:
flag = True
if not flag:
continue
line = line.strip()
if len(line) <= 0:
continue
jsonObject = json.loads(line)
repoInfo = jsonObject.get('repo',None)
if repoInfo == '' or repoInfo == None:
continue
print(repoInfo)
except Exception as e:
print(e)
if __name__ == '__main__':
jsonReader('2019-09-19-10.json.gz',1) | 24.157895 | 63 | 0.484749 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 166 | 0.166667 |
3e753e4b76a7bccde83190218fa4e3ea302764fe | 393 | py | Python | iotalib/check_roof.py | WWGolay/iota | f3e67502d7f96bb836b45b7eca4ebb9fe5490e6d | [
"MIT"
] | null | null | null | iotalib/check_roof.py | WWGolay/iota | f3e67502d7f96bb836b45b7eca4ebb9fe5490e6d | [
"MIT"
] | null | null | null | iotalib/check_roof.py | WWGolay/iota | f3e67502d7f96bb836b45b7eca4ebb9fe5490e6d | [
"MIT"
] | null | null | null | #!/usr/bin/python
import pycurl
from io import BytesIO
def checkOpen():
isOpen = False
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'https://www.winer.org/Site/Roof.php')
c.setopt(c.WRITEDATA, buffer)
c.perform()
c.close()
body = buffer.getvalue()
if body.find(b'ROOFPOSITION=OPEN') > -1:
isOpen = True
return(isOpen) | 21.833333 | 59 | 0.592875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 0.19084 |
3e7863d676fdd4741e30575b304165077d18541c | 2,238 | py | Python | egg/app.py | eanorambuena/Driver | 3cb14f5d741c6bae364326305ae0ded04e10e9d4 | [
"MIT"
] | null | null | null | egg/app.py | eanorambuena/Driver | 3cb14f5d741c6bae364326305ae0ded04e10e9d4 | [
"MIT"
] | null | null | null | egg/app.py | eanorambuena/Driver | 3cb14f5d741c6bae364326305ae0ded04e10e9d4 | [
"MIT"
] | null | null | null | # Imports
from egg.resources.console import get, clearConsole
from egg.resources.constants import *
from egg.resources.modules import install, upgrade, Repo
from egg.resources.help import help
from egg.resources.auth import login, register
"""
FUNCTION eggConsole(condition: bool = True)
Display the Egg Console
Currently, the Egg Console commands are:
$nqs Start the NQS Depeloper console
$new Start the News Journalist console
$login Log in Egg-cosystem *comming soon*
$register Register in Egg-cosystem *comming soon*
$install Install a pip package
$upgrade Upgrade a pip package
$pull Import a package stored on a GitHUb repository *comming soon: currently, just use github_com package*
$help Get started command
$clear Clear the Egg Console
$end End the Egg Console
WARNING:
Always use $end command in every console you run
*ONLY use a condition different to True as an argument of eggConsole(condition) if you know what are you doing**
This is the reason why condition only allows <<bool>> as data type
"""
def eggConsole(condition: bool = True):
print(white+"Egg Console is now running")
logged=0
while condition:
i=get("egg")
if i=="$nqs":
from nqs.developer.app import developerConsole
developerConsole()
elif i=="$new":
from news.app import journalistConsole
journalistConsole()
elif i=="$login":
login()
elif i=="$register":
register()
elif i=="$install":
print(white+"Package:")
name=get("egg")
install(name)
elif i=="$upgrade":
print(white+"Package:")
name=get("egg")
upgrade(name)
elif i=="$pull":
print(white+"Repo:")
name=get("egg")
repo=Repo(name)
print(white+"Package:")
package=get("egg")
last=repo.pull(package)
# *comming soon*
elif i=="$help":
help()
elif i=="$clear":
clearConsole()
elif i=="$end":
print(white+"Egg Console stopped running")
return "done"
else:
pass | 32.434783 | 113 | 0.605004 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,056 | 0.47185 |
3e78c123f36641a6b522ac2d459248b01e28de60 | 1,204 | py | Python | hello/hello_pil.py | East196/hello-py | a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21 | [
"Apache-2.0"
] | 1 | 2017-10-23T14:58:47.000Z | 2017-10-23T14:58:47.000Z | hello/hello_pil.py | East196/hello-py | a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21 | [
"Apache-2.0"
] | null | null | null | hello/hello_pil.py | East196/hello-py | a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21 | [
"Apache-2.0"
] | 1 | 2018-04-06T07:49:18.000Z | 2018-04-06T07:49:18.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from PIL import Image, ImageDraw, ImageFont, ImageFilter
import random
im = Image.open('F:/workspace/python/data/backpink.jpg')
im2 = im.filter(ImageFilter.BLUR)
im2.save('F:/workspace/python/data/backpink_blur.png', 'png')
im2.save('F:/workspace/python/data/backpink_blur.jpg', 'jpeg')
# 随机字母:
def random_char():
return chr(random.randint(65, 90))
# 随机颜色1:
def random_color():
return random.randint(64, 255), random.randint(64, 255), random.randint(64, 255)
# 随机颜色2:
def random_color2():
return random.randint(32, 127), random.randint(32, 127), random.randint(32, 127)
# 240 x 60:
width = 60 * 4
height = 60
image = Image.new('RGB', (width, height), (255, 255, 255))
# 创建Font对象:
font = ImageFont.truetype('C:/Windows/Fonts/Arial.ttf', 36)
# 创建Draw对象:
draw = ImageDraw.Draw(image)
# 填充每个像素:
for x in range(width):
for y in range(height):
draw.point((x, y), fill=random_color())
# 输出文字:
for t in range(4):
draw.text((60 * t + 10, 10), random_char(), font=font, fill=random_color2())
# 模糊:
image = image.filter(ImageFilter.BLUR)
image.save('code.jpg', 'jpeg')
print((image.format, image.size, image.mode))
# image.show()
| 24.571429 | 84 | 0.680233 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 386 | 0.304416 |
3e7a2d3d3d5314bdb0dff02d7d69496583791bdc | 382 | py | Python | terra_layer/migrations/0028_layergroup_exclusive.py | Terralego/terra-layer | 6564a63d389503d3ae1f63ce46e674b228d6764b | [
"MIT"
] | 1 | 2019-08-08T15:17:32.000Z | 2019-08-08T15:17:32.000Z | terra_layer/migrations/0028_layergroup_exclusive.py | Terralego/terra-layer | 6564a63d389503d3ae1f63ce46e674b228d6764b | [
"MIT"
] | 65 | 2019-10-21T10:05:00.000Z | 2022-03-08T14:08:27.000Z | terra_layer/migrations/0028_layergroup_exclusive.py | Terralego/terra-layer | 6564a63d389503d3ae1f63ce46e674b228d6764b | [
"MIT"
] | null | null | null | # Generated by Django 2.1.9 on 2019-07-16 10:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("terra_layer", "0027_auto_20190715_1631")]
operations = [
migrations.AddField(
model_name="layergroup",
name="exclusive",
field=models.BooleanField(default=False),
)
]
| 22.470588 | 63 | 0.636126 | 289 | 0.756545 | 0 | 0 | 0 | 0 | 0 | 0 | 108 | 0.282723 |
3e7a7495c3ea96af4211a7bee33396978138c459 | 605 | py | Python | docs/examples/best_practices/dynamic_configuration/cdk_appconfig.py | ran-isenberg/aws-lambda-handler-cookbook | adfe58dacd87315151265818869bb842c7eb4971 | [
"MIT"
] | 61 | 2022-02-07T05:21:14.000Z | 2022-03-27T14:11:30.000Z | docs/examples/best_practices/dynamic_configuration/cdk_appconfig.py | ran-isenberg/aws-lambda-handler-cookbook | adfe58dacd87315151265818869bb842c7eb4971 | [
"MIT"
] | 17 | 2022-02-26T05:25:31.000Z | 2022-03-16T20:02:46.000Z | docs/examples/best_practices/dynamic_configuration/cdk_appconfig.py | ran-isenberg/aws-lambda-handler-cookbook | adfe58dacd87315151265818869bb842c7eb4971 | [
"MIT"
] | 4 | 2022-02-17T16:35:27.000Z | 2022-03-07T03:13:07.000Z | from aws_cdk import Stack
from aws_lambda_handler_cookbook.service_stack.configuration.configuration_construct import ConfigurationStore
from aws_lambda_handler_cookbook.service_stack.constants import CONFIGURATION_NAME, ENVIRONMENT, SERVICE_NAME
from constructs import Construct
class CookBookConfigurationStack(Stack):
# pylint: disable=redefined-builtin
def __init__(self, scope: Construct, id: str, **kwargs) -> None:
super().__init__(scope, id, **kwargs)
self.dynamic_configuration = ConfigurationStore(self, 'dynamic_conf', ENVIRONMENT, SERVICE_NAME, CONFIGURATION_NAME)
| 43.214286 | 124 | 0.809917 | 322 | 0.532231 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 0.080992 |
3e7d231b81300bc8be65b86f6758957fdbb26baa | 653 | py | Python | backend-project/small_eod/users/models.py | merito/small_eod | ab19b82f374cd7c4b21d8f9412657dbe7f7f03e2 | [
"MIT"
] | 64 | 2019-12-30T11:24:03.000Z | 2021-06-24T01:04:56.000Z | backend-project/small_eod/users/models.py | merito/small_eod | ab19b82f374cd7c4b21d8f9412657dbe7f7f03e2 | [
"MIT"
] | 465 | 2018-06-13T21:43:43.000Z | 2022-01-04T23:33:56.000Z | backend-project/small_eod/users/models.py | merito/small_eod | ab19b82f374cd7c4b21d8f9412657dbe7f7f03e2 | [
"MIT"
] | 72 | 2018-12-02T19:47:03.000Z | 2022-01-04T22:54:49.000Z | from django.contrib.auth.models import AbstractUser
from ..notifications.utils import TemplateKey, TemplateMailManager
class User(AbstractUser):
def notify(self, **kwargs):
kwargs["user"] = self
enabled = self.get_enabled_notifications()
key = getattr(
TemplateKey, f"{kwargs['source']}_{kwargs['action']}".upper(), None
)
if key not in enabled:
return False
return TemplateMailManager.send(
template_key=key, recipient_list=[self.email], context=kwargs
)
def get_enabled_notifications(self):
return TemplateMailManager.TEMPLATE_MAP.keys()
| 29.681818 | 79 | 0.658499 | 530 | 0.811639 | 0 | 0 | 0 | 0 | 0 | 0 | 46 | 0.070444 |
3e7efc62df24d3372d57ba9f3602f16dfbfbeff6 | 2,689 | py | Python | rtlsdr_sstv/utils.py | martinber/rtlsdr_sstv | f59ca523408e949f98c4b81b09b2d46232111f4a | [
"MIT"
] | 3 | 2019-03-16T01:20:09.000Z | 2020-12-31T12:31:17.000Z | rtlsdr_sstv/utils.py | martinber/rtlsdr_sstv | f59ca523408e949f98c4b81b09b2d46232111f4a | [
"MIT"
] | null | null | null | rtlsdr_sstv/utils.py | martinber/rtlsdr_sstv | f59ca523408e949f98c4b81b09b2d46232111f4a | [
"MIT"
] | 1 | 2020-12-27T02:31:18.000Z | 2020-12-27T02:31:18.000Z | import collections
import math
import numpy as np
def mapeadora(value):
valor_mapeado = int((value-1500)/800*255)
return valor_mapeado
def escribir_pixel(img, columna, linea, canal, valor):
'''funcion encargada de escribir pixel por pixel la imagen'''
if linea >= img.height:
return
if canal == "lum":
prev = img.getpixel((columna,linea-1))
datapixel = (mapeadora(valor), prev[1], prev[2])
img.putpixel((columna,linea-1), datapixel)
if canal == "cr":
prev = img.getpixel((columna,linea-1))
nxt_prev = img.getpixel((columna,linea))
datapixel = (prev[0], prev[1], mapeadora(valor))
nxt_datapixel = (nxt_prev[0], nxt_prev[1], mapeadora(valor))
img.putpixel((columna,linea-1), datapixel)
img.putpixel((columna,linea), nxt_datapixel)
if canal == "cb":
prev = img.getpixel((columna,linea-1))
nxt_prev = img.getpixel((columna,linea))
datapixel = (prev[0], mapeadora(valor), prev[2])
nxt_datapixel = (nxt_prev[0], mapeadora(valor), nxt_prev[2])
img.putpixel((columna,linea-1), datapixel)
img.putpixel((columna,linea), nxt_datapixel)
if canal == "nxt_lum":
prev = img.getpixel((columna,linea))
datapixel = (mapeadora(valor), prev[1], prev[2])
img.putpixel((columna,linea), datapixel)
def lowpass(cutout, delta_w, atten):
'''
cutout y delta_w en fracciones de pi radianes por segundo.
atten en decibeles positivos.
'''
beta = 0
if atten > 50:
beta = 0.1102 * (atten - 8.7)
elif atten < 21:
beta = 0
else:
beta = 0.5842 * (atten - 21)**0.4 + 0.07886 * (atten - 21)
length = math.ceil((atten - 8) / (2.285 * delta_w * math.pi)) + 1;
if length % 2 == 0:
length += 1
coeffs = np.kaiser(length, beta)
# i es el indice en el vector, n es el eje con el cero centrado en el medio
# del filtro
for i, n in enumerate(range(
int(-(length - 1) / 2),
int((length - 1) / 2)+1)):
if n == 0:
coeffs[i] *= cutout
else:
coeffs[i] *= math.sin(n * math.pi * cutout) / (n * math.pi)
return coeffs
def filtrar(input, cutout, delta_w, atten):
'''
La derecha del buff tiene la muestra mas reciente y tiene el indice mas alto
'''
coeffs = lowpass(cutout, delta_w, atten)
# plot(coeffs, numpy.abs(numpy.fft.fft(coeffs)))
buf = collections.deque([0] * len(coeffs))
for s in input:
buf.popleft()
buf.append(s)
sum = 0
for j in range(len(coeffs)):
sum += buf[-j - 1] * coeffs[j]
yield sum
| 29.549451 | 80 | 0.581629 | 0 | 0 | 469 | 0.174414 | 0 | 0 | 0 | 0 | 420 | 0.156192 |
3e7f9f610ed95d40e15a8580e0dd70e9219fb93d | 3,653 | py | Python | Pong.py | Mishkanian/pong_game | 5a04b4b5fc36af2159e60fb85941034a2325996c | [
"MIT"
] | null | null | null | Pong.py | Mishkanian/pong_game | 5a04b4b5fc36af2159e60fb85941034a2325996c | [
"MIT"
] | null | null | null | Pong.py | Mishkanian/pong_game | 5a04b4b5fc36af2159e60fb85941034a2325996c | [
"MIT"
] | 1 | 2021-11-15T20:21:53.000Z | 2021-11-15T20:21:53.000Z | """
Pong game by Michael Mishkanian
"""
import turtle
wn = turtle.Screen()
wn.title("Pong by Michael Mishkanian")
wn.bgcolor("black")
wn.setup(width=800, height=600)
wn.tracer(0)
# Paddle A
paddle_a = turtle.Turtle()
paddle_a.speed(0)
paddle_a.shape("square")
paddle_a.color("white")
paddle_a.shapesize(stretch_wid=5, stretch_len=1) # make paddle a rectangle
paddle_a.penup()
paddle_a.goto(-350, 0) # starting location of paddle on left side of screen
# Paddle B
paddle_b = turtle.Turtle()
paddle_b.speed(0)
paddle_b.shape("square")
paddle_b.color("white")
paddle_b.shapesize(stretch_wid=5, stretch_len=1)
paddle_b.penup()
paddle_b.goto(350, 0) # starting location of paddle on right side of screen
# Ball
ball = turtle.Turtle()
ball.speed(0)
ball.shape("square")
ball.color("white")
ball.penup()
ball.goto(0, 0) # ball starts in middle of screen
ball.dx = .33 # movement speed of the ball dx
ball.dy = .33 # movement speed of the ball dy
# Score Display
pen = turtle.Turtle()
pen.speed(0)
pen.color("white")
pen.penup()
pen.hideturtle()
pen.goto(0, 260)
pen.write("Player 1: 0 Player 2: 0", align="center", font=("Courier", 24, "normal"))
# Start Tracking Scores
score_a = 0
score_b = 0
def paddle_a_up():
"""
This function takes in the current y-coordinate of paddle A
and then increases the position by 20 (AKA "go up")
"""
y = paddle_a.ycor()
y += 20
paddle_a.sety(y)
def paddle_a_down():
"""
This function takes in the current y-coordinate of paddle A
and then decreases the position down 20 (AKA "go down")
"""
y = paddle_a.ycor()
y -= 20
paddle_a.sety(y)
def paddle_b_up():
"""
This function takes in the current y-coordinate of paddle B
and then increases the position by 20 (AKA "go up")
"""
y = paddle_b.ycor()
y += 20
paddle_b.sety(y)
def paddle_b_down():
"""
This function takes in the current y-coordinate of paddle B
and then decreases the position by 20 (AKA "go down")
"""
y = paddle_b.ycor()
y -= 20
paddle_b.sety(y)
# Key bindings
wn.listen()
wn.onkeypress(paddle_a_up, "w")
wn.onkeypress(paddle_a_down, "s")
wn.onkeypress(paddle_b_up, "Up")
wn.onkeypress(paddle_b_down, "Down")
# Main game loop
while True:
wn.update()
# Ball movement
ball.setx(ball.xcor() + ball.dx)
ball.sety(ball.ycor() + ball.dy)
# Border checks
if ball.ycor() > 290:
ball.sety(290)
ball.dy *= -1 # reverse direction if ball is too high
if ball.ycor() < -290:
ball.sety(-290)
ball.dy *= -1 # reverse direction if ball is too low
# retart game when the ball passes a paddle
if ball.xcor() > 390:
ball.goto(0, 0)
ball.dx *= -1
score_a += 1
pen.clear() # clear score
pen.write("Player 1: {} Player 2: {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
if ball.xcor() < -390:
ball.goto(0, 0)
ball.dx *= -1
score_b += 1
pen.clear() # clear score
pen.write("Player 1: {} Player 2: {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
# Collisions
if (ball.xcor() > 340 and ball.xcor() < 350) and (ball.ycor() < paddle_b.ycor() + 40
and ball.ycor() > paddle_b.ycor() - 40):
ball.setx(340)
ball.dx *= -1
if (ball.xcor() < -340 and ball.xcor() > -350) and (ball.ycor() < paddle_a.ycor() + 40
and ball.ycor() > paddle_a.ycor() - 40):
ball.setx(-340)
ball.dx *= -1
| 26.280576 | 120 | 0.611005 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,336 | 0.365727 |
3e7fe9149a1b5f7c3cd431d38f69f6e9b05ff08e | 108 | py | Python | loops_part2/sequence_2k+1.py | MaggieIllustrations/softuni-github-programming | f5695cb14602f3d2974359f6d8734332acc650d3 | [
"MIT"
] | null | null | null | loops_part2/sequence_2k+1.py | MaggieIllustrations/softuni-github-programming | f5695cb14602f3d2974359f6d8734332acc650d3 | [
"MIT"
] | null | null | null | loops_part2/sequence_2k+1.py | MaggieIllustrations/softuni-github-programming | f5695cb14602f3d2974359f6d8734332acc650d3 | [
"MIT"
] | 1 | 2022-01-14T17:12:44.000Z | 2022-01-14T17:12:44.000Z | number = int(input())
counter = 1
while counter <= number:
print(counter)
counter = 2 * counter + 1 | 18 | 29 | 0.62963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
3e82a49073596a4c986e7e70b3ddc02848ac39cb | 385 | py | Python | tests/fixtures.py | hdsr-mid/string_finder | 64aa38afa562beddc897a0fcb84bf39e53b935fd | [
"MIT"
] | null | null | null | tests/fixtures.py | hdsr-mid/string_finder | 64aa38afa562beddc897a0fcb84bf39e53b935fd | [
"MIT"
] | null | null | null | tests/fixtures.py | hdsr-mid/string_finder | 64aa38afa562beddc897a0fcb84bf39e53b935fd | [
"MIT"
] | null | null | null | from pathlib import Path
from string_finder.constants import TEST_DATA_DIR
from typing import List
import pytest
@pytest.fixture
def caw_xml_paths() -> List[Path]:
xml_dir = TEST_DATA_DIR / "xmls"
assert xml_dir.is_dir()
_paths = [x for x in xml_dir.iterdir()]
assert len(_paths) == 4, f"expected 4 xml files in {xml_dir}, but found {len(_paths)}"
return _paths
| 25.666667 | 90 | 0.719481 | 0 | 0 | 0 | 0 | 268 | 0.696104 | 0 | 0 | 67 | 0.174026 |
3e83c39b04f2c10f748cc83b7509198a99b52216 | 1,432 | py | Python | clean.py | glqstrauss/oopsgenie | d1984e332b11f972db2008867f1aba0917457b9b | [
"MIT"
] | 5 | 2020-01-02T21:15:31.000Z | 2020-07-29T18:01:51.000Z | clean.py | glqstrauss/oopsgenie | d1984e332b11f972db2008867f1aba0917457b9b | [
"MIT"
] | 2 | 2020-01-07T15:36:44.000Z | 2020-01-13T20:38:45.000Z | clean.py | glqstrauss/oopsgenie | d1984e332b11f972db2008867f1aba0917457b9b | [
"MIT"
] | 1 | 2020-07-29T17:10:32.000Z | 2020-07-29T17:10:32.000Z | import csv
from utils import get_valid_colum_indices
class Cleaner():
    """Produce a cleaned CSV containing only a selected subset of columns.

    The input file name is expected to end in ``raw.csv``; the cleaned
    output is written next to it as ``<prefix>clean.csv``.
    """

    @staticmethod
    def clean(file, clean_columns, remove):
        """Copy selected columns of ``file`` into a new ``*clean.csv`` file.

        Args:
            file (str): path of the source CSV (name must end in 'raw.csv').
            clean_columns (list[str]): header names to keep, in output order.
            remove (list[str] or None): blacklist substrings; rows whose
                "Message" column contains any of them are dropped.

        Fixes over the previous version: the method is now a proper
        ``@staticmethod`` (it never used ``self``), the caller's
        ``clean_columns`` list is no longer mutated, and the written
        header no longer includes the helper "Message" column that the
        data rows never contained.
        """
        print("Cleaning {}".format(file))
        print("For columns {}".format(clean_columns))
        new_file = file[0:-7] + "clean.csv"  # '<prefix>raw.csv' -> '<prefix>clean.csv'
        with open(file, 'r') as raw_file:
            reader = csv.reader(raw_file, delimiter=',')
            headers = next(reader)
            col_count = len(clean_columns)
            # Work on a copy so the caller's list is not mutated; the
            # "Message" column is appended only to locate it for filtering.
            lookup_columns = list(clean_columns)
            if remove:
                lookup_columns.append("Message")
            indices = get_valid_colum_indices(headers, lookup_columns)
            if indices is None:
                print("invalid column specified for in {}".format(file))
                return
            with open(new_file, 'w') as clean_file:
                writer = csv.writer(clean_file, delimiter=',')
                # Header matches the data rows: the helper "Message"
                # column used for filtering is not written out.
                writer.writerow(clean_columns)
                for row in reader:
                    if remove:
                        # Drop the row when the Message column contains
                        # any blacklisted substring.
                        if any(r in row[indices[-1]] for r in remove):
                            continue
                    writer.writerow([row[indices[i]] for i in range(col_count)])
        print("Done")
3e869ea6160f40dc58804e7f852689a43590b0fc | 516 | py | Python | issues_list/migrations/0003_auto_20181106_1541.py | vmcggh18/bits_tracker | 7c09aae321efb13979bed274d973c77319ce795e | [
"PostgreSQL"
] | null | null | null | issues_list/migrations/0003_auto_20181106_1541.py | vmcggh18/bits_tracker | 7c09aae321efb13979bed274d973c77319ce795e | [
"PostgreSQL"
] | 7 | 2020-06-05T19:50:41.000Z | 2022-03-11T23:39:39.000Z | issues_list/migrations/0003_auto_20181106_1541.py | vmcggh18/bits_tracker | 7c09aae321efb13979bed274d973c77319ce795e | [
"PostgreSQL"
] | 1 | 2019-02-19T15:30:16.000Z | 2019-02-19T15:30:16.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-06 15:41
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: enforces that a user may cast at most one
    # vote per item by adding a unique_together constraint on VoteFor.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('issues_list', '0002_votefor'),
    ]
    operations = [
        migrations.AlterUniqueTogether(
            name='votefor',
            unique_together=set([('user', 'item')]),
        ),
    ]
| 23.454545 | 66 | 0.649225 | 332 | 0.643411 | 0 | 0 | 0 | 0 | 0 | 0 | 120 | 0.232558 |
3e87d53a9a7c9e621189fb9905a20df232be3db0 | 6,203 | py | Python | sentiment_analysis.py | bernardomelo/projeto-final | f0207144282b04e1b781604bf96d69634ca68ee8 | [
"MIT"
] | null | null | null | sentiment_analysis.py | bernardomelo/projeto-final | f0207144282b04e1b781604bf96d69634ca68ee8 | [
"MIT"
] | null | null | null | sentiment_analysis.py | bernardomelo/projeto-final | f0207144282b04e1b781604bf96d69634ca68ee8 | [
"MIT"
] | null | null | null | ###############################################################################
# Univesidade Federal de Pernambuco -- UFPE (http://www.ufpe.br)
# Centro de Informatica -- CIn (http://www.cin.ufpe.br)
# Bacharelado em Sistemas de Informacao
# IF968 -- Programacao 1
#
# Autor: Bernardo Gomes de Melo
# Aluno de S.I do Cin
#
# Email: bgm@cin.ufpe.br
#
#
# Data: 2016-06-10
#
# Descricao: Este e' um modelo de arquivo para ser utilizado para a implementacao
# do projeto pratico da disciplina de Programacao 1.
# A descricao do projeto encontra-se no site da disciplina e trata-se
# de uma adaptacao do projeto disponivel em
#                http://nifty.stanford.edu/2016/manley-urness-movie-review-sentiment/
# O objetivo deste projeto e' implementar um sistema de analise de
# sentimentos de comentarios de filmes postados no site Rotten Tomatoes.
#
# Licenca: The MIT License (MIT)
# Copyright(c) 2016 Bernardo Gomes de Melo, Aluno do CIn
#
###############################################################################
import sys
import re
def clean_up(s):
    """Lowercase *s* and trim punctuation from both ends.

    Punctuation found inside the string is left untouched; only
    leading and trailing punctuation characters (including the
    control characters \\n, \\t and \\r) are removed.
    """
    edge_chars = "'!\",;:.-?)([]<>*#\n\t\r"
    return s.lower().strip(edge_chars)
def split_on_separators(original, separators):
    """Split *original* on any character in *separators*, dropping empties.

    Args:
        original (str): the string to split.
        separators (str): each character is an individual separator,
            e.g. '^$' splits on both '^' and '$'.

    Returns:
        list[str]: the non-empty fragments, in order.

    Fixes over the previous version: a real list is returned instead of
    a lazy ``filter`` object (the docstring always promised a vector),
    and the separator characters are escaped with ``re.escape`` so that
    regex-special characters work — the old code built ``[^$]`` for the
    documented example '^$', which is a *negated* character class.
    """
    pattern = '[{0}]'.format(re.escape(separators))
    return [piece for piece in re.split(pattern, original) if piece]
def StopWords(fname):
    """Read one stop word per line from *fname* and return them as a list.

    Lines are stripped of surrounding whitespace (the previous version
    kept the trailing newlines, so a stop word could never match a
    cleaned-up token) and blank lines are ignored.  The file handle is
    closed deterministically via a context manager.

    Args:
        fname (str): path of the stop-word file.

    Returns:
        list[str]: the stop words, in file order.
    """
    with open(fname, 'r') as arq:
        return [line.strip() for line in arq if line.strip()]
def readTrainingSet(fname, stopwords_fname=None):
    """Parse the training set and compute the mean score of each word.

    Each line of the file starts with an integer score (0-4) followed by
    the words of the comment.  Words found in the optional stop-word
    file are ignored.

    Args:
        fname (str): path of the training-set file.
        stopwords_fname (str, optional): path of a stop-word list, one
            word per line.  When omitted, no word is filtered out.  (The
            previous version had a hard-coded, syntactically invalid
            placeholder ``StopWords(#caminho...)`` here, which made the
            module unimportable.)

    Returns:
        dict: maps triples ``(word, frequency, score_sum)`` to the mean
        score ``score_sum / frequency``, preserving the structure the
        rest of the program expects.
    """
    stopwords = set(StopWords(stopwords_fname)) if stopwords_fname else set()
    counts = {}  # word -> [frequency, score_sum]; O(1) lookups instead of scanning all keys
    with open(fname, 'r') as arq:
        for line in arq:
            for token in split_on_separators(line, ' '):
                # A bare digit 0-4 sets the score for the rest of the line.
                if token in ('0', '1', '2', '3', '4'):
                    score = int(token)
                    continue
                palavra = clean_up(token)
                if not palavra or palavra in stopwords:
                    continue
                freq_sum = counts.setdefault(palavra, [0, 0])
                freq_sum[0] += 1
                freq_sum[1] += score
    return {(palavra, freq, total): total / freq
            for palavra, (freq, total) in counts.items()}
def readTestSet(fname):
    """Read the test set and return a list of (score, text) pairs.

    Each line starts with an integer score (0-4) followed by the comment
    text; the text is normalised with clean_up().

    Args:
        fname (str): path of the test-set file.

    Returns:
        list[tuple[int, str]]: one (score, cleaned_text) pair per line.
    """
    reviews = []
    with open(fname, 'r') as arq:
        for line in arq:
            line = line.rstrip('\n\r')
            if not line:
                continue
            # Split the leading score token from the rest of the line;
            # the old slicing (line[1:-1]) silently dropped the last
            # character of a file without a final newline.
            score_token, _, text = line.partition(' ')
            reviews.append((int(score_token), clean_up(text)))
    return reviews
def computeSentiment(review, words):
    """Return the sentiment of *review* as the mean score of its words.

    A word seen in training contributes its mean training score (the
    value stored in *words*); an unknown word contributes the neutral
    score 2.

    Fixes over the previous version: it accumulated 2 for *every*
    non-matching dictionary key (so the result grew with the vocabulary
    size) and it added the raw score sum ``p[2]`` instead of the stored
    mean; it also raised ZeroDivisionError for an empty review.

    Args:
        review (str): textual part of a comment.
        words (dict): maps (word, frequency, score_sum) triples to the
            mean score of the word (as built by readTrainingSet).

    Returns:
        float: mean score of the review's words; 2.0 for an empty review.
    """
    # Index the mean score by word once, instead of scanning every key
    # for every word of the review.
    mean_by_word = {key[0]: mean for key, mean in words.items()}
    score = 0.0
    count = 0
    for token in split_on_separators(review, ' '):
        palavra = clean_up(token)
        score += mean_by_word.get(palavra, 2)
        count += 1
    if count == 0:
        return 2.0  # neutral sentiment for an empty comment
    return score / count
def computeSumSquaredErrors(reviews, words):
    """Return the mean of the squared sentiment errors over *reviews*.

    The error of a review is the difference between its labelled score
    and the sentiment inferred by computeSentiment().  NOTE: despite the
    historical name, the total has always been divided by the number of
    reviews, so the value reported is the *mean* squared error; that
    behaviour is kept for output compatibility.

    Args:
        reviews (list[tuple[int, str]]): (score, text) pairs.
        words (dict): word-score dictionary from readTrainingSet().

    Returns:
        float: mean squared error; 0.0 when *reviews* is empty (the old
        code raised ZeroDivisionError in that case).
    """
    if not reviews:
        return 0.0
    total = 0.0
    for expected, text in reviews:
        # A perfectly predicted review contributes 0, so the old
        # `if line[0] != sentiment` special case is unnecessary.
        diff = float(expected) - computeSentiment(text, words)
        total += diff ** 2
    return total / len(reviews)
def main():
    """Command-line entry point.

    Expects two positional arguments: the training-set file followed by
    the test-set file.  Prints the squared error of the inferred
    sentiments against the labelled scores.
    """
    # The files are passed to the program as command-line arguments.
    # Argument order: first the training-set file name, then the
    # test-set file name.
    if len(sys.argv) < 3:
        print ('Numero invalido de argumentos')
        print ('O programa deve ser executado como python sentiment_analysis.py <arq-treino> <arq-teste>')
        sys.exit(0)
    # Read the training set and compute the per-word scores.
    words = readTrainingSet(sys.argv[1])
    # Read the test set.
    reviews = readTestSet(sys.argv[2])
    # Infer sentiments and compute the squared-error total.
    sse = computeSumSquaredErrors(reviews,words)
    print( 'A soma do quadrado dos erros e\': {0}'.format(sse))
print( 'A soma do quadrado dos erros e\': {0}'.format(sse))
# Run the analysis only when executed as a script (not when imported).
if __name__ == '__main__':
    main()
| 35.855491 | 107 | 0.563759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,514 | 0.5665 |
3e8a5b0b6fc0612db9638f1736e52adef498431d | 37,129 | py | Python | morm/db.py | neurobin/python-morm | 2b6dcedc7090a9e642331300a24dfcca41ea1afe | [
"BSD-3-Clause"
] | 4 | 2021-03-12T16:36:24.000Z | 2022-03-06T09:26:14.000Z | morm/db.py | neurobin/python-morm | 2b6dcedc7090a9e642331300a24dfcca41ea1afe | [
"BSD-3-Clause"
] | null | null | null | morm/db.py | neurobin/python-morm | 2b6dcedc7090a9e642331300a24dfcca41ea1afe | [
"BSD-3-Clause"
] | null | null | null | """DB utilities.
"""
__author__ = 'Md Jahidul Hamid <jahidulhamid@yahoo.com>'
__copyright__ = 'Copyright © Md Jahidul Hamid <https://github.com/neurobin/>'
__license__ = '[BSD](http://www.opensource.org/licenses/bsd-license.php)'
__version__ = '0.1.0'
import collections
import re
import asyncio
import nest_asyncio # type: ignore
import atexit
import logging
import asyncpg # type: ignore
from asyncpg import Record, Connection # type: ignore
from typing import Optional, Dict, List, Tuple, TypeVar, Union, Any
from morm import exceptions
from morm.model import ModelType, Model, ModelBase, _FieldNames
from morm.q import Q
from morm.types import Void
LOGGER_NAME = 'morm.db-'
log = logging.getLogger(LOGGER_NAME)
nest_asyncio.apply()
def record_to_model(record: Record, model_class: ModelType) -> Model:
    """Map a database Record onto a freshly created Model instance.

    Every column of *record* is copied onto the new model object with
    ``setattr`` and its name is registered in ``Meta._fromdb_`` so the
    model can tell which fields were populated from the database.

    Args:
        record (Record): row returned by the database driver.
        model_class (ModelType): model class to instantiate.

    Returns:
        Model: the populated model instance.
    """
    instance = model_class()
    loaded_fields = instance.Meta._fromdb_
    for column, value in record.items():
        loaded_fields.append(column)
        setattr(instance, column, value)
    return instance
class Pool(object):
    """Open database connection pool.
    ```python
    from morm.db import Pool
    DB_POOL = Pool(
        dsn='postgres://',
        host='localhost',
        port=5432,
        user='jahid', # change accordingly
        password='jahid', # change accordingly
        database='test', # change accordingly
        min_size=10, # change accordingly
        max_size=90, # change accordingly
    )
    ```
    This will create and open an asyncpg pool which will be automatically closed at exit.
    You should set this in a settings file from where you can import the `DB_POOL`
    Args:
        dsn (str, optional): DSN string. Defaults to None.
        min_size (int, optional): Minimum connection in the pool. Defaults to 10.
        max_size (int, optional): Maximum connection in the pool. Defaults to 100.
        max_queries (int, optional): Number of queries after a connection is closed and replaced with a new connection. Defaults to 50000.
        max_inactive_connection_lifetime (float, optional): Number of seconds after which inactive connections in the pool will be closed. Pass `0` to disable this mechanism. Defaults to 300.0.
        setup ([type], optional): A coroutine to prepare a connection right before it is returned from `Pool.acquire()`. Defaults to None.
        init ([type], optional): A coroutine to initialize a connection when it is created. Defaults to None.
        loop ([type], optional): Asyncio event loop instance. Defaults to None.
        connection_class ([type], optional): The class to use for connections. Must be a subclass of `asyncpg.connection.Connection`. Defaults to asyncpg.connection.Connection.
    """
    def __init__(self, dsn: str = None,
                min_size: int = 10,
                max_size: int = 100,
                max_queries: int = 50000,
                max_inactive_connection_lifetime: float = 300.0,
                setup=None,
                init=None,
                loop=None,
                connection_class=Connection,
                **connect_kwargs):
        # Plain storage of the asyncpg.create_pool() parameters; they are
        # consumed later by __create_pool().
        self.dsn = dsn
        self.min_size = min_size
        self.max_size = max_size
        self.max_queries = max_queries
        self.max_inactive_connection_lifetime = max_inactive_connection_lifetime
        self.setup = setup
        self.init = init
        self.loop = loop
        self.connection_class = connection_class
        self.connect_kwargs = connect_kwargs
        self._pool = None
        # Open eagerly and guarantee a graceful close at interpreter exit.
        self._open()
        atexit.register(self._close)
    @property
    def pool(self) -> asyncpg.pool.Pool:
        """The underlying asyncpg.pool.Pool object (None after close).
        """
        return self._pool
    async def __create_pool(self) -> asyncpg.pool.Pool:
        """Create a asyncpg.pool.Pool for this Pool object.
        Returns:
            asyncpg.pool.Pool: Pool object (singleton)
        """
        return await asyncpg.create_pool(
            dsn=self.dsn,
            min_size=self.min_size,
            max_size=self.max_size,
            max_queries=self.max_queries,
            max_inactive_connection_lifetime=self.max_inactive_connection_lifetime,
            setup=self.setup,
            init=self.init,
            loop=self.loop,
            connection_class=self.connection_class,
            **self.connect_kwargs)
    def _open(self):
        """Open the pool. Called on init so not need to call this
        method explicitly.
        """
        if not self._pool:
            # run_until_complete is usable here even when a loop is already
            # running because nest_asyncio.apply() is invoked at module import.
            self._pool = asyncio.get_event_loop().run_until_complete(self.__create_pool())
            log.debug("Pool opened")
    def _close(self):
        """Attempt to close the pool gracefully. registered with atexit.
        You do not need to call this method explicitly.
        """
        if self._pool:
            asyncio.get_event_loop().run_until_complete(self._pool.close())
            self._pool = None
            log.debug("Pool closed")
class DB(object):
    """Initialize a DB object setting a pool to get connection from.
    If connection is given, it is used instead.
    The `corp()` method returns an asyncpg.pool.Pool or an
    asyncpg.Connection
    Args:
        pool (Pool): A connection pool
        con (Connection): Connection. Defaults to None.
    """
    def __init__(self, pool: Pool, con: Connection=None):
        self._pool = pool
        self._con = con
        # Sentinel status returned by update()/save() when no field changed.
        self.DATA_NO_CHANGE = 'DATA_NO_CHANGE_TRIGGERED'
    def corp(self) -> Union[asyncpg.pool.Pool, Connection]:
        """Return the connection if available, otherwise return a Pool.
        Note: The name reads 'c or p'
        Returns:
            asyncpg.Connection or asyncpg.pool.Pool object
        """
        # An explicit connection (e.g. inside a transaction) wins over the pool.
        if self._con:
            return self._con
        return self._pool.pool
    async def fetch(self, query: str, *args,
                    timeout: float = None,
                    model_class: ModelType=None
                    ) -> Union[List[ModelBase], List[Record]]:
        """Make a query and get the results.
        Resultant records can be mapped to model_class objects.
        Args:
            query (str): Query string.
            args (*list or *tuple): Query arguments.
            timeout (float, optional): Timeout value. Defaults to None.
            model_class (Model, optional): Defaults to None.
        Returns:
            List[Model] or List[Record] : List of model instances if model_class is given, otherwise list of Record instances.
        """
        pool = self.corp()
        records = await pool.fetch(query, *args, timeout=timeout)
        if not model_class:
            return records
        else:
            # Wrap each raw Record in a model instance.
            new_records = []
            for record in records:
                new_record = record_to_model(record, model_class)
                new_records.append(new_record)
            return new_records
    async def fetchrow(self, query: str, *args,
                        timeout: float = None,
                        model_class: ModelType=None
                        ) -> Union[ModelBase, Record]:
        """Make a query and get the first row.
        Resultant record can be mapped to model_class objects.
        Args:
            query (str): Query string.
            args (*list or *tuple): Query arguments.
            timeout (float, optional): Timeout value. Defaults to None.
            model_class (Model, optional): Defaults to None.
        Returns:
            Record or model_clas object or None if no rows were selected.
        """
        pool = self.corp()
        record = await pool.fetchrow(query, *args, timeout=timeout)
        if not model_class:
            return record
        else:
            # No row selected: pass the None/empty record through unchanged.
            if not record:
                return record
            new_record = record_to_model(record, model_class)
            return new_record
    async def fetchval(self, query: str, *args,
                        column: int = 0,
                        timeout: float = None
                        ) -> Any:
        """Run a query and return a column value in the first row.
        Args:
            query (str): Query to run.
            args (*list or *tuple): Query arguments.
            column (int, optional): Column index. Defaults to 0.
            timeout (float, optional): Timeout. Defaults to None.
        Returns:
            Any: Coulmn (indentified by index) value of first row.
        """
        pool = self.corp()
        return await pool.fetchval(query, *args, column=column, timeout=timeout)
    async def execute(self, query: str, *args,
                      timeout: float = None
                      ) -> str:
        """Execute a query.
        Args:
            query (str): Query to run.
            args (*list or *tuple): Query arguments.
            timeout (float, optional): Timeout. Defaults to None.
        Returns:
            str: Status of the last SQL command
        """
        pool = self.corp()
        return await pool.execute(query, *args, timeout=timeout)
    def get_insert_query(self, mob: ModelBase, reset=False) -> Tuple[str, List[Any]]:
        """Get insert query for the model object (mob) with its current data
        Args:
            mob (ModelBase): Model object
            reset (bool): Reset the value change counter. Defaults to False
        Returns:
            (str, list): query, args
        """
        data = mob.Meta._fields_
        new_data_gen = mob.__class__._get_FieldValue_data_valid_(data, up=True)
        # Build parallel lists: quoted column names, their values, and the
        # matching $1..$n positional markers.
        columns = []
        values = []
        markers = []
        c = 0
        for n,v in new_data_gen:
            c += 1
            if reset:
                # Forget pending changes and db-sourced field markers.
                v.value_change_count = 0
                mob.Meta._fromdb_ = []
            columns.append(n)
            values.append(v.value)
            markers.append(f'${c}')
        column_q = '","'.join(columns)
        if column_q:
            column_q = f'"{column_q}"'
            marker_q = ', '.join(markers)
            query = f'INSERT INTO "{mob.__class__._get_db_table_()}" ({column_q}) VALUES ({marker_q}) RETURNING "{mob.__class__._get_pk_()}"'
        else:
            # No insertable fields: signal the caller with an empty query.
            query = ''
        return query, values
    def get_update_query(self, mob: ModelBase, reset=False) -> Tuple[str, List[Any]]:
        """Get the update query for the changed data in the model object (mob)
        Args:
            mob (ModelBase): Model object
            reset (bool): If True, this method can be called just once to get the changes done on mob. Subsequent call will return empty query.
        Raises:
            AttributeError: If primary key does not exists i.e if not updatable
        Returns:
            str, args: tuple of query, args
        """
        pkval = getattr(mob, mob.__class__._get_pk_()) # raises AttributeError when pk is unset; save() relies on this to fall back to insert()
        data = mob.Meta._fields_
        new_data_gen = mob.__class__._get_FieldValue_data_valid_(data, up=True)
        colval = []
        values = []
        c = 0
        for n,v in new_data_gen:
            if n == mob.__class__._get_pk_(): continue
            # Only fields whose value actually changed are included.
            if v.value_change_count > 0:
                c += 1
                colval.append(f'"{n}"=${c}')
                values.append(v.value)
                if reset:
                    v.value_change_count = 0
        colval_q = ', '.join(colval)
        if colval_q:
            where = f'"{mob.__class__._get_pk_()}"=${c+1}'
            values.append(pkval)
            query = f'UPDATE "{mob.__class__._get_db_table_()}" SET {colval_q} WHERE {where}'
        else:
            # Nothing changed: empty query tells update() to skip execution.
            query = ''
        return query, values
    def get_delete_query(self, mob: ModelBase) -> Tuple[str, List[Any]]:
        """Get the delete query for the model object.
        Args:
            mob (ModelBase): model object.
        Returns:
            Tuple[str, List[Any]]: quey, args
        """
        pkval = getattr(mob, mob.__class__._get_pk_())
        query = f'DELETE FROM "{mob.__class__._get_db_table_()}" WHERE "{mob.__class__._get_pk_()}"=$1'
        return query, [pkval]
    async def delete(self, mob: ModelBase, timeout: float = None) -> str:
        """Delete the model object data from database.
        Args:
            mob (ModelBase): Model object
            timeout (float): timeout value. Defaults to None.
        Returns:
            (str): status of last sql command.
        """
        query, args = self.get_delete_query(mob)
        # Life-cycle hooks run around the actual DELETE.
        await mob._pre_delete_(self)
        res = await self.execute(query, *args, timeout=timeout)
        await mob._post_delete_(self)
        return res
    async def insert(self, mob: ModelBase, timeout: float = None) -> Any:
        """Insert the current data state of mob into db.
        Args:
            mob (ModelBase): Model object
            timeout (float): timeout value. Defaults to None.
        Returns:
            (Any): Value of primary key of the inserted row
        """
        query, args = self.get_insert_query(mob, reset=True)
        await mob._pre_insert_(self)
        pkval = await self.fetchval(query, *args, timeout=timeout)
        if pkval is not None:
            # Reflect the database-assigned primary key back onto the object.
            setattr(mob, mob.__class__._get_pk_(), pkval)
        await mob._post_insert_(self)
        return pkval
    async def update(self, mob: ModelBase, timeout: float = None) -> str:
        """Update the current changed data of mob onto db
        Args:
            mob (ModelBase): Model object
            timeout (float): timeout value. Defaults to None.
        Raises:
            AttributeError: If primary key does not exists.
        Returns:
            str: status of last sql command.
            Successful status starts with the word 'UPDATE' followed by
            number of rows updated, which should be 1 in this case.
            Returns self.DATA_NO_CHANGE when no field was changed.
        """
        query, args = self.get_update_query(mob, reset=True)
        if query:
            await mob._pre_update_(self)
            res = await self.execute(query, *args, timeout=timeout)
            await mob._post_update_(self)
        else:
            # Nothing to update: report the sentinel status instead of
            # hitting the database.
            res = self.DATA_NO_CHANGE
        return res
    async def save(self, mob: ModelBase, timeout: float = None) -> Union[str, Any]:
        """Insert if not exists and update if exists.
        update is tried first, if fails (if pk does not exist), insert
        is called.
        Args:
            mob (ModelBase): Model object
            timeout (float): timeout value. Defaults to None.
        Returns:
            int or str: The value of the primary key for insert or
            status for update.
        """
        await mob._pre_save_(self)
        try:
            res = await self.update(mob, timeout=timeout)
        # NOTE(review): any AttributeError raised inside update() or the
        # _pre_update_ hook also triggers the insert fallback, not only a
        # missing pk — confirm this is intended.
        except AttributeError:
            res = await self.insert(mob, timeout=timeout)
        await mob._post_save_(self)
        return res
    def q(self, model: ModelType = None) -> 'ModelQuery':
        """Return a ModelQuery for model
        If `None` is passed, it will give a `ModelQuery` without setting
        `self.model` on the `ModelQuery` object.
        Args:
            model (ModelType, optional): model class. Defaults to None.
        Raises:
            TypeError: If invalid model type is passed
        Returns:
            ModelQuery: ModelQuery object
        """
        # Delegates to __call__ so db.q(Model) and db(Model) are equivalent.
        return self(model)
    def __call__(self, model: ModelType = None) -> 'ModelQuery':
        """Return a ModelQuery for model
        If `None` is passed, it will give a `ModelQuery` without setting
        `self.model` on the `ModelQuery` object.
        Args:
            model (ModelType, optional): model class. Defaults to None.
        Raises:
            TypeError: If invalid model type is passed
        Returns:
            ModelQuery: ModelQuery object
        """
        if isinstance(model, ModelType) or model is None:
            return ModelQuery(self, model)
        raise TypeError(f"Invalid model: {model}. model must be of type {ModelType.__name__}. Make sure you did not pass a model object by mistake.")
class ModelQuery():
"""Query builder for model class.
Calling `db(Model)` gives you a model query handler which have several query methods to help you make queries.
Use `q(query, *args)` method to make queries with positional arguments. If you want named arguments, use the uderscored version of these methods. For example, `q(query, *args)` has an underscored version `q_(query, *args, **kwargs)` that can take named arguments.
You can add a long query part by part:
```python
from morm.db import DB
db = DB(DB_POOL) # get a db handle.
qh = db(User) # get a query handle.
query, args = qh.q(f'SELECT * FROM {qh.db_table}')\
.q(f'WHERE {qh.f.profession} = ${qh.c}', 'Teacher')\
.q_(f'AND {qh.f.age} = :age', age=30)\
.getq()
print(query, args)
# fetch:
await qh.fetch()
```
The `q` family of methods (`q, qc, qu etc..`) can be used to
build a query step by step. These methods can be chained
together to break down the query building in multiple steps.
Several properties are available to get information of the model
such as:
1. `qh.db_table`: Quoted table name e.g `"my_user_table"`.
2. `qh.pk`: Quoted primary key name e.g `"id"`.
3. `qh.ordering`: ordering e.g `"price" ASC, "quantity" DESC`.
4. `qh.f.<field_name>`: quoted field names e.g`"profession"`.
5. `qh.c`: Current available position for positional argument (Instead of hardcoded `$1`, `$2`, use `f'${qh.c}'`, `f'${qh.c+1}'`).
`qh.c` is a counter that gives an integer representing the
last existing argument position plus 1.
`reset()` can be called to reset the query to start a new.
To execute a query, you need to run one of the execution methods
: `fetch, fetchrow, fetchval, execute`.
**Notable convenience methods:**
* `qupdate(data)`: Initialize a update query for data
* `qfilter()`: Initialize a filter query upto WHERE clasue.
* `get(pkval)`: Get an item by primary key.
Args:
db (DB): DB object
model_class (ModelType): model
"""
def __init__(self, db: DB, model_class: ModelType = None):
self.reset()
self.db = db
self.model = model_class # can be None
def func(k):
return Q(model_class._check_field_name_(k))
self._f = _FieldNames(func) # no reset
def __repr__(self):
return f'ModelQuery({self.db}, {self.model})'
def reset(self) -> 'ModelQuery':
"""Reset the model query by returning it to its initial state.
Returns:
self (Enables method chaining)
"""
self._query_str_queue: List[str] = []
self.end_query_str = ''
self.start_query_str = ''
self._args: List[Any] = []
self._arg_count = 0
self._named_args: Dict[str, Any] = {}
self._named_args_mapper: Dict[str, int] = {}
self.__filter_initiated = False
self._ordering = ''
self.__update_initiated = False
return self
@property
def c(self) -> int:
"""Current available argument position in the query
arg_count + 1 i.e if $1 and $2 has been used so far, then
self.c is 3 so that you can use it to make $3.
Returns:
int
"""
return self._arg_count + 1
@property
def db_table(self) -> str:
"""Table name of the model (quoted)
"""
return Q(self.model._get_db_table_()) #type: ignore
@property
def pk(self) -> str:
"""Primary key name (quoted)
"""
return Q(self.model._get_pk_()) #type: ignore
@property
def ordering(self) -> str:
"""Ordering query in SQL, does not include `ORDER BY`.
Example: `"price" ASC, "quantity" DESC`
"""
if not self._ordering:
self._ordering = ','.join([' '.join(y) for y in self.model._get_ordering_(quote='"')]) # type: ignore
return self._ordering
@property
def f(self) -> _FieldNames:
"""Field name container where names are quoted.
It can be used to avoid spelling mistakes in writing query.
Example: query `'select "profesion" from "table_name"'`
will only produce error after actually running the query against
a correctly spelled column 'profession'.
while,
query `f'select {self.f.profesion} from {self.db_table}'`
will throw python exception telling you that there is no
misspelled 'profesion' field.
Note: you have to change `self` in above to the current
`ModelQuery` instance
"""
return self._f
def _process_positional_args(self, *args):
if args:
self._args.extend(args)
self._arg_count += len(args)
def _process_keyword_args(self, q: str, **kwargs) -> str:
# TODO: improvents need to be done
# 1. needs to handle only unquoted keyword :field_name
# and ignore ':field_name' or ":field_name"
self._named_args.update(kwargs)
for k,v in self._named_args.items():
if k in self._named_args_mapper:
q, mc = re.subn(f':{k}\\b', f'${self._named_args_mapper[k]}', q)
else:
q, mc = re.subn(f':{k}\\b', f'${self._arg_count+1}', q)
if mc > 0:
self._args.append(v)
self._arg_count += 1
self._named_args_mapper[k] = self._arg_count
return q
def q(self, q: str, *args: Any) -> 'ModelQuery':
"""Add raw query stub without parsing to check for keyword arguments
Use `$1`, `$2` etc. for arguments.
Use `self.c` (instance property, use fstring) to get the current
available argument position.
This is an efficient way to add query that do not have any
keyword arguments to handle, compared to `q_()` which checks for
keyword arguments everytime it is called.
Example:
```python
mq = db(SomeModel)
mq\
.q('SELECT * FROM "table" WHERE $1', True)\
.q('AND "price" >= $2', 33)\
.q(f'OR "price" = ${mq.c}', 0) # mq.c=3 (now)\
.q_('OR "status" = :status', status='OK')\
# :status is $4:
.q('OR "active" = $5', 0)\
.q_('AND "status" = :status')\
# status='OK' from previous call
.q('OR "price" = $2')\
# $2=33 from previous call
#using format string and mq.c to get the argument position:
.q(f'OR "price" > ${mq.c} OR "quantity" > ${mq.c+1}', 12, 3)
# mq.c=6 ^
```
Args:
q (str): raw query string
*args (Any): positional arguments
Returns:
ModelQuery: self, enables method chaining.
"""
self._process_positional_args(*args)
self._query_str_queue.append(q)
return self
def q_(self, q: str, *args, **kwargs) -> 'ModelQuery':
"""Add a query stub having keyword params.
Use the format `:field_name` for keyword parameter.
`:field_name` is converted to positional parameter (`$n`).
This method checks the query against all keyword arguments
that has been added so far with other `q*()` methods.
Args:
q (str): query string (SQL)
Returns:
ModelQuery: returns `self` to enable method chaining
"""
self._process_positional_args(*args)
q = self._process_keyword_args(q, **kwargs)
self._query_str_queue.append(q)
return self
def qq(self, word: str) -> 'ModelQuery':
"""Quote and add a word to the query.
Enable to add names with auto-quote. For example, if the name
for a field value is `status`, it can be added to the query
with auto-quoting, i.e for postgresql it will be added
as `"status"`.
Example:
```python
.qq('price').q('>= $1',34)
```
Args:
word (str): the word that needs to be added with quote.
Returns:
ModelQuery: returns `self` to enable method chaining
"""
if word:
self._query_str_queue.append(Q(word))
return self
def qc(self, word: str, rest: str, *args) -> 'ModelQuery':
"""Add query by quoting `word` while adding the `rest` as is.
This is a shorthand for making where clause conditions.
For example: `qc('price', '>=$1', 34)` is a safe way to write
a where condition like: `"price" >=34`.
The same can be achieved by using a combination of
`qq()` and `q()` or manually quoting and using
with `q()`
Example:
```python
.qc('price', '>= $1', 34)
```
Args:
word (str): left part of query that needs to be quoted
rest (str): right part of query that does not need to be quoted
*args (any): args
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.qq(word).q(rest, *args)
def qc_(self, word: str, rest: str, *args, **kwargs) -> 'ModelQuery':
"""Add query by quoting `word` while adding the `rest` as is.
Same as `qc()` except this method parses the `rest` query string
for keyword params in the format: `:field_name`
Args:
word (str): left part of query that needs to be quoted
rest (str): right part of query that does not need to be quoted
*args (any): args
*kwargs: keyword args
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.qq(word).q_(rest, *args, **kwargs)
def qorder(self):
"""Add ORDER BY
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.q('ORDER BY')
def qo(self, order: str) -> 'ModelQuery':
"""Convert `+/-field_name,` to proper order_by criteria and add to query.
Example: `-field_name,` will become: `"field_name" DESC,`
* `+` at beginning means ascending order (default)
* `-` at beginning means descending order
* `,` at end means you will add more order criteria
Ommit the comma (`,`) when it is the last ordering criteria.
Args:
order (str): order criteria in the format `+/-field_name,`
Returns:
ModelQuery: returns `self` to enable method chaining
"""
direction = 'ASC'
if order.startswith('-'):
order = order[1:]
direction = 'DESC'
elif order.startswith('+'):
order = order[1:]
if order.endswith(','):
order = order[0:-1]
direction += ','
return self.qq(order).q(direction)
def qu(self, data: dict) -> 'ModelQuery':
"""Convert data to `"column"=$n` query with args as the
values and add to the main query.
The counter of positional arguments increases by the number of
items in `data`. Make use of `self.c` counter to add more
queries after using this method.
Args:
data (dict): data in format: `{'column': value}`
Returns:
ModelQuery: returns `self` to enable method chaining
"""
setq = ', '.join([f'"{c}"=${i}' for i,c in enumerate(data, self.c)])
return self.q(setq, *data.values())
def qreturning(self, *column_names) -> 'ModelQuery':
"""Convenience to add a `RETURNING` clause.
Args:
column_names: column names.
Returns:
ModelQuery: returns `self` to enable method chaining
"""
q = '","'.join(column_names)
if q:
q = f'RETURNING "{q}"'
return self.q(q)
def qwhere(self) -> 'ModelQuery':
"""Convenience to add 'WHERE' to the main query.
Make use of `qc()` method to add conditions.
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.q('WHERE')
def qfilter(self, no_ordering=False) -> 'ModelQuery':
"""Initiate a filter.
This initiates a `SELECT` query upto `WHERE`. You can then use the
`q()`, `qc()`, etc. methods to add conditions and finally
execute the `fetch()` method to get all results or execute the
`fetchrow()` method to get a single row.
Example:
```python
.qfilter().q('"price" >= $1 AND "status" = $2', 32.12, 'OK')
```
Args:
no_ordering (bool): Whether to remove the default ordering SQL. Defaults to False.
Returns:
ModelQuery: returns self to enable method chaining
"""
if not self.__filter_initiated:
down_fields = ','.join([Q(x) for x in self.model._get_fields_(up=False)]) #type: ignore
self.reset().q(f'SELECT {down_fields} FROM "{self.model._get_db_table_()}" WHERE') #type: ignore
self.__filter_initiated = True
order_by = self.ordering
if order_by and not no_ordering:
self.end_query_str = f'ORDER BY {order_by}'
else:
raise ValueError(f"Filter is already initiated for this {self.__class__.__name__} query object: {self}")
return self
def qupdate(self, data: dict) -> 'ModelQuery':
"""Initiate a UPDATE query for data.
This initiates an `UPDATE` query upto `WHERE` and leaves you to
add conditions with other methods such as `qc` or the generic
method `q()`.
Finally call the `execute()` method to execute the query or
call the `fetchval()` method if using `RETURNING` clause.
Args:
data (dict): data in key value dictionary
Returns:
ModelQuery: returns `self` to enable method chaining
"""
if not self.__update_initiated:
self.reset().q(f'UPDATE {self.db_table} SET').qu(data).qwhere()
self.__update_initiated = True
else:
raise ValueError(f"update is already initiated for this {self.__class__.__name__} query: {self}")
return self
def getq(self) -> Tuple[str, List[Any]]:
"""Return query string and arg list
Returns:
tuple: (str, list) : (query, args)
"""
query = ' '.join(self._query_str_queue)
self._query_str_queue = [query]
query = f'{self.start_query_str} {query} {self.end_query_str}'
return query, self._args
    async def fetch(self, timeout: float = None) -> Union[List[ModelBase], List[Record]]:
        """Execute the assembled query and return all matching rows.

        Rows are mapped onto instances of this query's model class.

        Args:
            timeout (float, optional): Per-query timeout in seconds.
                None (the default) means no explicit timeout.

        Returns:
            List[ModelBase]: one model instance per selected row.
        """
        # getq() collapses the queued fragments into the final SQL + args.
        query, args = self.getq()
        return await self.db.fetch(query, *args, timeout=timeout, model_class=self.model)
    async def fetchrow(self, timeout: float = None) -> Union[ModelBase, Record]:
        """Execute the assembled query and return only the first row.

        The resulting record is mapped onto this query's model class.

        Args:
            timeout (float, optional): Per-query timeout in seconds.
                None (the default) means no explicit timeout.

        Returns:
            A model class instance, or None if no rows were selected.
        """
        # getq() collapses the queued fragments into the final SQL + args.
        query, args = self.getq()
        return await self.db.fetchrow(query, *args, timeout=timeout, model_class=self.model)
    async def fetchval(self, column: int = 0, timeout: float = None) -> Any:
        """Execute the assembled query and return one column of the first row.

        Args:
            column (int, optional): Column index. Defaults to 0.
            timeout (float, optional): Per-query timeout in seconds.
                None (the default) means no explicit timeout.

        Returns:
            Any: value of the column (identified by index) in the first row.
        """
        # getq() collapses the queued fragments into the final SQL + args.
        query, args = self.getq()
        return await self.db.fetchval(query, *args, column=column, timeout=timeout)
    async def execute(self, timeout: float = None) -> str:
        """Execute the assembled query without fetching rows.

        Args:
            timeout (float, optional): Per-query timeout in seconds.
                None (the default) means no explicit timeout.

        Returns:
            str: status string of the last SQL command (as reported by
            the underlying driver).
        """
        # getq() collapses the queued fragments into the final SQL + args.
        query, args = self.getq()
        return await self.db.execute(query, *args, timeout=timeout)
async def get(self, *vals: Any, col: str = '', comp: str = '=$1') -> Union[ModelBase, Record]:
"""Get the first row found by column and value.
If `col` is not given, it defaults to the primary key (`pk`) of
the model.
If comparison is not given, it defaults to `=$1`
Example:
```python
from morm.db import DB
db = DB(DB_POOL) # get a db handle.
# get by pk:
user5 = await db(User).get(5)
# price between 5 and 2000
user = await db(User).get(5, 2000, col='price', comp='BETWEEN $1 AND $2')
```
Args:
*vals (any): Values to compare. Must be referenced with $1, $2 etc.. in `comp`.
col (str, optional): Column name. Defaults to the primary key.
comp (str, optional): Comparison. Defaults to '=$1'.
Returns:
model_clas object or None if no rows were selected.
"""
if not col:
col = self.model.Meta.pk #type: ignore
return await self.reset().qfilter().qc(col, comp, *vals).fetchrow()
# Transaction isolation levels accepted by Transaction(isolation=...).
# See https://www.postgresql.org/docs/current/transaction-iso.html
SERIALIZABLE = 'serializable'
REPEATABLE_READ = 'repeatable_read'
READ_COMMITTED = 'read_committed'
class Transaction():
    """Async context manager that runs DB operations inside a transaction.

    Example:

    ```python
    from morm.db import Transaction

    async with Transaction(DB_POOL) as tdb:
        # use tdb just like you use db
        user6 = await tdb(User).get(6)
        user6.age = 34
        await tdb.save(user6)
    ```

    Args:
        pool (Pool): Pool instance.
        isolation (str, optional): Transaction isolation mode; one of
            'serializable', 'repeatable_read' or 'read_committed'
            (the default).
            See https://www.postgresql.org/docs/9.5/transaction-iso.html
        readonly (bool, optional): Whether this transaction is read-only.
            Defaults to False.
        deferrable (bool, optional): Whether this transaction is deferrable.
            Defaults to False.
    """
    def __init__(self, pool: Pool, *,
                 isolation: str=READ_COMMITTED,
                 readonly: bool=False,
                 deferrable: bool=False):
        self._pool = pool
        # Private DB handle; its connection is attached in start() and
        # detached again in end(), so one Transaction holds at most one
        # connection at a time.
        self.db = DB(None) # type: ignore
        self.tr = None
        self.tr_args = {
            'isolation': isolation,
            'readonly': readonly,
            'deferrable': deferrable,
        }
    async def __aenter__(self) -> DB:
        return await self.start()
    async def start(self) -> DB:
        """Acquire a connection from the pool and begin the transaction.

        Raises:
            exceptions.TransactionError: When this object is already
                running a transaction (or was not ended properly).

        Returns:
            DB: the DB handle bound to the transaction's connection.
        """
        if self.db._con:
            raise exceptions.TransactionError('Another transaction is running (or not ended properly) with this Transaction object')
        self.db._con = await self._pool.pool.acquire() # type: ignore
        self.tr = self.db._con.transaction(**self.tr_args) # type: ignore
        await self.tr.start() # type: ignore
        return self.db
    async def rollback(self):
        """Roll back the transaction (no-op when none is active)."""
        if self.tr:
            await self.tr.rollback()
    async def commit(self):
        """Commit the transaction (no-op when none is active)."""
        if self.tr:
            await self.tr.commit()
    async def end(self):
        """Release the connection back to the pool and reset this object.

        The finally clause guarantees the handle is cleared even when the
        release itself fails, so the object can be reused.
        """
        try:
            if self.db._con:
                await self._pool.pool.release(self.db._con)
        finally:
            self.db._con = None
            self.tr = None
    async def __aexit__(self, extype, ex, tb):
        # Commit on clean exit, roll back when the body raised; always
        # release the connection afterwards.
        try:
            if extype is not None:
                await self.rollback()
            else:
                await self.commit()
        finally:
            await self.end()
| 33.969808 | 267 | 0.577177 | 35,843 | 0.965338 | 0 | 0 | 1,748 | 0.047078 | 11,281 | 0.303824 | 22,113 | 0.595556 |
3e8b7eee7855784a75f5858aea2cd7099da89f3d | 4,197 | py | Python | gistsig/cli.py | derekmerck/check-hashes | aaa7d596281e41bbb5b73850c5d43113b7d0632b | [
"MIT"
] | 1 | 2019-01-26T22:33:02.000Z | 2019-01-26T22:33:02.000Z | gistsig/cli.py | derekmerck/check-hashes | aaa7d596281e41bbb5b73850c5d43113b7d0632b | [
"MIT"
] | null | null | null | gistsig/cli.py | derekmerck/check-hashes | aaa7d596281e41bbb5b73850c5d43113b7d0632b | [
"MIT"
] | null | null | null | """
gistsig
Derek Merck
Winter 2019
Sign and verify Python packages using public gists.
"""
import logging
import sys
from datetime import datetime
from pprint import pformat

import click

from . import get_gist, update_gist
from . import get_pkg_info, get_pkg_gist
@click.group()
@click.option('--verbose', '-v', is_flag=True, default=False)
@click.option('--gist_id', '-g', help="Public gist id with reference signatures.")
@click.option('--gist_oauth_tok', '-o', help="Github token (only if pushing new signatures)")
@click.pass_context
def cli(ctx, verbose, gist_id, gist_oauth_tok):
    """
    Perform a simple public signature lookup to verify local Python package
    files.
    \b
    Example:
        $ gistsig -g 4b0bfbca0a415655d97f36489629e1cc show diana
        Local package has signature python-diana:2.0.13:9fec66ac3f4f87f8b933c853d8d5f49bdae0c1dc
    """
    # Stash the shared options on the click context so every subcommand
    # (which receives ctx via @click.pass_context) can read them.
    ctx.obj['gist_id'] = gist_id
    ctx.obj['gist_oauth_tok'] = gist_oauth_tok
    # --verbose switches the root logger between DEBUG and ERROR.
    if verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.ERROR)
    pass
@cli.command()
@click.argument('packages', nargs=-1)
def show(packages):
    """Compute local package signature."""
    # One line of output per requested package: "<name>:<version>:<hash>".
    for pkg_name in packages:
        key, value = get_pkg_info(pkg_name)
        msg = click.style("Local package has signature {}:{}.".format(key, value), fg='yellow')
        click.echo(msg)
def find_gist_id(pkg_name):
    """Fall back to the gist id declared by the package itself.

    Asks the user for confirmation before using it.  Returns None (after
    printing a notice) when the package declares no gist or the user
    declines.
    """
    declared = get_pkg_gist(pkg_name)
    if declared:
        prompt = "No reference gist set, use package declared gist? ({})".format(declared)
        if click.confirm(prompt):
            return declared
    click.echo("No gist found for this package")
    return None
@click.command()
@click.argument("packages", nargs=-1)
@click.pass_context
def pull(ctx, packages):
    """Show public package signatures."""
    gist_id = ctx.obj['gist_id']
    for pkg_name in packages:
        # Resolve the gist once (possibly from the package itself) and
        # reuse the same id for the remaining packages.
        if not gist_id:
            gist_id = find_gist_id(pkg_name)
        pkg_sigs = get_gist(gist_id=gist_id, name=pkg_name)
        msg = click.style("Reference package has signatures:", fg='yellow')
        click.echo(msg)
        click.echo(pformat(pkg_sigs))
@click.command()
@click.argument("packages", nargs=-1)
@click.pass_context
def verify(ctx, packages):
    """Compare local to public package signatures."""
    # Process exit status: 0 when every package matches, 1 otherwise.
    exit_code = 0
    gist_id = ctx.obj['gist_id']
    for pkg_name in packages:
        key, value = get_pkg_info(pkg_name)
        if not gist_id:
            # Fall back to the gist declared by the package (asks the user);
            # once resolved, the id is reused for the remaining packages.
            gist_id = find_gist_id(pkg_name)
        pkg_sigs = get_gist(gist_id=gist_id, name=pkg_name)
        # Look up the reference hash for this exact "<name>:<version>" key.
        ref = None
        if pkg_sigs:
            entry = pkg_sigs.get(key)
            if entry:
                ref = entry.get('hash')
        if value != ref:
            msg = click.style("Package signature {}:{} is not valid.".format(key, value), fg='red')
            click.echo(msg)
            exit_code = 1
        else:
            msg = click.style("Package signature {}:{} is valid.".format(key, value), fg="green")
            click.echo(msg)
    # FIX: use sys.exit() instead of the site-module exit() helper, which is
    # meant for interactive sessions and is absent under `python -S`.
    sys.exit(exit_code)
@click.command()
@click.argument("packages", nargs=-1)
@click.pass_context
def push(ctx, packages):
    """Update public package signatures"""
    gist_id = ctx.obj['gist_id']
    gist_oauth_tok = ctx.obj['gist_oauth_tok']
    if not gist_oauth_tok:
        click.echo("Need a gist oauth token to push data. Set with envvar or on the cli.")
        # FIX: sys.exit() instead of the interactive-only exit() helper.
        sys.exit(1)
    for pkg_name in packages:
        if not gist_id:
            # Fall back to the gist declared by the package (asks the user).
            gist_id = find_gist_id(pkg_name)
        pkg_sigs = get_gist(gist_id=gist_id, name=pkg_name)
        # Robustness: start a fresh signature table when the gist has no
        # entry for this package yet (previously this crashed on item
        # assignment below when get_gist returned None).
        if not pkg_sigs:
            pkg_sigs = {}
        logging.debug("Found pkg keys:")
        logging.debug(pformat(pkg_sigs))
        key, value = get_pkg_info(pkg_name)
        click.echo("Submitting signature {}:{}".format(key, value))
        pkg_sigs[key] = {"hash": value,
                         "time": datetime.now().isoformat()}
        update_gist(oauth_tok=gist_oauth_tok, gist_id=gist_id,
                    name=pkg_name, content=pkg_sigs)
def _cli():
    """Register the stand-alone subcommands and dispatch to click."""
    for command in (show, pull, verify, push):
        cli.add_command(command)
    cli(auto_envvar_prefix="GISTSIG", obj={})
# Module entry point: delegate to the wrapper that wires up the subcommands.
if __name__ == "__main__":
    _cli()
| 27.794702 | 99 | 0.646414 | 0 | 0 | 0 | 0 | 3,424 | 0.815821 | 0 | 0 | 1,210 | 0.288301 |
3e8c2e49f52c5a966e053c091e7e268d680d58d4 | 2,397 | py | Python | cvxpy/reductions/solvers/conic_solvers/super_scs_conif.py | mostafaelaraby/cvxpy | 078e025be8b8315b5f579bd0209e8e3a1e2a2a19 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-09-24T12:59:45.000Z | 2021-09-24T13:00:08.000Z | cvxpy/reductions/solvers/conic_solvers/super_scs_conif.py | mostafaelaraby/cvxpy | 078e025be8b8315b5f579bd0209e8e3a1e2a2a19 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cvxpy/reductions/solvers/conic_solvers/super_scs_conif.py | mostafaelaraby/cvxpy | 078e025be8b8315b5f579bd0209e8e3a1e2a2a19 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-04-12T05:17:18.000Z | 2020-04-12T05:17:18.000Z | """
Copyright 2018 Riley Murray
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import cvxpy.settings as s
from cvxpy.reductions.solvers.conic_solvers.conic_solver import ConicSolver
from cvxpy.reductions.solvers.conic_solvers.scs_conif import dims_to_solver_dict, SCS
class SuperSCS(SCS):
    """Interface for the SuperSCS solver (an accelerated variant of SCS)."""

    DEFAULT_SETTINGS = {'use_indirect': False, 'eps': 1e-8, 'max_iters': 10000}

    def name(self):
        """Return the name of the solver as known to CVXPY."""
        return s.SUPER_SCS

    def import_solver(self):
        """Import the solver package, raising ImportError when missing."""
        import superscs
        superscs  # For flake8

    def solve_via_data(self, data, warm_start, verbose, solver_opts, solver_cache=None):
        """Returns the result of the call to SuperSCS.

        Parameters
        ----------
        data : dict
            Data generated via an apply call.
        warm_start : Bool
            Whether to warm_start SuperSCS.
        verbose : Bool
            Control the verbosity.
        solver_opts : dict
            SuperSCS-specific options.
        solver_cache : dict, optional
            Cache of previous solutions, keyed by solver name.

        Returns
        -------
        The result returned by a call to superscs.solve().
        """
        import superscs
        args = {"A": data[s.A], "b": data[s.B], "c": data[s.C]}
        # BUG FIX: the cache is keyed by the *string* self.name(); the old
        # code tested `self.name in solver_cache` (the bound method object),
        # which is never a key, so warm starting silently never happened.
        if warm_start and solver_cache is not None and \
                self.name() in solver_cache:
            args["x"] = solver_cache[self.name()]["x"]
            args["y"] = solver_cache[self.name()]["y"]
            args["s"] = solver_cache[self.name()]["s"]
        cones = dims_to_solver_dict(data[ConicSolver.DIMS])
        # Fill in any settings the caller did not override.
        user_opts = list(solver_opts.keys())
        for k in list(SuperSCS.DEFAULT_SETTINGS.keys()):
            if k not in user_opts:
                solver_opts[k] = SuperSCS.DEFAULT_SETTINGS[k]
        results = superscs.solve(
            args,
            cones,
            verbose=verbose,
            **solver_opts)
        if solver_cache is not None:
            solver_cache[self.name()] = results
        return results
| 33.291667 | 88 | 0.635378 | 1,643 | 0.68544 | 0 | 0 | 0 | 0 | 0 | 0 | 1,088 | 0.453901 |
3e8fb96193b2244d64a924fa63c9c59dfafd9741 | 557 | py | Python | Software_University/fundamentals/functions/lecture/orders.py | Ivanazzz/SoftUni-W3resource-Python | 892321a290e22a91ff2ac2fef5316179a93f2f17 | [
"MIT"
] | 1 | 2022-01-26T07:38:11.000Z | 2022-01-26T07:38:11.000Z | Software_University/fundamentals/functions/lecture/orders.py | Ivanazzz/SoftUni-W3resource-Python | 892321a290e22a91ff2ac2fef5316179a93f2f17 | [
"MIT"
] | null | null | null | Software_University/fundamentals/functions/lecture/orders.py | Ivanazzz/SoftUni-W3resource-Python | 892321a290e22a91ff2ac2fef5316179a93f2f17 | [
"MIT"
] | null | null | null | product_type = input("Enter the product type(coffee, water, coke, snacks): ")
quantity = int(input("Enter the quantity: "))
def price(product=None, amount=None):
    """Return the total price for an order.

    Backward compatible with the original zero-argument form, which reads
    the module-level ``product_type`` and ``quantity`` globals populated
    from user input.

    Args:
        product (str, optional): product name; defaults to the global
            ``product_type``.
        amount (int, optional): number of units; defaults to the global
            ``quantity``.

    Returns:
        float or None: total price, or None for an unknown product (same
        as the original if/elif chain, which fell through with no return).
    """
    # Unit prices per product; replaces the repeated if/elif arithmetic.
    unit_prices = {
        "coffee": 1.50,
        "water": 1.00,
        "coke": 1.40,
        "snacks": 2.00,
    }
    if product is None:
        product = product_type
    if amount is None:
        amount = quantity
    unit = unit_prices.get(product)
    return None if unit is None else amount * unit
print(f"{price():.2f}") | 30.944444 | 77 | 0.626571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 122 | 0.219031 |
e40f115d7100a36cb4b801ec2f9f1a7a1eb33d05 | 4,984 | py | Python | linear_model.py | gavb222/flatpanel-localize | 6504eb94379f5df268ae280f996c7dd66f063e4e | [
"MIT"
] | 1 | 2021-02-01T18:17:11.000Z | 2021-02-01T18:17:11.000Z | linear_model.py | gavb222/flatpanel-localize | 6504eb94379f5df268ae280f996c7dd66f063e4e | [
"MIT"
] | null | null | null | linear_model.py | gavb222/flatpanel-localize | 6504eb94379f5df268ae280f996c7dd66f063e4e | [
"MIT"
] | 1 | 2021-02-01T18:07:12.000Z | 2021-02-01T18:07:12.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import time
import random
import matlab.engine
def gaussian(spread):
#spread controls the size of the array
linspace = torch.linspace(-2.5,2.5,spread)
# gaussian = e^((-x)^2/2) when standard dev is 1 and height is 1
linspace = torch.exp(-1 * torch.div(torch.pow(linspace,2),2))
out_x = linspace.expand(spread,spread)
out_y = out_x.permute(1,0)
out_gaussian = out_x * out_y
return out_gaussian
#panel_x, panel_y = panel dimensions
#n_freq = n frequency bins
#x, y = top left of gaussian
#spread = spread of the gaussian
#NB that x-spread > 0, y-spread > 0, x+spread < panel_x, y+spread < panel_y
def produce_freq_response(panel_x, panel_y, n_freq, x, y, spread, expand_dim=False):
#TODO: change these to return errors
if x+spread > panel_x-1:
return torch.ones(panel_x,panel_y)*-1
elif y+spread > panel_y-1:
return torch.ones(panel_x,panel_y)*-1
response = gaussian(spread)
#response.size is (dim,dim)
#n_gaussian_elems = response.size()[0]
#pad response with zeros until its the size we want
#n = math.floor(n_gaussian_elems/2)
#pad of x starting from 0,
#pad_left = torch.zeros((x-n),n_gaussian_elems)
#pad_right = torch.zeros((panel_x-(x+n))-1,n_gaussian_elems)
#pad_top = torch.zeros(panel_x,(y-n))
#pad_bottom = torch.zeros(panel_x,(panel_y-(y+n))-1)
#response = torch.cat((pad_left,response), dim=0)
#response = torch.cat((response,pad_right), dim=0)
#response = torch.cat((pad_top,response), dim=1)
#response = torch.cat((response,pad_bottom), dim=1)
out_array = torch.zeros(panel_x,panel_y)
out_array[x:x+spread,y:y+spread] = response
if expand_dim:
out_array = out_array.expand(n_freq,panel_x,panel_y)
return out_array
class Conv_Block(nn.Module):
    """A single 2-D convolution, optionally preceded by a ReLU."""

    def __init__(self, input_size, output_size, kernel_size=4, stride=2, padding=1, activation=True):
        super(Conv_Block, self).__init__()
        self.conv = nn.Conv2d(input_size, output_size, kernel_size, stride, padding)
        # When True, forward() applies ReLU to the input before convolving.
        self.activation = activation

    def forward(self, x):
        """Apply (ReLU, then) the convolution to ``x``."""
        inputs = F.relu(x) if self.activation else x
        return self.conv(inputs)
class Conv_Net(nn.Module):
    """Five Conv_Block stages plus a linear head producing a (4, 6) grid of
    sigmoid activations (24 values total).

    NOTE(review): the flatten uses view(-1), which folds the batch dimension
    in, and the head is hard-wired to 384 features, so this only works for a
    single (1, C, 50, 50) input - confirm against the training loop.
    """

    def __init__(self, input_channels, n_filters, output_channels):
        super(Conv_Net, self).__init__()
        self.conv1 = Conv_Block(input_channels, n_filters, activation=False)
        self.conv2 = Conv_Block(n_filters, n_filters * 2)
        self.conv3 = Conv_Block(n_filters * 2, n_filters * 4)
        self.conv4 = Conv_Block(n_filters * 4, n_filters * 8, stride=1)
        self.conv5 = Conv_Block(n_filters * 8, output_channels, stride=1)
        self.classifier = nn.Linear(384, 24)

    def forward(self, x):
        """Run the conv stack, flatten, classify and reshape to (4, 6)."""
        for stage in (self.conv1, self.conv2, self.conv3, self.conv4, self.conv5):
            x = stage(x)
        logits = self.classifier(x.view(-1))
        return torch.sigmoid(logits).reshape(4, 6)
# ---- training setup -------------------------------------------------------
model = Conv_Net(1, 16, 24)
model.cuda()
model.train()
# BUG FIX: nn.MSELoss (the class) was assigned instead of an instance, so the
# later call loss_fn(a, b) constructed a loss module instead of computing a loss.
loss_fn = nn.MSELoss()
# Renamed from "criterion": this object is the optimizer, not a loss.
optimizer = torch.optim.Adam(model.parameters(), lr=.0001, betas=(.5, .999))
keep_training = True  # NOTE(review): never set to False, so the loop runs forever - confirm intended.
epoch_counter = 0
panel_x = 50
panel_y = 50
eng = matlab.engine.start_matlab()
# Panel geometry: four exciter positions (as fractions of the panel size).
driver_locations = torch.tensor((0.25, 0.25, 0.75, 0.75, 0.25, 0.75, 0.75, 0.25)).view(4, 2)
Lx = 0.3
Ly = 0.5
while keep_training:
    epoch_counter = epoch_counter + 1
    time_start = time.time()
    gt = torch.ones(panel_x, panel_y) * -1
    model.zero_grad()
    # Sample random target placements until produce_freq_response accepts one
    # (it returns an all -1 panel for out-of-bounds configurations).
    while gt[0, 0] == -1:
        gt = produce_freq_response(panel_x, panel_y, 1,
                                   random.randint(1, panel_x - 1),
                                   random.randint(1, panel_y - 1),
                                   random.randint(3, 15))
    coefs = model(gt.unsqueeze(0).unsqueeze(0).cuda())
    print(coefs.size())
    # The MATLAB engine needs plain numpy arrays, not torch tensors.
    response1, frequencies = eng.get_biquad_response(coefs[0, :].cpu().detach().numpy(), 44100, nargout=2)
    response2, temp = eng.get_biquad_response(coefs[1, :].cpu().detach().numpy(), 44100, nargout=2)
    response3, temp = eng.get_biquad_response(coefs[2, :].cpu().detach().numpy(), 44100, nargout=2)
    response4, temp = eng.get_biquad_response(coefs[3, :].cpu().detach().numpy(), 44100, nargout=2)
    # NOTE(review): torch.stack expects tensors; the MATLAB engine returns
    # matlab.double arrays, so these likely need torch.tensor(...) first - confirm.
    responses = torch.stack((response1, response2, response3, response4), dim=-1)
    # BUG FIX: MATLAB functions are invoked with (...) - the original used
    # [...] (indexing) and then called view_total_scan on the engine instead
    # of on the constructed panel object.
    matlab_panel = eng.Clamped_Panel(driver_locations, responses, frequencies, Lx, Ly)
    matlab_out = matlab_panel.view_total_scan(200, 0)
    loss = loss_fn(matlab_out, gt)
    # BUG FIX: gradients were never computed; backward() must run before step().
    loss.backward()
    optimizer.step()
    print("holy moly!")
e40f68af3b51a18af4106a68a0e2666e5541b720 | 4,438 | py | Python | client/client.py | s-ball/remo_serv | 66accbd77183db0628a9618cf258656ec2d81316 | [
"MIT"
] | null | null | null | client/client.py | s-ball/remo_serv | 66accbd77183db0628a9618cf258656ec2d81316 | [
"MIT"
] | null | null | null | client/client.py | s-ball/remo_serv | 66accbd77183db0628a9618cf258656ec2d81316 | [
"MIT"
] | null | null | null | # Copyright (c) 2020 SBA- MIT License
import getpass
import argparse
import sys
import cmd
import shlex
from urllib.error import HTTPError
from cryptography.hazmat.primitives import serialization
from client.clientlib import login, Connection
from client import smartcard
def parse2(arg):
    """Split a command line into exactly two tokens.

    A single token is duplicated (the remote name doubles as the local
    name); any token count other than one or two yields the (None, None)
    error marker.
    """
    tokens = shlex.split(arg)
    if len(tokens) == 2:
        return tokens
    if len(tokens) == 1:
        return [tokens[0], tokens[0]]
    return None, None
class CmdLoop(cmd.Cmd):
    """Interactive shell talking to a remo_serv instance.

    The do_* docstrings below are printed verbatim by cmd's built-in
    ``help`` command, so they are kept user-facing.
    """
    def __init__(self, con: Connection, server, encoding):
        # Connection wrapper used by every command.
        self.con = con
        # Show the server URL as the shell prompt.
        self.prompt = server + '> '
        super().__init__()
        # Character encoding used to decode the server's responses.
        self.encoding = encoding
    def do_get(self, arg):
        """Get a file from remote: get remote_file [local_file]"""
        # parse2 returns (None, None) for anything but 1 or 2 tokens.
        params = parse2(arg)
        if params[0] is None:
            print('ERROR: 1 or 2 parameters required', file=sys.stderr)
        else:
            try:
                self.con.get(*params)
            except HTTPError as e:
                print(e)
    def do_put(self, arg):
        """Send a file to remote: put remote_file [local_file]"""
        params = parse2(arg)
        if params[0] is None:
            print('ERROR: 1 or 2 parameters required', file=sys.stderr)
        else:
            try:
                self.con.put(*params)
            except HTTPError as e:
                print(e)
    def do_exec(self, arg):
        """Execute a command on the remote and print the result: exec cmd param"""
        try:
            r = self.con.exec(arg)
            print(r.read().decode(self.encoding))
        except HTTPError as e:
            print(e)
    def do_iexec(self, arg):
        """Execute an interactive command"""
        try:
            r = self.con.iexec(arg)
            print(r.read().decode(self.encoding))
        except HTTPError as e:
            print(e)
    def do_idata(self, arg):
        """Send input to the interactive command: idata data..."""
        try:
            r = self.con.idata(arg)
            print(r.read().decode(self.encoding))
        except HTTPError as e:
            print(e)
    def do_iend(self, _arg):
        """Close the input channel of the interactive command"""
        try:
            r = self.con.end_cmd()
            print(r.read().decode(self.encoding))
        except HTTPError as e:
            print(e)
    # do_EOF makes Ctrl-D terminate the loop (cmd stops when a handler
    # returns a truthy value).
    # noinspection PyPep8Naming
    @staticmethod
    def do_EOF(_arg):
        """Quit the program"""
        return True
    @staticmethod
    def do_quit(_arg):
        """Quit the program"""
        return True
    def do_set_encoding(self, arg):
        """Set the server encoding"""
        self.encoding = arg
def parse(args):
    """Parse the command line into an argparse namespace.

    When no --key is given, the user key file name is derived from the
    user name as '<user>_key.pem'.
    """
    cli = argparse.ArgumentParser()
    cli.add_argument('host', help='Name or address of remote')
    cli.add_argument('port', nargs='?', default=80, type=int,
                     help='Server port (default: 80)')
    cli.add_argument('--server', '-s', default='remo_serv.pem',
                     help='Public key of the server (PEM format)')
    cli.add_argument('--user', '-u', default=getpass.getuser(),
                     help='user name')
    cli.add_argument('--key', '-k',
                     help='File name of user key (PEM format). Default: user_key.pem')
    cli.add_argument('--label', '-l',
                     help='Label of a certificate private key on a smart card')
    cli.add_argument('--encoding', '-e', default='utf_8',
                     help='encoding of the server')
    options = cli.parse_args(args)
    if options.key is None:
        options.key = options.user + '_key.pem'
    return options
# noinspection PyArgumentList
def run(args):
    """Connect to the remote server and enter the interactive command loop."""
    options = parse(args)
    # The server's public key is always loaded from a PEM file.
    with open(options.server, 'rb') as key_file:
        server_key = serialization.load_pem_public_key(key_file.read())
    # Authenticate either with a PEM private key or with a smart-card token.
    if options.label is None:
        with open(options.key, 'rb') as key_file:
            user_key = serialization.load_pem_private_key(key_file.read(), b'foo')
        token = None
    else:
        user_key = None
        token = smartcard.get_token(options.label)
    url = 'http://' + options.host
    if options.port != 80:
        url += ':' + str(options.port)
    session = login(url, '/auth', options.user, user_key, token, server_key)
    CmdLoop(session, url, options.encoding).cmdloop()
# Script entry point: hand the raw CLI arguments to run().
if __name__ == '__main__':
    run(sys.argv[1:])
| 30.190476 | 83 | 0.570077 | 2,211 | 0.498197 | 0 | 0 | 173 | 0.038982 | 0 | 0 | 979 | 0.220595 |
e410307635af99e3b3cc52fdda648a0910806c95 | 1,867 | py | Python | unfollower.py | Sam-F90/unfollower | feee9815f440d3a654f77a21ec84680ac92022c1 | [
"MIT"
] | null | null | null | unfollower.py | Sam-F90/unfollower | feee9815f440d3a654f77a21ec84680ac92022c1 | [
"MIT"
] | null | null | null | unfollower.py | Sam-F90/unfollower | feee9815f440d3a654f77a21ec84680ac92022c1 | [
"MIT"
] | null | null | null | import tweepy
import datetime
import os
# Read the four Twitter credentials from the "TWITTER_KEYS" environment
# variable (a single comma-separated string).
TWITTER_API_KEYS = (os.environ.get("TWITTER_KEYS").split(","))
consumer_key, consumer_secret, access_token_key, access_token_secret = TWITTER_API_KEYS
# Authenticate to Twitter
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token_key, access_token_secret)
# establish api
api = tweepy.API(auth, wait_on_rate_limit=True,
    wait_on_rate_limit_notify=True)
# Verify the credentials before doing anything else.
# FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
try:
    api.verify_credentials()
except Exception:
    print("Error during authentication")
    exit()
# The authenticated user; reused below instead of repeated api.me() calls.
me = api.me()
# IDs of the accounts we follow.
friends = api.friends_ids(me.id)
# IDs of the accounts following us.
# FIX: the cursor was capped at me.friends_count (accounts *we* follow),
# which silently truncated the follower list; use followers_count instead.
follower_ids = []
for follower in tweepy.Cursor(api.followers, me.screen_name).items(me.followers_count):
    follower_ids.append(follower.id)
# IDs of muted accounts.
muted_friends = api.mutes_ids()
# Candidates: friends who are muted and do not follow back.
# Sets make both membership tests O(1) instead of scanning lists.
follower_id_set = set(follower_ids)
muted_set = set(muted_friends)
to_unfollow = []
for friend in friends:
    if friend not in follower_id_set and friend in muted_set:
        to_unfollow.append(friend)
# One timestamped report for the log file and one for the DM.
log = [datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")]
dm = [datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")]
# Record every candidate in log[] and dm[].
# NOTE(review): the destroy_friendship call is commented out, so this is a
# dry run that only *reports* candidates - confirm whether unfollowing
# should actually happen.
for user in to_unfollow:
    # unfollowed = api.destroy_friendship(user)
    unfollowed = api.get_user(user)
    log.append(f"unfollowed {unfollowed.screen_name} "
               f"[{unfollowed.friends_count},{unfollowed.followers_count}]")
    dm.append("@" + unfollowed.screen_name)
# Append the report to the log file.
with open("unfollow_log.txt", "a") as fp:
    for line in log:
        fp.write(line + "\n")
    fp.write("\n")
# Send the same report to ourselves as a direct message.
api.send_direct_message(me.id, "\n".join(dm))
print("finished")
e4129e9fa1ffc789238869830a16a81f822bb51c | 2,113 | py | Python | alpha/NN/autoencoders/charlie.py | DanielBerns/keras-effective-adventure | d9bc8c08f769f0c07379d2a3756d040ca14239f2 | [
"MIT"
] | null | null | null | alpha/NN/autoencoders/charlie.py | DanielBerns/keras-effective-adventure | d9bc8c08f769f0c07379d2a3756d040ca14239f2 | [
"MIT"
] | null | null | null | alpha/NN/autoencoders/charlie.py | DanielBerns/keras-effective-adventure | d9bc8c08f769f0c07379d2a3756d040ca14239f2 | [
"MIT"
] | null | null | null | # https://medium.com/datadriveninvestor/deep-autoencoder-using-keras-b77cd3e8be95
from keras.datasets import mnist
from keras.layers import Input, Dense
from keras.models import Model
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Load MNIST, scale pixel values to [0, 1] and flatten each 28x28 image to a
# 784-dimensional vector (labels are discarded - this is unsupervised).
(X_train, _), (X_test, _) = mnist.load_data()
X_train = X_train.astype('float32')/255
X_test = X_test.astype('float32')/255
X_train = X_train.reshape(len(X_train), np.prod(X_train.shape[1:]))
X_test = X_test.reshape(len(X_test), np.prod(X_test.shape[1:]))
print(X_train.shape)
print(X_test.shape)
# Symmetric dense autoencoder: 784 -> 128 -> 64 -> 32 (code) -> 64 -> 128 -> 784.
input_img= Input(shape=(784,))
encoded = Dense(units=128, activation='relu')(input_img)
encoded = Dense(units=64, activation='relu')(encoded)
encoded = Dense(units=32, activation='relu')(encoded)
decoded = Dense(units=64, activation='relu')(encoded)
decoded = Dense(units=128, activation='relu')(decoded)
# Sigmoid output keeps reconstructions in [0, 1], matching the scaled input.
decoded = Dense(units=784, activation='sigmoid')(decoded)
# Full autoencoder plus a separate encoder model sharing the same layers.
autoencoder=Model(input_img, decoded)
encoder = Model(input_img, encoded)
print('autoencoder')
autoencoder.summary()
print('encoder')
encoder.summary()
# Train to reconstruct the input; test data doubles as the validation set.
autoencoder.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
autoencoder.fit(X_train, X_train,
                epochs=50,
                batch_size=256,
                shuffle=True,
                validation_data=(X_test, X_test))
encoded_imgs = encoder.predict(X_test)
predicted = autoencoder.predict(X_test)
# Show 10 test digits in three rows: original, 32-d code (as 8x4), reconstruction.
plt.figure(figsize=(40, 4))
for i in range(10):
    # display original images
    ax = plt.subplot(3, 20, i + 1)
    plt.imshow(X_test[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    # display encoded images
    ax = plt.subplot(3, 20, i + 1 + 20)
    plt.imshow(encoded_imgs[i].reshape(8,4))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    # display reconstructed images
    ax = plt.subplot(3, 20, 2*20 +i+ 1)
    plt.imshow(predicted[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
| 27.802632 | 87 | 0.69664 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 276 | 0.13062 |
e4137613cb4a7761df5564e9e723f2867c6f080e | 5,569 | py | Python | tests/pages/alert_box_page.py | nairraghav/selenium-example | 88e4316a75bcd7feced65489c0ffe1b8c2b8487b | [
"MIT"
] | null | null | null | tests/pages/alert_box_page.py | nairraghav/selenium-example | 88e4316a75bcd7feced65489c0ffe1b8c2b8487b | [
"MIT"
] | null | null | null | tests/pages/alert_box_page.py | nairraghav/selenium-example | 88e4316a75bcd7feced65489c0ffe1b8c2b8487b | [
"MIT"
] | null | null | null | class AlertBoxPage:
    def __init__(self, driver):
        """Store the web driver plus the page's locators and expected texts."""
        self.driver = driver
        # Page heading and intro paragraph.
        self.title_css = "h1"
        self.title_text = "Alert Box Examples"
        self.explanation_css = "div.explanation > p"
        self.explanation_text = (
            "There are three main JavaScript methods "
            "which show alert dialogs: alert, confirm "
            "and prompt. This page has examples of each."
        )
        # Shared selector matching every description paragraph in the body.
        self.alert_box_descriptions_css = "div.page-body > p"
        # alert() example: trigger button and dialog text.
        self.alert_box_description_text = (
            "The following button will display an alert when clicked."
        )
        self.alert_box_button_id = "alertexamples"
        self.alert_box_button_text = "Show alert box"
        self.alert_box_text = "I am an alert box!"
        # confirm() example: trigger button plus the elements that echo the
        # dialog's boolean result and explanation sentence.
        self.confirm_box_description_text = (
            "The following button will display a confirm dialog when clicked."
        )
        self.confirm_box_button_id = "confirmexample"
        self.confirm_box_button_text = "Show confirm box"
        self.confirm_boolean_text_id = "confirmreturn"
        self.confirm_boolean_confirm_text = "true"
        self.confirm_boolean_cancel_text = "false"
        self.confirm_text_id = "confirmexplanation"
        self.confirm_text = "You clicked OK, confirm returned true."
        self.cancel_text = "You clicked Cancel, confirm returned false."
        # prompt() example: trigger button plus the elements that echo the
        # typed value and the OK/Cancel explanation sentence.
        self.prompt_box_description_text = (
            "The following button will display a prompt dialog when clicked."
        )
        self.prompt_box_button_id = "promptexample"
        self.prompt_box_button_text = "Show prompt box"
        self.prompt_box_text = "I prompt you"
        self.prompt_value_text_id = "promptreturn"
        self.prompt_text_id = "promptexplanation"
        self.prompt_text_prefix = "You clicked OK. 'prompt' returned "
        self.prompt_cancel_text = "You clicked Cancel. 'prompt' returned null"
def is_page_rendered(self):
for element_css in (
self.title_css,
self.explanation_css,
self.alert_box_descriptions_css,
):
found_element = self.driver.find_element_by_css_selector(element_css)
if found_element is None:
return False
for element_id in (
self.alert_box_button_id,
self.confirm_box_button_id,
self.prompt_box_button_id,
):
found_element = self.driver.find_element_by_id(element_id)
if found_element is None:
return False
return True
def validate_text_on_page(self):
for element_css, text in (
(self.title_css, self.title_text),
(self.explanation_css, self.explanation_text),
):
found_element = self.driver.find_element_by_css_selector(element_css)
assert found_element.text == text
for element_id, text in (
(self.alert_box_button_id, self.alert_box_button_text),
(self.confirm_box_button_id, self.confirm_box_button_text),
(self.prompt_box_button_id, self.prompt_box_button_text),
):
found_element = self.driver.find_element_by_id(element_id)
assert found_element.get_attribute("value") == text, (
f"Actual: {found_element.get_attribute('value')}\t\t" "Expected: {text}"
)
descriptions = self.driver.find_elements_by_css_selector(
self.alert_box_descriptions_css
)
assert descriptions[0].text == self.alert_box_description_text
assert descriptions[2].text == self.confirm_box_description_text
assert descriptions[4].text == self.prompt_box_description_text
def interact_with_alert_box(self):
self.driver.find_element_by_id(self.alert_box_button_id).click()
alert = self.driver.switch_to.alert
alert.accept()
def interact_with_confirm_box(self):
self.driver.find_element_by_id(self.confirm_box_button_id).click()
alert = self.driver.switch_to.alert
alert.accept()
assert (
self.driver.find_element_by_id(self.confirm_boolean_text_id).text
== self.confirm_boolean_confirm_text
)
assert (
self.driver.find_element_by_id(self.confirm_text_id).text
== self.confirm_text
)
self.driver.find_element_by_id(self.confirm_box_button_id).click()
alert = self.driver.switch_to.alert
alert.dismiss()
assert (
self.driver.find_element_by_id(self.confirm_boolean_text_id).text
== self.confirm_boolean_cancel_text
)
assert (
self.driver.find_element_by_id(self.confirm_text_id).text
== self.cancel_text
)
def interact_with_prompt_box(self):
self.driver.find_element_by_id(self.prompt_box_button_id).click()
alert = self.driver.switch_to.alert
alert_text = "Testing"
alert.send_keys(alert_text)
alert.accept()
assert (
self.driver.find_element_by_id(self.prompt_value_text_id).text == alert_text
)
assert (
self.driver.find_element_by_id(self.prompt_text_id).text
== f"{self.prompt_text_prefix}{alert_text}"
)
self.driver.find_element_by_id(self.prompt_box_button_id).click()
alert = self.driver.switch_to.alert
alert.dismiss()
assert (
self.driver.find_element_by_id(self.prompt_text_id).text
== self.prompt_cancel_text
)
| 40.355072 | 88 | 0.644101 | 5,568 | 0.99982 | 0 | 0 | 0 | 0 | 0 | 0 | 887 | 0.159275 |
e41482448ad0c9a9ce2ec0102c5edc24cd4e69ff | 11,339 | py | Python | tests/test_views/test_memberships.py | freelancing-solutions/GCP-Based-Database-as-a-Service | 7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3 | [
"MIT"
] | 1 | 2021-04-15T19:45:04.000Z | 2021-04-15T19:45:04.000Z | tests/test_views/test_memberships.py | freelancing-solutions/pinydesk | 7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3 | [
"MIT"
] | 516 | 2021-05-02T11:46:36.000Z | 2022-03-29T06:09:49.000Z | tests/test_views/test_memberships.py | freelancing-solutions/pinydesk | 7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3 | [
"MIT"
] | 1 | 2021-09-04T22:40:14.000Z | 2021-09-04T22:40:14.000Z | import random
import typing
from datetime import datetime, timedelta
from random import randint
from google.cloud import ndb
from data_service.config.stocks import currency_symbols
from data_service.store.mixins import AmountMixin
from data_service.views.memberships import MembershipsView
from data_service.store.memberships import Memberships, MembershipPlans
from data_service.utils.utils import create_id
from .. import test_app
# noinspection PyUnresolvedReferences
from pytest import raises
# noinspection PyUnresolvedReferences
from pytest_mock import mocker
class MembershipsQueryMock:
    """Stand-in for an ndb query over Memberships used when patching Model.query."""

    # Single instance returned by every get()/fetch() call.
    membership_instance: Memberships = Memberships()
    # Random number of duplicated results fetch() pretends to return.
    results_range: int = randint(0, 100)
    def __init__(self):
        # Populate the mock record with plausible field values.
        self.membership_instance.plan_id = create_id()
        self.membership_instance.status = "paid"
        self.membership_instance.date_created = datetime.now()
        self.membership_instance.plan_start_date = datetime.now().date()
        self.membership_instance.payment_method = 'paypal'
    def fetch(self) -> typing.List[Memberships]:
        """Mimic Query.fetch(): return results_range copies of the mock record."""
        return [self.membership_instance for _ in range(self.results_range)]
    def get(self) -> Memberships:
        """Mimic Query.get(): return the single mock record."""
        return self.membership_instance
    @ndb.tasklet
    def get_async(self):
        """Mimic Query.get_async() as an ndb tasklet."""
        return self.membership_instance
class MembershipPlansQueryMock:
    """Stand-in for an ndb query over MembershipPlans used when patching Model.query."""

    # Single instance returned by every get()/fetch() call.
    membership_plan_instance: MembershipPlans = MembershipPlans()
    # Random number of duplicated results fetch() pretends to return.
    results_range: int = randint(0, 100)
    def __init__(self):
        # Populate the mock plan with plausible field values.
        self.membership_plan_instance.date_created = datetime.now()
        self.membership_plan_instance.plan_name = "bronze"
        self.membership_plan_instance.description = "bronze plan"
        self.membership_plan_instance.total_members = 10
        self.membership_plan_instance.schedule_day = 1
        self.membership_plan_instance.schedule_term = "monthly"
        self.membership_plan_instance.term_payment_amount = AmountMixin(amount=100,
                                                                        currency=random.choice(currency_symbols()))
        self.membership_plan_instance.registration_amount = AmountMixin(amount=100,
                                                                        currency=random.choice(currency_symbols()))
    def fetch(self) -> typing.List[MembershipPlans]:
        """Mimic Query.fetch(): return results_range copies of the mock plan."""
        return [self.membership_plan_instance for _ in range(self.results_range)]
    def get(self) -> MembershipPlans:
        """Mimic Query.get(): return the single mock plan."""
        return self.membership_plan_instance
# Shared fixture values fed into the MembershipsView calls below.
membership_mock_data: dict = {
    "uid": create_id(),
    "plan_id": create_id(),
    "status": "unpaid",
    "date_created": datetime.now(),
    "plan_start_date": datetime.date(datetime.now() + timedelta(days=5))
}
# noinspection PyShadowingNames
def test_create_membership(mocker):
    """add_membership fails (500) without validator patches and succeeds (200) with them."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid = membership_mock_data['uid']
        plan_id = membership_mock_data['plan_id']
        plan_start_date = membership_mock_data['plan_start_date']
        # First call: validators un-patched, so the view is expected to reject.
        response, status = membership_view_instance.add_membership(uid=uid, plan_id=plan_id,
                                                                   plan_start_date=plan_start_date)
        response_data: dict = response.get_json()
        assert status == 500, response_data['message']
        # Patch validators so the same call is accepted.
        # NOTE(review): plan_exist is patched to False here — presumably "plan does
        # not already exist for this user"; confirm against the validator's contract.
        mocker.patch('data_service.store.users.UserValidators.is_user_valid', return_value=True)
        mocker.patch('data_service.store.memberships.PlanValidators.plan_exist', return_value=False)
        mocker.patch('data_service.store.memberships.MembershipValidators.start_date_valid', return_value=True)
        # mocker.patch('data_service.views.memberships.Validators.can_add_member', return_value=True)
        response, status = membership_view_instance.add_membership(uid=uid, plan_id=plan_id,
                                                                   plan_start_date=plan_start_date)
        response_data: dict = response.get_json()
        assert status == 200, response_data['message']
        mocker.stopall()
# noinspection PyShadowingNames
def test_update_membership(mocker):
    """update_membership returns 200 with a message and payload when validators pass."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid = membership_mock_data['uid']
        plan_id = membership_mock_data['plan_id']
        plan_start_date = membership_mock_data['plan_start_date']
        mocker.patch('data_service.store.users.UserValidators.is_user_valid', return_value=True)
        mocker.patch('data_service.store.memberships.PlanValidators.plan_exist', return_value=False)
        mocker.patch('data_service.store.memberships.MembershipValidators.start_date_valid', return_value=True)
        response, status = membership_view_instance.update_membership(uid=uid, plan_id=plan_id,
                                                                      plan_start_date=plan_start_date)
        assert status == 200, "Unable to update membership"
        response_data: dict = response.get_json()
        assert response_data.get('message') is not None, "message was not set properly"
        assert response_data.get('payload') is not None, response_data['message']
        mocker.stopall()
# noinspection PyShadowingNames
def test_set_membership_status(mocker):
    """set_membership_status returns 200 for both the fixture status and 'paid'."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid = membership_mock_data['uid']
        status = membership_mock_data['status']
        # NOTE(review): `status` is rebound to the HTTP status code here, shadowing
        # the membership-status string above — intentional but easy to misread.
        response, status = membership_view_instance.set_membership_status(uid=uid, status=status)
        assert status == 200, "Unable to set membership status"
        response, status = membership_view_instance.set_membership_status(uid=uid, status="paid")
        assert status == 200, "Unable to set membership status"
        mocker.stopall()
# noinspection PyShadowingNames
def test_change_membership(mocker):
    """change_membership returns 200 when the origin plan matches the stored membership."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    # Align the mocked membership's plan with the fixture's plan_id so the
    # origin-plan check inside the view passes.
    membership_query_mock_instance = MembershipsQueryMock()
    membership_query_mock_instance.membership_instance.plan_id = membership_mock_data['plan_id']
    mocker.patch('google.cloud.ndb.Model.query', return_value=membership_query_mock_instance)
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid: str = membership_mock_data['uid']
        plan_id: str = membership_mock_data['plan_id']
        dest_plan_id: str = create_id()
        mocker.patch('data_service.views.memberships.MembershipsView.plan_exist', return_value=True)
        response, status = membership_view_instance.change_membership(uid=uid, origin_plan_id=plan_id,
                                                                      dest_plan_id=dest_plan_id)
        assert status == 200, "Unable to change membership"
        mocker.stopall()
# noinspection PyShadowingNames
def test_send_welcome_email(mocker):
    """send_welcome_email returns 200 for a valid uid/plan_id pair."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid: str = membership_mock_data['uid']
        plan_id: str = membership_mock_data['plan_id']
        response, status = membership_view_instance.send_welcome_email(uid=uid, plan_id=plan_id)
        assert status == 200, "unable to send welcome email"
        mocker.stopall()
# noinspection PyShadowingNames
def test_plan_members_payment_status(mocker):
    """return_plan_members_by_payment_status returns 200 for the fixture plan/status."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid: str = membership_mock_data['uid']
        plan_id: str = membership_mock_data['plan_id']
        status: str = membership_mock_data['status']
        # NOTE(review): `status` is rebound to the HTTP status code by this call,
        # shadowing the payment-status string passed in.
        response, status = membership_view_instance.return_plan_members_by_payment_status(plan_id=plan_id,
                                                                                          status=status)
        assert status == 200, "unable to fetch plan members by status"
        mocker.stopall()
# noinspection PyShadowingNames
def test_return_plan_members(mocker):
    """return_plan_members returns 200 for the fixture plan_id."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        plan_id: str = membership_mock_data['plan_id']
        response, status = membership_view_instance.return_plan_members(plan_id=plan_id)
        assert status == 200, "unable to fetch plan members"
        mocker.stopall()
# noinspection PyShadowingNames
def test_is_member_off(mocker):
    """is_member_off returns 200 for the fixture uid."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid: str = membership_mock_data['uid']
        response, status = membership_view_instance.is_member_off(uid=uid)
        assert status == 200, "unable to test membership status"
        mocker.stopall()
# noinspection PyShadowingNames
def test_payment_amount(mocker):
    """payment_amount returns 200 when the member's plan can be resolved."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid: str = membership_mock_data['uid']
        # The view looks up the plan to compute the amount; serve a mock plan.
        mocker.patch('data_service.views.memberships.MembershipPlansView.get_plan',
                     return_value=MembershipPlansQueryMock().get())
        response, status = membership_view_instance.payment_amount(uid=uid)
        response_data: dict = response.get_json()
        assert status == 200, response_data['message']
        mocker.stopall()
# noinspection PyShadowingNames
def test_set_payment_status(mocker):
    """set_payment_status returns 200 for both 'paid' and 'unpaid'."""
    mocker.patch('google.cloud.ndb.Model.put', return_value=create_id())
    mocker.patch('google.cloud.ndb.Model.query', return_value=MembershipsQueryMock())
    with test_app().app_context():
        membership_view_instance: MembershipsView = MembershipsView()
        uid: str = membership_mock_data['uid']
        status: str = "paid"
        # NOTE(review): `status` is rebound to the HTTP status code after each call.
        response, status = membership_view_instance.set_payment_status(uid=uid, status=status)
        assert status == 200, "Unable to set payment status"
        status: str = "unpaid"
        response, status = membership_view_instance.set_payment_status(uid=uid, status=status)
        assert status == 200, "Unable to set payment status"
        mocker.stopall()
| 45.175299 | 115 | 0.71038 | 1,882 | 0.165976 | 0 | 0 | 77 | 0.006791 | 0 | 0 | 2,228 | 0.19649 |
e414c3ce91122f63e50497c6f5b8998f2cc88f9e | 3,893 | py | Python | padmini/prakarana/dvitva.py | sanskrit/padmini | 8e7e8946a7d2df9c941f689ea4bc7b6ebb7ca1d0 | [
"MIT"
] | 1 | 2022-03-01T05:05:04.000Z | 2022-03-01T05:05:04.000Z | padmini/prakarana/dvitva.py | sanskrit/padmini | 8e7e8946a7d2df9c941f689ea4bc7b6ebb7ca1d0 | [
"MIT"
] | null | null | null | padmini/prakarana/dvitva.py | sanskrit/padmini | 8e7e8946a7d2df9c941f689ea4bc7b6ebb7ca1d0 | [
"MIT"
] | null | null | null | from padmini import filters as f
from padmini import operations as op
from padmini.constants import Tag as T
from padmini.sounds import s
from padmini.prakriya import Term, Prakriya
from padmini.term_views import TermView
from padmini.prakarana.utils import eka_ac
def _double(rule: str, p: Prakriya, dhatu: Term, i: int) -> Term:
    """Perform dvitva (reduplication) of *dhatu* at index *i* under *rule*.

    Three cases, in order:
    1. vowel-initial, consonant-final root followed by Nic/NiN — double into
       the causative suffix slot (6.1.5 applies to the whole group);
    2. single-vowel or consonant-initial root — prefix a full abhyasa copy;
    3. otherwise — split the root and double its second syllable (6.1.2/6.1.3).

    Mutates the prakriya in place; term insertions and tagging go through `op`
    so each step is recorded against its sutra number.
    """
    # Special logic for Nic.
    if (
        dhatu.adi in s("ac")
        and dhatu.antya in s("hal")
        and p.terms[i + 1].u in {"Ric", "RiN"}
    ):
        ni = p.terms[i + 1]
        text = dhatu.text + ni.text
        third = Term.make_term(text[1:])
        # 6.1.3: drop leading n/d/r from an initial conjunct before doubling.
        while f.samyogadi(third) and third.adi in {"n", "d", "r"}:
            third.text = third.text[1:]
        third.u = p.terms[i + 1].u
        third.add_tags(T.DHATU)
        ni.text = third.text
        dhatu.text = dhatu.text[: -len(third.text) + 1]
        op.insert_after(rule, p, ni, third)
        op.samjna("6.1.4", p, ni, T.ABHYASA)
        dhatu.add_tags(T.ABHYASTA)
        ni.add_tags(T.ABHYASTA)
        third.add_tags(T.ABHYASTA)
        p.step("6.1.5")
    elif eka_ac(dhatu) or dhatu.adi in s("hal"):
        # TODO: correctly double jAgR
        abhyasa = Term.make_term(dhatu.text)
        op.insert_before(rule, p, dhatu, abhyasa)
        op.samjna("6.1.4", p, abhyasa, T.ABHYASA)
        abhyasa.add_tags(T.ABHYASTA)
        dhatu.add_tags(T.ABHYASTA)
        # A following Nic/NiN is part of the abhyasta group as well.
        if p.terms[i + 2].u in ("Ric", "RiN"):
            p.terms[i + 2].add_tags(T.ABHYASTA)
        p.step("6.1.5")
    else:
        # Create 3 terms:
        # 1. the dhatu without the abhyasa
        # 2. the abhyasa
        # 3. the doubled portion
        # 6.1.2 ajAder dvitIyasya
        # 6.1.3 na ndrAH saMyogAdayaH
        third = Term.make_term(dhatu.text[1:])
        while f.samyogadi(third) and third.adi in {"n", "d", "r"}:
            third.text = third.text[1:]
        third.u = dhatu.u
        third.add_tags(T.DHATU)
        # Ru -> nu for UrRu
        if dhatu.text == "UrRu":
            third.text = "nu"
        abhyasa = Term.make_term(third.text)
        abhyasa.add_tags(T.ABHYASA)
        dhatu.text = dhatu.text[: -len(third.text)]
        op.insert_after(None, p, dhatu, abhyasa)
        op.insert_after(rule, p, abhyasa, third)
        op.samjna("6.1.4", p, abhyasa, T.ABHYASA)
        dhatu.add_tags(T.ABHYASTA)
        third.add_tags(T.ABHYASTA)
        abhyasa.add_tags(T.ABHYASTA)
        if p.terms[i + 3].u in ("Ric", "RiN"):
            p.terms[i + 3].add_tags(T.ABHYASTA)
        p.step("6.1.5")
def run_for_each(p: Prakriya, dhatu: Term, i: int):
    """Apply the dvitva-triggering rules (6.1.8-6.1.11) to one dhatu.

    Inspects the pratyaya view following index *i* and dispatches to
    `_double` with the sutra that licenses reduplication (lit, san/yaN,
    slu, or caN); does nothing if no pratyaya view can be built.
    """
    n = TermView.make_pratyaya(p, i)
    if not n:
        return
    # HACK for Nic + caN
    if n.terms[0].u in ("Ric", "RiN"):
        n = TermView.make_pratyaya(p, i + 1)
        n.u = n.terms[0].u
    if dhatu.text in {"jakz", "jAgf", "daridrA", "cakAs", "SAs", "dIDI", "vevI"}:
        # These are termed abhyasta, but they can still undergo dvitva because
        # the rules below inherit "anabhyAsasya" from 6.1.8.
        op.tag("6.1.6", p, dhatu, T.ABHYASTA)
    if n.all("li~w"):
        # kAshikA:
        # dayateḥ iti dīṅo grahaṇaṃ na tu daya dāne ityasya.
        # digyādeśena dvirvacanasya bādhanam iṣyate.
        if dhatu.u == "de\\N":
            op.text("7.4.9", p, dhatu, "digi")
        else:
            _double("6.1.8", p, dhatu, i)
    elif n.u in ("san", "yaN"):
        _double("6.1.9", p, dhatu, i)
    elif n.terms[0].any(T.SLU):
        _double("6.1.10", p, dhatu, i)
    elif n.u == "caN":
        _double("6.1.11", p, dhatu, i)
def run(p: Prakriya):
    """Run dvitva over every dhatu in the prakriya.

    Iterates by index because `run_for_each` may insert new terms; the index
    is advanced past any terms added during the call so freshly inserted
    abhyasa/doubled terms are not doubled again.
    """
    i = 0
    num_terms = len(p.terms)
    while i < num_terms:
        c = p.terms[i]
        # HACK to avoid doubling the nic / nin
        if c.any(T.DHATU) and c.u not in {"Ric", "RiN"}:
            run_for_each(p, c, i)
        # Skip new terms
        i += 1 + (len(p.terms) - num_terms)
        num_terms = len(p.terms)
| 31.144 | 81 | 0.554585 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 795 | 0.203429 |
e415ae5887fb3b1c6bbb5eae6bf773f7d423747e | 1,152 | py | Python | 8kyu/a-wolf-in-sheeps-clothing/solution.py | Morioki/Code-Katas | 65bffc0675d3c0f68c60706e95e38ab1dcfc8636 | [
"MIT"
] | null | null | null | 8kyu/a-wolf-in-sheeps-clothing/solution.py | Morioki/Code-Katas | 65bffc0675d3c0f68c60706e95e38ab1dcfc8636 | [
"MIT"
] | null | null | null | 8kyu/a-wolf-in-sheeps-clothing/solution.py | Morioki/Code-Katas | 65bffc0675d3c0f68c60706e95e38ab1dcfc8636 | [
"MIT"
] | null | null | null | import unittest
def warn_the_sheep(queue):
    """Warn the sheep directly in front of the wolf.

    The queue faces right: the last element is closest to you.  If the wolf
    is the closest animal, tell it off; otherwise warn the sheep immediately
    in front of the wolf by its distance from the end of the queue.

    :param queue: list of 'sheep'/'wolf' strings containing exactly one 'wolf'
    :return: warning message string
    """
    # Distance of the wolf from the end of the queue; compute it once instead
    # of scanning the reversed list twice.
    position = queue[::-1].index('wolf')
    if position == 0:
        return 'Pls go away and stop eating my sheep'
    return f'Oi! Sheep number {position}! You are about to be eaten by a wolf!'
class TestSolution(unittest.TestCase):
    """Unit tests for warn_the_sheep covering wolf-at-end, middle, and front."""

    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    def test_1(self):
        self.assertEqual(warn_the_sheep(['sheep', 'sheep', 'sheep', 'sheep', 'sheep', 'wolf', 'sheep', 'sheep']), 'Oi! Sheep number 2! You are about to be eaten by a wolf!')
    def test_2(self):
        self.assertEqual(warn_the_sheep(['sheep', 'wolf', 'sheep', 'sheep', 'sheep', 'sheep', 'sheep']), 'Oi! Sheep number 5! You are about to be eaten by a wolf!')
    def test_3(self):
        self.assertEqual(warn_the_sheep(['wolf', 'sheep', 'sheep', 'sheep', 'sheep', 'sheep', 'sheep']), 'Oi! Sheep number 6! You are about to be eaten by a wolf!')
    def test_4(self):
        self.assertEqual(warn_the_sheep(['sheep', 'wolf', 'sheep']), 'Oi! Sheep number 1! You are about to be eaten by a wolf!')
    def test_5(self):
        self.assertEqual(warn_the_sheep(['sheep', 'sheep', 'wolf']), 'Pls go away and stop eating my sheep')
e4160c8bd63d807a761f9c2eb1581d092fef5ff0 | 449 | py | Python | modules/dbnd-airflow/src/dbnd_airflow/scheduler/dags/dbnd_dropin_scheduler.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | null | null | null | modules/dbnd-airflow/src/dbnd_airflow/scheduler/dags/dbnd_dropin_scheduler.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | null | null | null | modules/dbnd-airflow/src/dbnd_airflow/scheduler/dags/dbnd_dropin_scheduler.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | null | null | null | import logging
# Module-level logger for this drop-in scheduler DAG file.
logger = logging.getLogger("dbnd-scheduler")

try:
    from dbnd_airflow.scheduler.scheduler_dags_provider import get_dags

    # airflow will only scan files containing the text DAG or airflow. This comment performs this function
    dags = get_dags()
    if dags:
        # Expose each DAG as a module-level global so Airflow's scanner picks it up.
        for dag in dags:
            globals()[dag.dag_id] = dag

except Exception as e:
    # Use the module's named logger (the original called logging.exception on
    # the root logger) and re-raise with the original traceback intact.
    logger.exception("Failed to get dags from databand server")
    raise
| 24.944444 | 106 | 0.710468 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 159 | 0.35412 |
e41914e68f6a31dadb107fe8bb9eaf841bed6173 | 4,268 | py | Python | tanacompendium/utils/modelmanagers.py | nkoech/tanacompendium | b4fd81b23f2c8263735806765d93eb4a70be8aba | [
"MIT"
] | null | null | null | tanacompendium/utils/modelmanagers.py | nkoech/tanacompendium | b4fd81b23f2c8263735806765d93eb4a70be8aba | [
"MIT"
] | null | null | null | tanacompendium/utils/modelmanagers.py | nkoech/tanacompendium | b4fd81b23f2c8263735806765d93eb4a70be8aba | [
"MIT"
] | null | null | null | import datetime
from django.contrib.contenttypes.models import ContentType
from django.db.models import FieldDoesNotExist
from django.db.models.base import ObjectDoesNotExist
def create_model_type(instance, model_type, key, slugify, **kwargs):
    """
    Create object by model type
    :param instance: Model manager instance
    :param model_type: Content/model type
    :param key: Primary key or slug
    :param slugify: Boolean to indicate availability of a slug or primary key
    :param kwargs: Fields to be created
    :return: Data object, or None if the content type or target object is not found
    :rtype: Object
    """
    model_qs = ContentType.objects.filter(model=model_type)
    if model_qs.exists():
        # Resolve the concrete model class behind the content type.
        any_model = model_qs.first().model_class()
        if slugify:
            obj_qs = any_model.objects.filter(slug=key)
        else:
            obj_qs = any_model.objects.filter(pk=key)
        # Only proceed when exactly one target object matched.
        if obj_qs.exists() and obj_qs.count() == 1:
            # Generic-relation fields plus the caller-supplied field values.
            field_values = {
                'content_type': model_qs.first(),
                'object_id': obj_qs.first().id
            }
            field_values.update(kwargs)
            data_instance = instance.model(**field_values)
            data_instance.save()
            return data_instance
    return None
def model_instance_filter(call_instance, current_instance, model_manager):
    """
    Object query based on a model instance
    :param call_instance: Instance of the model calling this method
    :param current_instance: Instance of the model manager class this method would be called from
    :param model_manager: The model manager class
    :return: Object due to instantiation of the calling model class
    :rtye: Object/record
    """
    parent_obj = super(model_manager, current_instance)
    content_type = ContentType.objects.get_for_model(call_instance.__class__)
    try:
        # NOTE(review): QuerySet.filter() never raises DoesNotExist (it returns an
        # empty queryset), so this except branch appears to be dead code — verify.
        qs = parent_obj.filter(content_type=content_type, object_id=call_instance.id)
    except parent_obj.DoesNotExist:
        return None
    return qs
def model_foreign_key_qs(call_instance, current_instance, model_manager):
    """
    Object query based on foreign key
    :param call_instance: Instance of the model calling this method
    :param current_instance: Instance of the model manager class this method would be called from
    :param model_manager: The model manager class
    :return: Object query based on foreign key otherwise return none
    :rtype: Object/record
    """
    model_name = str(call_instance._meta.model_name)  # Foreignkey name should be similar to related model name
    # Build the filter kwargs dynamically, e.g. {'author': 42}.
    qs_filter = {model_name: call_instance.id}
    obj_qs = super(model_manager, current_instance).filter(**qs_filter)
    return obj_qs
def model_type_filter(current_instance, obj_qs, model_manager):
    """
    Object query based on a model class
    :param current_instance: Instance of the model manager class this method would be called from
    :param obj_qs: Initial object query
    :param model_manager: The model manager class
    :return: Object query based on the model type/class otherwise return none
    :rtype: Object/record
    """
    if obj_qs.exists():
        if model_field_exists(obj_qs, 'content_type'):
            # NOTE(review): this returns on the first iteration, so only the first
            # record's content_type is ever considered — confirm that is intended.
            for obj in obj_qs.iterator():
                try:
                    # `and obj_qs` short-circuits to obj_qs when the filter is truthy.
                    qs = super(model_manager, current_instance).filter(content_type=obj.content_type) and obj_qs
                    return qs
                except ObjectDoesNotExist:
                    return None
    return obj_qs
def model_field_exists(instance, field_name):
    """Report whether *field_name* is declared on the manager's model.

    :param instance: Model manager instance whose model is inspected
    :param field_name: Name of the field to look up
    :return: True when the field is declared, False otherwise
    :rtype: bool
    """
    try:
        instance.model._meta.get_field(field_name)
    except FieldDoesNotExist:
        return False
    return True
def get_year_choices():
    """
    Get years as model choices
    :return: List of ``(year, year)`` tuples from 1950 through the current year,
             suitable for a Django field's ``choices`` option
    """
    # Resolve the current year once, then build the pairs in a comprehension
    # instead of the original append loop.
    current_year = datetime.datetime.now().year
    return [(year, year) for year in range(1950, current_year + 1)]
def get_datetime_now():
    """
    Get current year
    :return: The current year as an int (note: year only, despite the name)
    """
    now = datetime.datetime.now()
    return now.year
| 34.419355 | 112 | 0.684161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,862 | 0.43627 |
e4193bf7c1b3cd811dde985083067c06d301bbfb | 2,588 | py | Python | deletion_test.py | tjake/cassandra-dtest | df49e4f16b2ed8b9c38f767fffd796ae3d9cc6f3 | [
"Apache-2.0"
] | null | null | null | deletion_test.py | tjake/cassandra-dtest | df49e4f16b2ed8b9c38f767fffd796ae3d9cc6f3 | [
"Apache-2.0"
] | null | null | null | deletion_test.py | tjake/cassandra-dtest | df49e4f16b2ed8b9c38f767fffd796ae3d9cc6f3 | [
"Apache-2.0"
] | null | null | null | from dtest import Tester
import os, sys, time
from ccmlib.cluster import Cluster
from tools import require, since
from jmxutils import make_mbean, JolokiaAgent
class TestDeletion(Tester):
    """Cassandra dtests around row deletion, tombstones, and memtable metrics."""

    def gc_test(self):
        """ Test that tombstone are fully purge after gc_grace """
        cluster = self.cluster
        cluster.populate(1).start()
        [node1] = cluster.nodelist()
        time.sleep(.5)
        cursor = self.patient_cql_connection(node1)
        self.create_ks(cursor, 'ks', 1)
        # gc_grace=0 so tombstones are purgeable immediately after compaction.
        self.create_cf(cursor, 'cf', gc_grace=0, key_type='int', columns={'c1': 'int'})
        cursor.execute('insert into cf (key, c1) values (1,1)')
        cursor.execute('insert into cf (key, c1) values (2,1)')
        node1.flush()
        result = cursor.execute('select * from cf;')
        assert len(result) == 2 and len(result[0]) == 2 and len(result[1]) == 2, result
        cursor.execute('delete from cf where key=1')
        result = cursor.execute('select * from cf;')
        if cluster.version() < '1.2': # > 1.2 doesn't show tombstones
            assert len(result) == 2 and len(result[0]) == 1 and len(result[1]) == 1, result
        # Flush + compact so the gc_grace=0 tombstone can be dropped.
        node1.flush()
        time.sleep(.5)
        node1.compact()
        time.sleep(.5)
        result = cursor.execute('select * from cf;')
        assert len(result) == 1 and len(result[0]) == 2, result
    @require(9194)
    def tombstone_size_test(self):
        """Check that deletes register in the memtable count and size metrics."""
        self.cluster.populate(1).start(wait_for_binary_proto=True)
        [node1] = self.cluster.nodelist()
        cursor = self.patient_cql_connection(node1)
        self.create_ks(cursor, 'ks', 1)
        cursor.execute('CREATE TABLE test (i int PRIMARY KEY)')
        stmt = cursor.prepare('DELETE FROM test where i = ?')
        # 100 deletes -> 100 tombstones sitting in the memtable.
        for i in range(100):
            cursor.execute(stmt, [i])
        self.assertEqual(memtable_count(node1, 'ks', 'test'), 100)
        self.assertGreater(memtable_size(node1, 'ks', 'test'), 0)
def memtable_size(node, keyspace, table):
    """Read the table's memtable data-size metric (JMX name changed in C* 2.1)."""
    if node.get_cassandra_version() >= '2.1':
        metric = 'MemtableLiveDataSize'
    else:
        metric = 'MemtableDataSize'
    return columnfamily_metric(node, keyspace, table, metric)
def memtable_count(node, keyspace, table):
    """Read the number of columns currently held in the table's memtable."""
    metric_name = 'MemtableColumnsCount'
    return columnfamily_metric(node, keyspace, table, metric_name)
def columnfamily_metric(node, keyspace, table, name):
    """Read a per-table metric's Value attribute over JMX via a Jolokia agent."""
    with JolokiaAgent(node) as jmx:
        mbean = make_mbean('metrics', type='ColumnFamily',
                           name=name, keyspace=keyspace, scope=table)
        value = jmx.read_attribute(mbean, 'Value')
    return value
| 34.052632 | 91 | 0.632921 | 1,784 | 0.689335 | 0 | 0 | 578 | 0.223338 | 0 | 0 | 469 | 0.181221 |
e41bb3e24e831bc6c9db543d89a47e06639cb0a0 | 355 | py | Python | src/comments/migrations/0004_auto_20200209_1812.py | samrika25/TRAVIS_HEROKU_GIT | bcae6d0422d9a0369810944a91dd03db7df0d058 | [
"MIT"
] | null | null | null | src/comments/migrations/0004_auto_20200209_1812.py | samrika25/TRAVIS_HEROKU_GIT | bcae6d0422d9a0369810944a91dd03db7df0d058 | [
"MIT"
] | 4 | 2021-03-30T12:35:36.000Z | 2021-06-10T18:11:24.000Z | src/comments/migrations/0004_auto_20200209_1812.py | samrika25/TRAVIS_HEROKU_GIT | bcae6d0422d9a0369810944a91dd03db7df0d058 | [
"MIT"
] | 2 | 2021-02-07T16:16:36.000Z | 2021-07-13T05:26:51.000Z | # Generated by Django 3.0.2 on 2020-02-09 18:12
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: orders Comment objects by published_at.
    dependencies = [
        ('comments', '0003_auto_20200208_0812'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='comment',
            options={'ordering': ['published_at']},
        ),
    ]
| 19.722222 | 51 | 0.6 | 270 | 0.760563 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.323944 |
e41c425d0ed1f3d737beeff6b6c0f31113fafb62 | 768 | py | Python | multicasting_test_scripts/sender.py | sandwichdoge/libmulticastudp | 735a3a6242d5444f9a5a070322a7033296707cdf | [
"MIT"
] | null | null | null | multicasting_test_scripts/sender.py | sandwichdoge/libmulticastudp | 735a3a6242d5444f9a5a070322a7033296707cdf | [
"MIT"
] | null | null | null | multicasting_test_scripts/sender.py | sandwichdoge/libmulticastudp | 735a3a6242d5444f9a5a070322a7033296707cdf | [
"MIT"
] | null | null | null | #
# mostly copied from
# http://bioportal.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/socket/multicast.html
#
import socket
import struct
import sys
import time
# NOTE: this script uses Python 2 syntax (`print >>sys.stderr, ...`); it will
# not run under Python 3 without modification.
message = 'data worth repeating'
# (group address, port) — 226.1.1.1 is in the administratively scoped range.
multicast_group = ('226.1.1.1', 4321)
# Create the datagram socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Set a timeout so the socket does not block indefinitely when trying
# to receive data.
sock.settimeout(0.2)
counter = 0
try:
    # Loop forever, re-sending the same payload every 5 seconds.
    while True:
        counter +=1
        # Send data to the multicast group
        print >>sys.stderr, '%d: sending "%s"' % (counter, message )
        sent = sock.sendto(message, multicast_group)
        time.sleep( 5 )
finally:
    # Always release the socket, even on Ctrl-C.
    print >>sys.stderr, 'closing socket'
    sock.close()
e42510b046e5ad727d96dec824908363abd5654f | 852 | py | Python | python/chol_factor_test.py | davxy/numeric | 1e8b44a72e1d570433a5ba81ae0795a750ce5921 | [
"Unlicense"
] | 2 | 2020-05-03T17:02:44.000Z | 2022-02-21T04:09:34.000Z | python/chol_factor_test.py | davxy/numeric | 1e8b44a72e1d570433a5ba81ae0795a750ce5921 | [
"Unlicense"
] | null | null | null | python/chol_factor_test.py | davxy/numeric | 1e8b44a72e1d570433a5ba81ae0795a750ce5921 | [
"Unlicense"
] | null | null | null | import numpy as np
from chol_factor import chol_factor
from triangular import triangular
# TEST: Cholesky factorization (LL')
# Symmetric positive definite matrix
A = np.matrix('5 1.2 0.3 -0.6;'
'1.2 6 -0.4 0.9;'
'0.3 -0.4 8 1.7;'
'-0.6 0.9 1.7 10');
print('A = \n', A)
# Computation of the L factor
L = chol_factor(A)
print('L = \n', L)
# Check
if np.allclose(A, np.dot(L, L.transpose())) == False:
raise Exception('QR factorizzation test failure')
# TEST: System Resolution
# Ax = LL'x = b
b = np.matrix("68; 9; 45; 35")
print('b = \n', b)
# Lk = b
k = triangular(L, b, 1)
print('k = \n', k)
# L'x = k
x = triangular(L.transpose(), k, 0)
print('x = \n', x)
# Check
b1 = np.dot(A, x)
print('b1 = \n', b1)
if np.allclose(b, b1) == False:
raise Exception('System resolution failure')
| 23.027027 | 53 | 0.580986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 378 | 0.443662 |
e4257523a5f56faf33e09f713fd3a02e93109a4b | 11,245 | py | Python | PSO_system/GUI/gui_root.py | daniel4lee/PSO-car-simulator | b4aebca0fed614e33acc3e7d665085d55a67b82a | [
"MIT"
] | 1 | 2022-03-23T21:51:59.000Z | 2022-03-23T21:51:59.000Z | PSO_system/GUI/gui_root.py | daniel4lee/PSO-car-simulator | b4aebca0fed614e33acc3e7d665085d55a67b82a | [
"MIT"
] | 1 | 2018-10-08T12:53:42.000Z | 2018-10-08T13:46:13.000Z | PSO_system/GUI/gui_root.py | daniel4lee/PSO-car-simulator | b4aebca0fed614e33acc3e7d665085d55a67b82a | [
"MIT"
] | 2 | 2020-04-26T08:22:53.000Z | 2021-05-18T09:51:24.000Z | """Build the tkinter gui root"""
import math
from PyQt5.QtWidgets import *#(QWidget, QToolTip, QDesktopWidget, QPushButton, QApplication)
from PyQt5.QtGui import QFont
from PyQt5.QtCore import QCoreApplication, QObject, QRunnable, QThread, QThreadPool, pyqtSignal, pyqtSlot
from PyQt5.QtGui import QIntValidator, QDoubleValidator
import sys
from PSO_system.Counting.plot import PlotCanvas
from PSO_system.Counting.run import CarRunning
from PSO_system.Counting.test_result import TestRunning
THREADS = []
class GuiRoot(QWidget):
"""Root of gui."""
    def __init__(self, dataset, training_data):
        """Create the GUI root.

        :param dataset: dict mapping map-file names to map data
        :param training_data: dict mapping training-file names to training data
        """
        super().__init__()
        # Thread pool used to run PSO training/testing off the GUI thread.
        self.threadpool = QThreadPool()
        self.setFixedSize(800, 800)
        self.center()
        self.setWindowTitle('PSO')
        self.show()
        # Read the map and training data.
        self.map_datalist = dataset.keys()
        self.map_data = dataset
        self.training_datalist = training_data.keys()
        self.training_data = training_data
        # Create the file-choosing, parameter, and log group boxes.
        self.file_run_creation(self.map_datalist, self.training_datalist)
        self.operation_parameter_creation()
        self.ouput_text_creation()
        # Left column: controls and log; right side: the map plot canvas.
        hbox = QHBoxLayout()
        vbox = QVBoxLayout()
        vbox.addWidget(self.file_run)
        vbox.addWidget(self.operation_type)
        vbox.addWidget(self.text_group_box)
        hbox.addLayout(vbox)
        self.m = PlotCanvas(self.map_data)
        hbox.addWidget(self.m)
        self.setLayout(hbox)
    def file_run_creation(self, datalist, training_data):
        """Build the 'File choose' group box: map/training combo boxes plus
        the Start (train) and Test buttons.

        :param datalist: iterable of map-file names
        :param training_data: iterable of training-file names
        """
        self.file_run = QGroupBox("File choose")
        layout = QGridLayout()
        layout.setSpacing(10)
        map_file_label = QLabel("Map file: ")
        self.map_file_choose = QComboBox()
        for i in datalist:
            self.map_file_choose.addItem("{}".format(i))
        # Redraw the map whenever a different map file is selected.
        self.map_file_choose.currentTextChanged.connect(self.file_changed)
        training_file_label = QLabel("Training file: ")
        self.training_file_choose = QComboBox()
        for i in training_data:
            self.training_file_choose.addItem("{}".format(i))
        self.run_btn = QPushButton("Start", self)
        self.run_btn.clicked.connect(self.run)
        self.test_btn = QPushButton("Test", self)
        self.test_btn.clicked.connect(self.test_rbfn)
        layout.addWidget(map_file_label, 1, 0, 1, 1)
        layout.addWidget(self.map_file_choose, 1, 1, 1, 3)
        layout.addWidget(training_file_label, 2, 0, 1, 1)
        layout.addWidget(self.training_file_choose, 2, 1, 1, 3)
        layout.addWidget(self.run_btn, 3, 0, 1, 4)
        layout.addWidget(self.test_btn, 4, 0, 1, 4)
        layout.setVerticalSpacing(0)
        layout.setHorizontalSpacing(0)
        self.file_run.setLayout(layout)
        # No trained RBFN yet; test_rbfn checks this before running.
        self.test_parameter = None
def operation_parameter_creation(self):
"""Operation parameter field"""
self.operation_type = QGroupBox("Operation parameter setting")
vbox = QVBoxLayout()
#Set and operation paremeter region, including iteration times, population number,
#mutation probability, crossover probability, network j value
iteration_layout = QHBoxLayout()
iteration_setting = QLabel("Iteration times :")
self.iteration_line = QSpinBox()
self.iteration_line.setRange(1, 10000)
self.iteration_line.setValue(60)
self.iteration_line.setMaximumWidth(150)
iteration_layout.addWidget(iteration_setting)
iteration_layout.addWidget(self.iteration_line)
iteration_layout.insertSpacing(-1,100)
swarm_size_layout = QHBoxLayout()
swarm_size_setting = QLabel("Swarm size:")
self.swarm_size_line = QSpinBox()
self.swarm_size_line.setRange(1, 10000)
self.swarm_size_line.setValue(200)
self.swarm_size_line.setMaximumWidth(150)
swarm_size_layout.addWidget(swarm_size_setting)
swarm_size_layout.addWidget(self.swarm_size_line)
swarm_size_layout.insertSpacing(-1,100)
w_layout = QHBoxLayout()
w_setting = QLabel("Robust of w: ")
self.w_line = QDoubleSpinBox()
self.w_line.setRange(0, 10)
self.w_line.setDecimals(2)
self.w_line.setValue(0.8)
self.w_line.setMaximumWidth(150)
w_layout.addWidget(w_setting)
w_layout.addWidget(self.w_line)
w_layout.insertSpacing(-1,100)
# in PSO, φ1 means the parameter multiplied with (pi(t)-x(t))
fai_1_layout = QHBoxLayout()
fai_1_setting = QLabel("Robust of φ1: ")
self.fai_1_line = QDoubleSpinBox()
self.fai_1_line.setValue(1.5)
self.fai_1_line.setRange(0, 10)
self.fai_1_line.setDecimals(2)
self.fai_1_line.setMaximumWidth(150)
fai_1_layout.addWidget(fai_1_setting)
fai_1_layout.addWidget(self.fai_1_line)
fai_1_layout.insertSpacing(-1,100)
fai_2_layout = QHBoxLayout()
fai_2_setting = QLabel("Robust of φ2: ")
self.fai_2_line = QDoubleSpinBox()
self.fai_2_line.setRange(0, 10)
self.fai_2_line.setDecimals(2)
self.fai_2_line.setValue(2.5)
self.fai_2_line.setMaximumWidth(150)
fai_2_layout.addWidget(fai_2_setting)
fai_2_layout.addWidget(self.fai_2_line)
fai_2_layout.insertSpacing(-1,100)
net_j_layout = QHBoxLayout()
net_j_setting = QLabel("Network neurl number j: ")
self.net_j_line = QSpinBox()
self.net_j_line.setRange(1,10)
self.net_j_line.setValue(6)
self.net_j_line.setMaximumWidth(150)
net_j_layout.addWidget(net_j_setting)
net_j_layout.addWidget(self.net_j_line)
net_j_layout.insertSpacing(-1,100)
sd_layout = QHBoxLayout()
sd_setting = QLabel("Maximum SD: ")
self.sd_line = QSpinBox()
self.sd_line.setRange(1,100)
self.sd_line.setValue(10)
self.sd_line.setMaximumWidth(150)
sd_layout.addWidget(sd_setting)
sd_layout.addWidget(self.sd_line)
sd_layout.insertSpacing(-1,100)
v_max_layout = QHBoxLayout()
v_max_setting = QLabel("Maximum V: ")
self.v_max_line = QDoubleSpinBox()
self.v_max_line.setRange(0, 10)
self.v_max_line.setDecimals(2)
self.v_max_line.setValue(4)
self.v_max_line.setMaximumWidth(150)
v_max_layout.addWidget(v_max_setting)
v_max_layout.addWidget(self.v_max_line)
v_max_layout.insertSpacing(-1,100)
vbox.addLayout(iteration_layout)
vbox.addLayout(swarm_size_layout)
vbox.addLayout(w_layout)
vbox.addLayout(fai_1_layout)
vbox.addLayout(fai_2_layout)
vbox.addLayout(net_j_layout)
vbox.addLayout(v_max_layout)
vbox.addLayout(sd_layout)
self.operation_type.setLayout(vbox)
def ouput_text_creation(self):
self.text_group_box = QGroupBox("Execution log")
layout = QVBoxLayout()
self.console = QTextEdit()
self.console.setReadOnly(True)
layout.addWidget(self.console)
self.text_group_box.setLayout(layout)
    def file_changed(self):
        """Slot fired when the map selection changes: re-plot and log it."""
        self.m.plot_map(self.map_file_choose.currentText())
        self.console.append('Map changed')
def run(self):
self.test_parameter = None
l = []
l.append(self.iteration_line.value())
l.append(self.swarm_size_line.value())
l.append(self.w_line.value())
l.append(self.fai_1_line.value())
l.append(self.fai_2_line.value())
l.append(self.net_j_line.value())
l.append(self.v_max_line.value())
l.append(self.sd_line.value())
# disable avoid to touch
self.disable('yes')
# transfer for counting
self.console.append('Start training RBFN with PSO')
car = CarRunning(self.map_data, self.map_file_choose.currentText(), self.training_data, self.training_file_choose.currentText(), l)
car.signals.iteration.connect(self.console_output)
car.signals.result.connect(self.dir_test_rbfn)
self.threadpool.start(car)
    def dir_test_rbfn(self, parameters):
        """Slot for the training worker's result signal.

        Stores the trained RBFN parameters and immediately starts a test
        run on the currently selected map.
        """
        # Lock the controls while the test worker runs.
        self.disable('yes')
        # Keep the model so the [Test] button can replay it later.
        self.test_parameter = parameters
        self.console.append('Start testing result on current map.')
        self.console.append("------------------------------------------------------")
        test_thread = TestRunning(self.map_data, self.map_file_choose.currentText(), parameters, None)
        test_thread.signals.plot.connect(self.plot_output)
        self.threadpool.start(test_thread)
def test_rbfn(self):
if self.test_parameter == None:
self.console.append('No RBFN model, please push [Start] button first.')
else:
# disable avoid to touch
self.disable('yes')
# transfer for counting
self.console.append('Start testing result on current map.')
self.console.append("------------------------------------------------------")
test_thread = TestRunning(self.map_data, self.map_file_choose.currentText(), None, self.test_parameter)
test_thread.signals.plot.connect(self.plot_output)
self.threadpool.start(test_thread)
    def console_output(self, s):
        """Append a worker progress message to the execution log."""
        self.console.append(str(s))
    def plot_output(self, s):
        """Slot for the test worker's plot signal: draw the run, unlock the UI."""
        self.m.plot_car(s)
        self.disable('no')
        self.console.append('Test is complete, and showing on right area')
        self.console.append("------------------------------------------------------")
def center(self):
"""Place window in the center"""
qr = self.frameGeometry()
central_p = QDesktopWidget().availableGeometry().center()
qr.moveCenter(central_p)
self.move(qr.topLeft())
def disable(self, yes_or_no):
if yes_or_no == 'yes':
self.iteration_line.setDisabled(True)
self.swarm_size_line.setDisabled(True)
self.w_line.setDisabled(True)
self.fai_2_line.setDisabled(True)
self.fai_1_line.setDisabled(True)
self.net_j_line.setDisabled(True)
self.map_file_choose.setDisabled(True)
self.training_file_choose.setDisabled(True)
self.run_btn.setDisabled(True)
self.test_btn.setDisabled(True)
self.v_max_line.setDisabled(True)
self.sd_line.setDisabled(True)
else:
self.iteration_line.setDisabled(False)
self.swarm_size_line.setDisabled(False)
self.w_line.setDisabled(False)
self.fai_2_line.setDisabled(False)
self.fai_1_line.setDisabled(False)
self.net_j_line.setDisabled(False)
self.map_file_choose.setDisabled(False)
self.training_file_choose.setDisabled(False)
self.run_btn.setDisabled(False)
self.test_btn.setDisabled(False)
self.v_max_line.setDisabled(False)
self.sd_line.setDisabled(False)
# This module is a GUI screen meant to be imported by the launcher.
if __name__ == '__main__':
    print("Error: This file can only be imported. Execute 'main.py'")
| 41.494465 | 139 | 0.649355 | 10,642 | 0.946124 | 0 | 0 | 0 | 0 | 0 | 0 | 1,354 | 0.120377 |
e425b8c86c1c0699016fdb4cfc8b01eea833c4f2 | 2,346 | py | Python | qsrlib/src/qsrlib_qsrs/qsr_cardinal_direction.py | alexiatoumpa/QSR_Detector | ff92a128dddb613690a49a7b4130afeac0dd4381 | [
"MIT"
] | 15 | 2015-06-15T16:50:37.000Z | 2022-03-27T09:25:56.000Z | qsrlib/src/qsrlib_qsrs/qsr_cardinal_direction.py | alexiatoumpa/QSR_Detector | ff92a128dddb613690a49a7b4130afeac0dd4381 | [
"MIT"
] | 205 | 2015-01-22T12:02:59.000Z | 2022-03-29T11:59:55.000Z | qsrlib/src/qsrlib_qsrs/qsr_cardinal_direction.py | alexiatoumpa/QSR_Detector | ff92a128dddb613690a49a7b4130afeac0dd4381 | [
"MIT"
] | 16 | 2015-02-04T23:13:18.000Z | 2022-03-08T13:45:53.000Z | # -*- coding: utf-8 -*-
from __future__ import print_function, division
from qsrlib_qsrs.qsr_dyadic_abstractclass import QSR_Dyadic_1t_Abstractclass
import math
class QSR_Cardinal_Direction(QSR_Dyadic_1t_Abstractclass):
    """Cardinal direction relations between two 2D bounding boxes.

    Values of the abstract properties

    * **_unique_id** = "cardir"
    * **_all_possible_relations** = ("n", "ne", "e", "se", "s", "sw", "w", "nw", "eq")
    * **_dtype** = "bounding_boxes_2d"
    """

    _unique_id = "cardir"
    """str: Unique identifier name of the QSR."""

    _all_possible_relations = ("n", "ne", "e", "se", "s", "sw", "w", "nw", "eq")
    """tuple: All possible relations of the QSR."""

    _dtype = "bounding_boxes_2d"
    """str: On what kind of data the QSR works with."""

    def __init__(self):
        """Constructor."""
        super(QSR_Cardinal_Direction, self).__init__()

    def _compute_qsr(self, data1, data2, qsr_params, **kwargs):
        """Compute the cardinal-direction relation between two bounding boxes.

        :param data1: Bounding box (x1, y1, x2, y2).
        :type data1: list or tuple of int or floats
        :param data2: Bounding box (x1, y1, x2, y2).
        :type data2: list or tuple of int or floats
        :return: QSR relation.
        :rtype: str
        """
        # Vector from the centre of box 1 to the centre of box 2.
        dx = ((data2[0] + data2[2]) / 2.0) - ((data1[0] + data1[2]) / 2.0)
        dy = ((data2[1] + data2[3]) / 2.0) - ((data1[1] + data1[3]) / 2.0)
        if dx == 0 and dy == 0:
            return 'eq'
        # Bearing in degrees, offset by half a sector (22.5) so that integer
        # division by 45 degrees selects the correct octant.
        angle = (math.atan2(dx, dy) * (180 / math.pi)) + 22.5
        if angle < 0.0:
            angle = 360.0 + angle
        return self.__direction_switch(math.floor(angle / 45.0))

    def __direction_switch(self, x):
        """Map an octant index (0-7) onto its compass label.

        :param x: octant index produced by ``_compute_qsr``.
        :return: QSR relation, or None for an out-of-range index.
        :rtype: str
        """
        labels = ('s', 'sw', 'w', 'nw', 'n', 'ne', 'e', 'se')
        index = int(x)
        # Same contract as the original dict.get(): None when out of range.
        return labels[index] if 0 <= index < len(labels) else None
| 31.28 | 118 | 0.561381 | 2,183 | 0.93052 | 0 | 0 | 0 | 0 | 0 | 0 | 1,258 | 0.536232 |
e4287373cf648c93ed322e508af33deff1f8e862 | 4,291 | py | Python | clustering/GMM.py | peasant98/NBA-Stats-Clustering | 57ff7e70a8cbb0c609d6a6720134a37695e2a860 | [
"MIT"
] | null | null | null | clustering/GMM.py | peasant98/NBA-Stats-Clustering | 57ff7e70a8cbb0c609d6a6720134a37695e2a860 | [
"MIT"
] | null | null | null | clustering/GMM.py | peasant98/NBA-Stats-Clustering | 57ff7e70a8cbb0c609d6a6720134a37695e2a860 | [
"MIT"
] | null | null | null | # NBA Stats Clustering
# Copyright Matthew Strong, 2019
# gaussian mixture models with em algorithm
import numpy as np
from scipy import stats
from clustering.Cluster import NBACluster
# nba gmm class
# gmm from scratch as well, more explained below
class NBAGMM(NBACluster):
    """Gaussian-mixture-model clustering fitted with a from-scratch EM loop."""
    def fit(self):
        """Fit the mixture and store labels, centroids and SSD on self."""
        self.method = 'GMM'
        # All data points plus `num_clusters` randomly chosen seed points.
        a, m = self.get_points(self.num_clusters)
        # Run EM for a fixed 100 iterations.
        res = self.em_algorithm(self.num_clusters, m, a)
        probs_given_data = res[2]
        # Assign each point to its most probable mixture component, and
        # accumulate the sum of distances from each point to its centroid.
        l = []
        dist = 0
        for v in range(len(a)):
            selection = np.argmax(probs_given_data[:,v])
            dist += self.dist(a[v], res[0][selection])
            l.append(selection)
        self.ssd = dist
        self.labels = l
        self.centroids = res[0]
    def get_points(self, k):
        """Return (all points, k randomly selected seed points)."""
        a = self.df.values
        indices = np.random.choice(list(range(len(a))), k, replace=False)
        k_points = a[indices]
        return a, k_points
    def dist(self, x1, x2):
        """Euclidean distance between two vectors."""
        return np.sqrt(np.sum((x1-x2)**2))
    # EM for a Gaussian mixture, vectorised and written for n-dimensional
    # data and arbitrary k (adapted from a course notebook / homework).
    def em_algorithm(self, k, m, a):
        """Run 100 EM iterations.

        Args:
            k: number of mixture components.
            m: (k, d) seed points used as the initial means.
            a: (n, d) data matrix.

        Returns:
            (means, covariances, responsibilities, component weights).
        """
        mu = np.zeros((k, a.shape[-1]))
        covariances = np.zeros((k, a.shape[-1], a.shape[-1]))
        probs = np.zeros(k)
        # Component weights start out uniform (1/k each).
        probs.fill(1./k)
        # Work matrices for the E-step responsibilities.
        p_given_class = np.zeros((k, len(a)))
        p_given_data = np.zeros((k, len(a)))
        p_class_data = np.zeros((k, len(a), 1, 1))
        n_class = np.zeros(k)
        # Initialise means from the random seed points.
        for ind,val in enumerate(mu):
            mu[ind] = m[ind]
        for ind,val in enumerate(covariances):
            # Every component starts with the overall dataset covariance.
            if ind == 0:
                covariances[0] = np.cov(a.T)
            else:
                covariances[ind] = covariances[0]
        for _ in range(100):
            # E-step: weighted likelihood of each point under each component.
            summation = np.zeros((len(a)))
            for i in range(k):
                p_given_class[i] = stats.multivariate_normal.pdf(a, mean=mu[i], cov=covariances[i], allow_singular=True)
                p_given_data[i] = p_given_class[i] * probs[i]
                summation += p_given_data[i]
            length = len(a)
            for i in range(k):
                # Normalise into responsibilities; update component weights.
                p_given_data[i]/=summation
                n_class[i] = np.sum(p_given_data[i])
                probs[i] = n_class[i]/length
            for i in range(k):
                # M-step: responsibility-weighted mean per dimension.
                means = np.zeros(a.shape[-1])
                for j in range(len(means)):
                    means[j] = (1.0/n_class[i]) * np.sum(p_given_data[i]*a[:,j])
                mu[i] = np.array(means)
            for i in range(k):
                # M-step: responsibility-weighted outer-product covariance.
                covs = []
                for p in a:
                    x_i = p
                    r = x_i - mu[i]
                    vec = np.expand_dims(r, axis=0)
                    cov_i = vec * vec.T
                    covs.append(cov_i)
                # Broadcast responsibilities over the stacked outer products
                # and reduce along the point axis.
                covs = np.array(covs)
                temp = np.expand_dims(p_given_data[i], axis=1)
                p_class_data[i] = np.expand_dims(temp, axis=1)
                covariances[i] = np.sum(p_class_data[i] * covs, axis=0) / n_class[i]
        # Means, covariances, per-point responsibilities, component weights.
        return mu, covariances, p_given_data, probs
| 40.102804 | 120 | 0.554649 | 4,038 | 0.941039 | 0 | 0 | 0 | 0 | 0 | 0 | 1,201 | 0.279888 |
e428f454d7dceb480c84f33f264e2ac819a010fd | 1,484 | py | Python | ML/eval.py | Data-Science-Community-SRM/Fashion-Generation | fa062e2b31b4fba8945820d911dfa41de45b1333 | [
"MIT"
] | 1 | 2021-04-27T09:13:09.000Z | 2021-04-27T09:13:09.000Z | ML/eval.py | Aradhya-Tripathi/Fashion-Generation | fa062e2b31b4fba8945820d911dfa41de45b1333 | [
"MIT"
] | null | null | null | ML/eval.py | Aradhya-Tripathi/Fashion-Generation | fa062e2b31b4fba8945820d911dfa41de45b1333 | [
"MIT"
] | 1 | 2021-03-12T13:15:08.000Z | 2021-03-12T13:15:08.000Z | import torch
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
import sys
sys.path.append("./ML")
import Definitions.models as models
from Definitions.dataset import Data
def main(imgpath="Data", noise_dim=100, vec_shape=100, root="./ModelWeights/"):
    """Load trained GAN weights and show real vs. reconstructed images.

    Args:
        imgpath: folder containing the image dataset.
        noise_dim: generator noise dimension used at training time.
        vec_shape: encoder output / generator input vector size.
        root: directory holding Gen.pt / Dis.pt / RES.pt weight files.
    """
    netG = models.Generator(device="cpu", noise_dim=noise_dim, vec_shape=vec_shape)
    netD = models.Discriminator()
    netENC = models.ResNetEncoder(vec_shape)
    netG.load_state_dict(torch.load(root + "Gen.pt"))
    netD.load_state_dict(torch.load(root + "Dis.pt"))
    netENC.load_state_dict(torch.load(root + "RES.pt"))
    # NOTE(review): the networks are not switched to eval() mode; the calls
    # below were deliberately left commented out upstream.
    # netG.eval()
    # netD.eval()
    # netENC.eval()
    numrows = 5
    d = Data(path=imgpath, batch_size=numrows, size=(64, 64))
    d_loaded = DataLoader(d.folderdata, numrows, shuffle=True)
    # One random batch of real images (index 0 drops the labels).
    imgs = next(iter(d_loaded))[0]
    with torch.no_grad():
        vector = netENC(imgs)
        fakeImages = netG(vector)
    # Top row: generated reconstructions; bottom row: the real inputs.
    _, ax = plt.subplots(2, numrows, squeeze=False, sharex=True, sharey=True, figsize=(8, 4))
    for i in range(numrows):
        # (x + 1) / 2 maps values from [-1, 1] back into [0, 1] for display
        # (assumes the dataset normalises images to [-1, 1] — TODO confirm).
        ax[0, i].imshow((fakeImages[i].permute(1, 2, 0).numpy() + [1, 1, 1]) / [2, 2, 2])
        ax[0, i].axis(False)
        ax[1, i].imshow((imgs[i].permute(1, 2, 0).numpy() + [1, 1, 1]) / [2, 2, 2])
        ax[1, i].axis(False)
    plt.subplots_adjust(wspace=0, hspace=0)
    plt.show()
# Allow running this evaluation script directly.
if __name__ == "__main__":
    main()
| 26.981818 | 94 | 0.617925 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 145 | 0.097709 |
e42935051444daddcd5cee33f9a2daa9cde6e823 | 4,965 | py | Python | app/screens/authorize.py | jimkutter/rpi_lcars | f5ae0891f26d3494ad77f894c4f7733deaf063ee | [
"MIT"
] | null | null | null | app/screens/authorize.py | jimkutter/rpi_lcars | f5ae0891f26d3494ad77f894c4f7733deaf063ee | [
"MIT"
] | null | null | null | app/screens/authorize.py | jimkutter/rpi_lcars | f5ae0891f26d3494ad77f894c4f7733deaf063ee | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
import pygame
from pygame.mixer import Sound
from screens.base_screen import BaseScreen
from ui import colours
from ui.widgets.background import LcarsBackgroundImage
from ui.widgets.gifimage import LcarsGifImage
from ui.widgets.lcars_widgets import LcarsButton
from ui.widgets.lcars_widgets import LcarsText
class CodeButton(LcarsButton):
    """An LcarsButton that also carries the pass-code value it enters."""
    def __init__(self, colour, pos, text, handler=None, rectSize=None):
        super().__init__(colour, pos, text, handler, rectSize)
        # The code value this button represents; assigned by the screen setup.
        self.code = None
class ScreenAuthorize(BaseScreen):
    """LCARS lock screen: a Greek-letter pass-code must be entered to proceed."""
    def __init__(self, app):
        super().__init__(app, None, None)
        self.login_timeout = None
        self.reset_timer()
    def setup(self, all_sprites):
        """Build the static text (layer 1) and the code-entry buttons (layer 2)."""
        all_sprites.add(LcarsBackgroundImage("assets/lcars_screen_2.png"), layer=0)
        all_sprites.add(LcarsGifImage("assets/gadgets/stlogorotating.gif", (103, 369), 50), layer=0)
        all_sprites.add(LcarsText(colours.ORANGE, (270, -1), "AUTHORIZATION REQUIRED", 2), layer=0)
        all_sprites.add(LcarsText(colours.BLUE, (330, -1), "ONLY AUTHORIZED PERSONNEL MAY ACCESS THIS TERMINAL", 1.5),
                        layer=1)
        all_sprites.add(LcarsText(colours.BLUE, (360, -1), "TOUCH TERMINAL TO PROCEED", 1.5), layer=1)
        greek_alphabet = [
            "alpha",
            "beta",
            "gamma",
            "delta",
            "epsilon",
            "zeta",
            "eta",
            "theta",
            "iota",
            "kappa",
            "lambda",
            "mu",
            "nu",
            "xi",
            "omicron",
            "pi",
            "rho",
            "sigma",
            "tau",
            "upsilon",
            "phi",
            "chi",
            "psi",
            "omega",
        ]
        # Lay the 24 letter buttons out in a 4-column grid.
        x_orig = 127
        y_orig = 75
        padding = 20
        width = 122
        height = 44
        row = 0
        col = 0
        for letter in greek_alphabet:
            x = x_orig + (col * (width + padding / 2))
            y = y_orig + (row * (height + padding / 2))
            button = CodeButton(colours.GREY_BLUE, (y, x), letter.upper(), self.button_handler)
            button.code = letter
            col = col + 1
            if col > 3:
                row = row + 1
                col = 0
            all_sprites.add(button, layer=2)
        # layer1 = idle prompt text, layer2 = code-entry buttons; exactly one
        # of the two is visible at a time.
        self.layer1 = all_sprites.get_sprites_from_layer(1)
        self.layer2 = all_sprites.get_sprites_from_layer(2)
        # Sounds (startup chime only when the screen is on).
        if not self.app.is_screen_off:
            Sound("assets/audio/panel/215.wav").play()
        self.sound_granted = Sound("assets/audio/accessing.wav")
        self.sound_beep1 = Sound("assets/audio/panel/201.wav")
        self.sound_denied = Sound("assets/audio/access_denied.wav")
        self.sound_deny1 = Sound("assets/audio/deny_1.wav")
        self.sound_deny2 = Sound("assets/audio/deny_2.wav")
        ############
        # SET PIN CODE WITH THIS VARIABLE
        ############
        self.pin = self.app.config['pin']
        ############
        self.reset()
    def reset(self):
        """Clear pass-code entry state and return to the idle prompt."""
        # Variables for PIN code verification.
        self.correct = 0
        self.pin_i = 0
        self.granted = False
        for sprite in self.layer1: sprite.visible = True
        for sprite in self.layer2: sprite.visible = False
    def screen_update(self):
        """Per-frame update: abandon a half-entered code once the timer expires."""
        super().screen_update()
        if self.login_timeout:
            auth_delta = self.login_timeout - datetime.now()
            # NOTE(review): this only fires while the remaining time truncates
            # to exactly 0 seconds — confirm the update rate guarantees a hit.
            if int(auth_delta.total_seconds()) == 0:
                self.reset()
    def handleEvents(self, event, fpsClock):
        """Wake the screen on press; evaluate the code once enough buttons were pressed."""
        if event.type == pygame.MOUSEBUTTONDOWN:
            # Audible feedback and wake-up on any touch.
            self.sound_beep1.play()
            self.app.screen_on()
        if event.type == pygame.MOUSEBUTTONUP:
            if not self.layer2[0].visible:
                self.show_login_controls()
            elif self.pin_i == len(self.pin):
                # As many presses as pin digits: grant or deny.
                if self.correct == len(self.pin):
                    self.sound_granted.play()
                    from screens.main import ScreenMain
                    self.loadScreen(ScreenMain(self.app))
                else:
                    self.sound_deny2.play()
                    self.sound_denied.play()
                    self.reset()
        return False
    def show_login_controls(self):
        """Swap the idle prompt for the code buttons and start the entry timer."""
        for sprite in self.layer1: sprite.visible = False
        for sprite in self.layer2: sprite.visible = True
        Sound("assets/audio/enter_authorization_code.wav").play()
        self.reset_timer()
    def button_handler(self, item, event, clock):
        """Record one code-button press against the next expected pin entry."""
        self.reset_timer()
        # NOTE(review): if a button is pressed after pin_i has already reached
        # len(self.pin), this indexing would raise IndexError — verify the
        # event flow prevents that.
        if self.pin[self.pin_i] == item.code:
            self.correct += 1
            print(self.correct)
        self.pin_i += 1
    def reset_timer(self):
        """Push the entry timeout `login_timeout` seconds into the future."""
        self.login_timeout = datetime.now() + timedelta(seconds=self.app.config['login_timeout'])
| 31.03125 | 118 | 0.557301 | 4,608 | 0.928097 | 0 | 0 | 0 | 0 | 0 | 0 | 695 | 0.13998 |
e42efd7b2e91e2b6ad55453d791a04774b95fe07 | 31 | py | Python | swarm_tasks/utils/__init__.py | rmvanarse/swarm_tasks | 3335297ba8fcdbff756ae519002bcce919d54a84 | [
"MIT"
] | 6 | 2021-03-13T12:54:18.000Z | 2022-01-29T12:12:28.000Z | swarm_tasks/utils/__init__.py | rmvanarse/swarm_tasks | 3335297ba8fcdbff756ae519002bcce919d54a84 | [
"MIT"
] | null | null | null | swarm_tasks/utils/__init__.py | rmvanarse/swarm_tasks | 3335297ba8fcdbff756ae519002bcce919d54a84 | [
"MIT"
] | 2 | 2021-08-06T15:02:15.000Z | 2022-02-08T12:11:30.000Z | import swarm_tasks.utils.robot
| 15.5 | 30 | 0.870968 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
e4324e2ffd9d0f0cc445c08f1b32895fbc79b0d2 | 2,178 | py | Python | Problems/P0010 - Soma de primos.py | clasenback/EulerProject | 775d9774fcdfbbcc579e3c4ec0bb2d4a941764ad | [
"CC0-1.0"
] | null | null | null | Problems/P0010 - Soma de primos.py | clasenback/EulerProject | 775d9774fcdfbbcc579e3c4ec0bb2d4a941764ad | [
"CC0-1.0"
] | null | null | null | Problems/P0010 - Soma de primos.py | clasenback/EulerProject | 775d9774fcdfbbcc579e3c4ec0bb2d4a941764ad | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 7 17:11:12 2021
@author: User
SUMMATION OF PRIMES
The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
Find the sum of all the primes below two million.
21min19s to find.
"""
from datetime import datetime as date
def nextPrime(n, primes):
isPrime = False
if n % 2 == 0:
n +=1
else:
if n % primes[-1] == 0:
n += 2
while not isPrime:
for prime in primes:
# FIRST: Cheking if if is prime or should go next. SECOND: it is
# a waste to try division by a prime which returns less than 3
if n % prime == 0:
n +=2
# skipping 5
if str(n)[-1] == "5":
n += 2
break
if prime == primes[-1]:
isPrime = not isPrime
if n / prime < 3 :
isPrime = not isPrime
break
return n
# INPUTS
target = 2000000
# Seed list of known primes; grown one prime at a time below.
primes = [2, 3, 5, 7, 11, 13, 17, 19]
# Progress is reported every time the search crosses another 10% of target.
control = target / 10
# NOTE(review): hard-coded absolute Windows path; `file` also shadows a
# builtin name.
path = "C:/Users/User/Documents/AA - Pessoal/DataScience/Project Euler/"
file = "primos_ate_" + str(target) + ".csv"
print("INICIANDO BUSCA DOS NÚMEROS PRIMOS MENORES QUE", target)
start = date.now()
# PROCESSING: extend the prime list until the next prime would pass target.
while primes[-1] < target :
    candidate = nextPrime(primes[-1], primes)
    if candidate > target :
        break
    primes.append(candidate)
    # CONTROLLING: progress log at each 10% milestone.
    if candidate >= control:
        print("O", len(primes), "º primo é", candidate, "em", date.now() - start)
        control += target / 10
# OUTPUT: summary of the search (last prime, count, sum, elapsed time).
print("\n")
print("RESULTADOS:")
print("ENCONTRAR OS NÚMEROS PRIMOS MENORES QUE", target)
print("FORAM ENCONTRADOS", len(primes), "NÚMEROS PRIMOS")
print("ÚLTIMO PRIMO DA LISTA:", primes[-1])
print("SOMA DOS PRIMOS ENCONTRADOS:", sum(primes))
print("TEMPO TOTAL DA BUSCA:", date.now() - start)
# TO FILE: one "index<TAB>prime<CR>" row per prime.
f = open(path + file, "w+")
for i in range(len(primes)):
    f.write(str(i+1))
    f.write("\t") # tab
    f.write(str(primes[i]))
    f.write("\r") # carriage return
f.close()
| 26.888889 | 82 | 0.539027 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 804 | 0.368132 |
e4348a8c3eadb9042a4b4b0ebb7cd499d99a7b46 | 1,124 | py | Python | l5kit/l5kit/tests/rasterization/render_context_test.py | cdicle-motional/l5kit | 4dc4ee5391479bb71f0b373f39c316f9eef5a961 | [
"Apache-2.0"
] | 1 | 2021-12-04T17:48:53.000Z | 2021-12-04T17:48:53.000Z | l5kit/l5kit/tests/rasterization/render_context_test.py | cdicle-motional/l5kit | 4dc4ee5391479bb71f0b373f39c316f9eef5a961 | [
"Apache-2.0"
] | null | null | null | l5kit/l5kit/tests/rasterization/render_context_test.py | cdicle-motional/l5kit | 4dc4ee5391479bb71f0b373f39c316f9eef5a961 | [
"Apache-2.0"
] | 1 | 2021-11-19T08:13:46.000Z | 2021-11-19T08:13:46.000Z | import numpy as np
import pytest
from l5kit.geometry import transform_points
from l5kit.rasterization.render_context import RenderContext
@pytest.mark.parametrize("set_origin_to_bottom", [False, True])
def test_transform_points_to_raster(set_origin_to_bottom: bool) -> None:
    """World points map to the expected raster pixels for both y-origin modes."""
    image_shape_px = np.asarray((200, 200))
    center_in_raster_ratio = np.asarray((0.5, 0.5))
    pixel_size_m = np.asarray((1.0, 1.0))
    # World point that should land at the raster centre.
    center_world = np.asarray((0, -2))
    render_context = RenderContext(
        raster_size_px=image_shape_px,
        pixel_size_m=pixel_size_m,
        center_in_raster_ratio=center_in_raster_ratio,
        set_origin_to_bottom=set_origin_to_bottom,
    )
    input_points = np.array([[0, 0], [10, 10], [-10, -10]])
    # With the origin at the bottom the y axis is flipped relative to the
    # top-origin case; x coordinates are identical in both branches.
    if set_origin_to_bottom:
        expected_output_points = np.array([[100, 98], [110, 88], [90, 108]])
    else:
        expected_output_points = np.array([[100, 102], [110, 112], [90, 92]])
    tf = render_context.raster_from_world(center_world, 0.0)
    output_points = transform_points(input_points, tf)
    np.testing.assert_array_equal(output_points, expected_output_points)
| 35.125 | 77 | 0.715302 | 0 | 0 | 0 | 0 | 982 | 0.873665 | 0 | 0 | 22 | 0.019573 |
e434cb20e1bb4b89d1f4687abbe31af32ff3e3b8 | 1,528 | py | Python | plugin/fcitx.py | bigshans/fcitx.vim | 228a51c6c95997439feddff6c38d62ce014e6d59 | [
"MIT"
] | null | null | null | plugin/fcitx.py | bigshans/fcitx.vim | 228a51c6c95997439feddff6c38d62ce014e6d59 | [
"MIT"
] | null | null | null | plugin/fcitx.py | bigshans/fcitx.vim | 228a51c6c95997439feddff6c38d62ce014e6d59 | [
"MIT"
] | null | null | null | import vim
import functools
import dbus
class FcitxComm():
    """Thin D-Bus bridge to the Fcitx5 input-method controller."""
    def __init__(self):
        bus = dbus.SessionBus()
        obj = bus.get_object('org.fcitx.Fcitx5', '/controller')
        self.fcitx = dbus.Interface(obj, dbus_interface='org.fcitx.Fcitx.Controller1')
    def status(self):
        # A State() value of 2 is treated as "input method active".
        return self.fcitx.State() == 2
    def activate(self):
        self.fcitx.Activate()
    def deactivate(self):
        self.fcitx.Deactivate()
# Connect once at import time; if D-Bus is unavailable, warn inside Vim
# (unless g:silent_unsupported is set) and mark the plugin as not loaded.
try:
    Fcitx = FcitxComm()
    fcitx_loaded = True
except dbus.exceptions.DBusException as e:
    if not vim.vars.get('silent_unsupported'):
        vim.command('echohl WarningMsg | echom "fcitx.vim not loaded: %s" | echohl NONE' % e)
    fcitx_loaded = False
def may_reconnect(func):
    """Decorator: on failure, rebuild the D-Bus connection and retry once.

    Any exception from the wrapped call is echoed in Vim and the module-level
    `Fcitx` connection is re-created before the single retry. If both
    attempts fail, the wrapper returns None.
    """
    @functools.wraps(func)
    def wrapped():
        global Fcitx
        for _ in range(2):
            try:
                return func()
            except Exception as e:
                vim.command('echohl WarningMsg | echom "fcitx.vim: %s: %s" | echohl NONE' % (type(e).__name__, e))
                Fcitx = FcitxComm()
    return wrapped
@may_reconnect
def fcitx2en():
    """If the IM is active, remember that in b:inputtoggle and deactivate it."""
    if vim.eval('g:disable_fcitx_toggle_temp') == '1':
        return
    if Fcitx.status():
        vim.command('let b:inputtoggle = 1')
        Fcitx.deactivate()
@may_reconnect
def fcitx2zh():
    """Re-activate the IM if b:inputtoggle recorded it as active earlier."""
    # A one-shot suppression flag set elsewhere; consume and clear it.
    if vim.eval('g:disable_fcitx_toggle_temp') == '1':
        vim.command('let g:disable_fcitx_toggle_temp = 0')
        return
    if vim.eval('exists("b:inputtoggle")') == '1':
        if vim.eval('b:inputtoggle') == '1':
            Fcitx.activate()
            vim.command('let b:inputtoggle = 0')
        else:
            vim.command('let b:inputtoggle = 0')
| 25.466667 | 106 | 0.656414 | 369 | 0.241492 | 0 | 0 | 815 | 0.533377 | 0 | 0 | 425 | 0.278141 |
e43577db4ce37b9708732914de0c5a01c24639dc | 311 | py | Python | ctf/post.py | ntdgy/python_study | c3511846a89ea72418937de4cc3edf1595a46ec5 | [
"MIT"
] | null | null | null | ctf/post.py | ntdgy/python_study | c3511846a89ea72418937de4cc3edf1595a46ec5 | [
"MIT"
] | null | null | null | ctf/post.py | ntdgy/python_study | c3511846a89ea72418937de4cc3edf1595a46ec5 | [
"MIT"
] | null | null | null | import requests
def post():
    """POST the hard-coded CTF credentials and print the response body."""
    target = "http://165.227.106.113/post.php"
    headers = {'Host': '165.227.106.113'}
    payload = {
        'username': 'admin',
        'password': '71urlkufpsdnlkadsf',
    }
    response = requests.post(url=target, data=payload, headers=headers)
    print(response.text)


post()
| 17.277778 | 61 | 0.559486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 103 | 0.33119 |
e435bc6759728f66c9ba58ab0f9f30b4d9e6d31b | 828 | py | Python | avioclient/controller.py | HermenegildoK/AvioClient | 9cad3a89bbf10d7212561cf15b3ad453060c9434 | [
"MIT"
] | null | null | null | avioclient/controller.py | HermenegildoK/AvioClient | 9cad3a89bbf10d7212561cf15b3ad453060c9434 | [
"MIT"
] | null | null | null | avioclient/controller.py | HermenegildoK/AvioClient | 9cad3a89bbf10d7212561cf15b3ad453060c9434 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from avioclient.send_data import SendControls
from avioclient import config
def send_data():
    """Exercise the avio API: a GET then a POST for connection ids 1..101.

    Each response is printed so the exchange can be inspected on stdout.
    """
    data_sender = SendControls(config.SERVER_URL)
    # A bounded for-loop replaces the original `while True` + manual counter
    # + late `break`; the original iterated ids 1 through 101 inclusive.
    for connection_id in range(1, 102):
        print(
            data_sender.get_data(
                config.GET_ENDPOINT.format(connection_id=connection_id)
            )
        )
        print(
            data_sender.post_data(
                config.POST_ENDPOINT.format(connection_id=connection_id),
                data={
                    "position": "LEFT",
                    "offset": 180,
                },
            )
        )
# Script entry point.
if __name__ == "__main__":
    send_data()
| 23.657143 | 50 | 0.48913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 57 | 0.068841 |
e43608fd33081461199e20cc093779ca67fd8543 | 132 | py | Python | pythonExercicios/ex014.py | Yhago-Carvalho/CursoPython | 343ccabb1a61e16c6078de9672c78c56deed2589 | [
"MIT"
] | null | null | null | pythonExercicios/ex014.py | Yhago-Carvalho/CursoPython | 343ccabb1a61e16c6078de9672c78c56deed2589 | [
"MIT"
] | null | null | null | pythonExercicios/ex014.py | Yhago-Carvalho/CursoPython | 343ccabb1a61e16c6078de9672c78c56deed2589 | [
"MIT"
] | null | null | null | c = float(input('Digite a temperatura em Ceusius: '))
f = (9*c + 160)/5
print(f'A temperatura de {c:.1f}ºC corresponde a {f:.1f}ºF') | 44 | 60 | 0.659091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 90 | 0.671642 |
e436ff03150d44e0196337e442c791322d057adb | 95 | py | Python | python/p287ex5.py | ThePeeps191/dmoj-solutions | 7137e945f3f595c481ad4d29e1dc3a77d8b26e55 | [
"MIT"
] | 1 | 2022-01-23T16:02:14.000Z | 2022-01-23T16:02:14.000Z | python/p287ex5.py | ThePeeps191/dmoj-solutions | 7137e945f3f595c481ad4d29e1dc3a77d8b26e55 | [
"MIT"
] | 5 | 2022-01-23T00:16:49.000Z | 2022-01-30T04:37:45.000Z | python/p287ex5.py | ThePeeps191/dmoj-solutions | 7137e945f3f595c481ad4d29e1dc3a77d8b26e55 | [
"MIT"
] | 1 | 2022-01-23T00:03:47.000Z | 2022-01-23T00:03:47.000Z | # not yet finished
for _ in range(int(input())):print(len(list(set(input().replace("-", ""))))) | 47.5 | 76 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.242105 |
e43c4d5552c855523479c4f6f4237cbc56d53955 | 906 | py | Python | tests/test_fitsutils.py | lsst-dm/despyfitsutils | 7fb96869077712eb20a1cb0f5c132e1cc85424ec | [
"NCSA"
] | null | null | null | tests/test_fitsutils.py | lsst-dm/despyfitsutils | 7fb96869077712eb20a1cb0f5c132e1cc85424ec | [
"NCSA"
] | null | null | null | tests/test_fitsutils.py | lsst-dm/despyfitsutils | 7fb96869077712eb20a1cb0f5c132e1cc85424ec | [
"NCSA"
] | null | null | null | import os
import unittest
import despyfitsutils.fitsutils as utils
TESTDIR = os.path.dirname(__file__)
class MefTest(unittest.TestCase):
    """Tests for a MEF (multi-extension FITS) object."""
    def setUp(self):
        """Build a makeMEF over the bundled input file, writing a fresh output."""
        inputs = [os.path.join(TESTDIR, 'data/input.fits.fz')]
        output = os.path.join(TESTDIR, 'data/output.fits.fz')
        # Instantiation of the class creates the output file (__init__()
        # calls write()) so clobber must be set to True.
        self.mef = utils.makeMEF(filenames=inputs, outname=output,
                                 clobber=True)
    def tearDown(self):
        """Remove the output file, tolerating the case where it never existed."""
        try:
            os.remove(self.mef.outname)
        except FileNotFoundError:
            pass
    def testRead(self):
        # One input file should produce exactly one HDU entry.
        self.mef.read()
        self.assertEqual(len(self.mef.HDU), 1)
    def testWrite(self):
        # write() must (re-)create the output file on disk.
        self.mef.write()
        self.assertTrue(os.path.isfile(self.mef.outname))
| 25.885714 | 72 | 0.611479 | 798 | 0.880795 | 0 | 0 | 0 | 0 | 0 | 0 | 187 | 0.206402 |
e43dacaa5bafcd52f175484e3b1f257816fb14b1 | 4,047 | py | Python | applications/MensajeriaMasiva/models/db.py | chitohugo/MassiveSMS | 05b528de146498531c967aff1ee4fe72720febb3 | [
"BSD-3-Clause"
] | null | null | null | applications/MensajeriaMasiva/models/db.py | chitohugo/MassiveSMS | 05b528de146498531c967aff1ee4fe72720febb3 | [
"BSD-3-Clause"
] | null | null | null | applications/MensajeriaMasiva/models/db.py | chitohugo/MassiveSMS | 05b528de146498531c967aff1ee4fe72720febb3 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# web2py/PyDAL model for the MassiveSMS application (Python 2 code:
# reload(sys) / sys.setdefaultencoding below do not exist in Python 3).
from time import gmtime, strftime
from gluon.custom_import import track_changes
track_changes(True)
from gluon import current
from pydal import *
import sys
reload(sys)
# Python 2 only: force UTF-8 as the default codec for implicit conversions.
sys.setdefaultencoding('utf-8')
if request.global_settings.web2py_version < "2.14.1":
    raise HTTP(500, "Requires web2py 2.13.3 or newer")
from gluon.contrib.appconfig import AppConfig
myconf = AppConfig(reload=True)
# NOTE(review): database credentials are hard-coded in the connection URI;
# they belong in appconfig.ini or an environment variable.
uri = "postgres://chito:yndrid@localhost/massivesms"
current.db = DAL(uri,pool_size=1, check_reserved=['all'], lazy_tables=False, migrate=False)
# Lookup table: municipalities.
current.db.define_table('municipio',
    Field('descripcion', type='string', length=20, required=True, notnull=True,
        requires=[IS_NOT_EMPTY(error_message=('Este campo no puede ser vacio'))]),
)
# Lookup table: positions/roles.
current.db.define_table('cargo',
    Field('descripcion', type='string', length=20, required=True, notnull=True,
        requires=[IS_NOT_EMPTY(error_message=('Este campo no puede ser vacio'))]),
)
# Many-to-many join between municipio and cargo (composite primary key).
current.db.define_table('mun_cargo',
    Field('fk_municipio', 'reference municipio'),
    Field('fk_cargo', 'reference cargo'),
    primarykey=['fk_municipio','fk_cargo'],
)
# A phone contact, tied to one municipality and one role.
current.db.define_table('contacto',
    Field('numero', type='string', length=11, required=True, notnull=True,unique=True,
        requires=[IS_NOT_EMPTY(error_message=('Este campo no puede ser vacio'))]),
    Field('fk_municipio_id', 'reference municipio',required=True),
    Field('fk_cargo_id', 'reference cargo',required=True),
)
# Message delivery state per destination number (160-char SMS body).
current.db.define_table('estado_mensaje',
    Field('estado', length=1, required=True, notnull=True,default=1),
    Field('estado_envio',length=1,required=True, notnull=True,default=1),
    Field('fk_municipio_id', 'reference municipio',required=True),
    Field('fk_cargo_id', 'reference cargo',required=True),
    Field('destino',length=11,required=True, notnull=True),
    Field('mensaje',length=160,required=True, notnull=True),
)
# -------------------------------------------------------------------------
response.generic_patterns = ['*'] if request.is_local else []
response.formstyle = myconf.get('forms.formstyle')  # or 'bootstrap3_stacked' or 'bootstrap2' or other
response.form_label_separator = myconf.get('forms.separator') or ''
from gluon.tools import Auth, Service, PluginManager
# host names must be a list of allowed host names (glob syntax allowed)
auth = Auth(current.db, host_names=myconf.get('host.names'))
service = Service()
plugins = PluginManager()
# -------------------------------------------------------------------------
# create all tables needed by auth if not custom tables
# -------------------------------------------------------------------------
auth.define_tables(username=True, signature=False)
# -------------------------------------------------------------------------
# configure email
# -------------------------------------------------------------------------
mail = auth.settings.mailer
mail.settings.server = 'logging' if request.is_local else myconf.get('smtp.server')
mail.settings.sender = myconf.get('smtp.sender')
mail.settings.login = myconf.get('smtp.login')
mail.settings.tls = myconf.get('smtp.tls') or False
mail.settings.ssl = myconf.get('smtp.ssl') or False
# -------------------------------------------------------------------------
# configure auth policy
# -------------------------------------------------------------------------
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
| 45.988636 | 165 | 0.560909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,470 | 0.363232 |
e43dfd916520e80acf562c6592c0e2124190ae44 | 2,066 | py | Python | dibs/src/dibs_link.py | emin63/dibs | 419b2fad041aee40647429d3c1faac52c92c25a3 | [
"MIT"
] | null | null | null | dibs/src/dibs_link.py | emin63/dibs | 419b2fad041aee40647429d3c1faac52c92c25a3 | [
"MIT"
] | null | null | null | dibs/src/dibs_link.py | emin63/dibs | 419b2fad041aee40647429d3c1faac52c92c25a3 | [
"MIT"
] | null | null | null |
import os
# Python 2 module: defines GetRealFilename / StripShortcutExtensionOnWindows
# with platform-specific implementations (Windows .lnk shortcuts vs. POSIX
# symlinks).
if (os.name == 'nt' or os.name == 'dos'):
    try:
        from win32com.shell import shell
        import pythoncom
    except Exception, e:
        # Fall back to no-op/refusing implementations when pywin32 is absent.
        print 'WARNING: Received exception ' + `e` + ' in doing import.'
        print 'WARNING: Unable to import win32com.shell.shell, pythoncom.'
        print 'WARNING: Symbolic links and Shortcuts will not work.'
        def GetRealFilename( fileName ):
            return fileName
        def StripShortcutExtensionOnWindows( name ):
            raise 'Refusing to deal with shortcut file without win32com stuff.'
    # NOTE(review): this unconditional re-import would raise if the try above
    # failed, shadowing the fallback definitions — confirm intended.
    from win32com.shell import shell
    import pythoncom, os
    class PyShortcut:
        # Minimal wrapper over the IShellLink COM interface.
        def __init__( self ):
            self._base = pythoncom.CoCreateInstance(
                shell.CLSID_ShellLink, None,
                pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink)
        def load( self, filename ):
            # Get an IPersistFile interface, which allows save/restore of the
            # object to/from files.
            self._base.QueryInterface( pythoncom.IID_IPersistFile ).Load(
                filename )
        def save( self, filename ):
            self._base.QueryInterface( pythoncom.IID_IPersistFile ).Save(
                filename, 0 )
        def __getattr__( self, name ):
            # Delegate everything else to the underlying COM object.
            if name != "_base":
                return getattr( self._base, name )
    def GetRealFilename( fileName ):
        # Resolve a Windows .lnk shortcut to its target path.
        L = len(fileName)
        if (fileName[(L-4):L] == '.lnk'):
            shortcut = PyShortcut()
            shortcut.load(fileName)
            return shortcut.GetPath(shell.SLGP_SHORTPATH)[0]
        else:
            return fileName
    def StripShortcutExtensionOnWindows( fileName ):
        # Drop a trailing '.lnk' extension if present.
        L = len(fileName)
        if (fileName[(L-4):L] == '.lnk'):
            return fileName[0:(L-4)]
        else:
            return fileName
else:
    def GetRealFilename( fileName ):
        # POSIX: resolve symlinks via the standard library.
        return os.path.realpath(fileName)
    def StripShortcutExtensionOnWindows( name ):
        # No shortcut concept outside Windows.
        return name
| 34.433333 | 80 | 0.57696 | 733 | 0.354792 | 0 | 0 | 0 | 0 | 0 | 0 | 335 | 0.162149 |
e43f5553851f44ad5911378e9d31bfdce168b90d | 1,207 | py | Python | rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py | psiyan/rfid | 401a093958ffafdcd10259cc9e19b7bd9f0c0e8c | [
"Apache-2.0"
] | null | null | null | rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py | psiyan/rfid | 401a093958ffafdcd10259cc9e19b7bd9f0c0e8c | [
"Apache-2.0"
] | null | null | null | rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py | psiyan/rfid | 401a093958ffafdcd10259cc9e19b7bd9f0c0e8c | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-28 08:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pigeon', '0002_auto_20160327_0723'),
]
operations = [
migrations.AlterField(
model_name='scroll',
name='pubDate',
field=models.DateTimeField(blank=True, null=True, verbose_name='date published'),
),
migrations.AlterField(
model_name='scroll',
name='scrollFrom',
field=models.CharField(max_length=80, verbose_name='from'),
),
migrations.AlterField(
model_name='scroll',
name='scrollID',
field=models.CharField(max_length=40, verbose_name='RFID'),
),
migrations.AlterField(
model_name='scroll',
name='scrollMessage',
field=models.CharField(max_length=1024, verbose_name='message'),
),
migrations.AlterField(
model_name='scroll',
name='scrollTo',
field=models.CharField(max_length=80, verbose_name='to'),
),
]
| 29.439024 | 93 | 0.584093 | 1,050 | 0.869925 | 0 | 0 | 0 | 0 | 0 | 0 | 240 | 0.19884 |
e43fd711dcd86e63949520216ee91e975352e431 | 10,839 | py | Python | esp8266/main.py | 0xSebin/SwimTime.github.io | e2d997464d1f4a36783638c81307a775cdfa7fcd | [
"MIT"
] | 1 | 2021-03-28T16:24:23.000Z | 2021-03-28T16:24:23.000Z | esp8266/main.py | 5ebin-thomas/SwimTime.github.io | e2d997464d1f4a36783638c81307a775cdfa7fcd | [
"MIT"
] | null | null | null | esp8266/main.py | 5ebin-thomas/SwimTime.github.io | e2d997464d1f4a36783638c81307a775cdfa7fcd | [
"MIT"
] | 2 | 2018-02-15T17:27:34.000Z | 2019-11-20T10:00:43.000Z | """
Group -
SwimTime - Swim your way to success
"""
import ads1x15
import network
import time
import math
import machine
from umqtt.simple import MQTTClient
import micropython
from micropython import const
from machine import Pin
"""
Define constant values
"""
run = False
lapnr = 3 #default lap number
temp = 0.0
wifi_ssid = "Alfabeta"
wifi_pswd = "12345678"
server = "io.adafruit.com"
user = "kk2314"
passwd = "674d8794c84d49008c5e0092dc6be24b"
mqtt_temp = "kk2314/feeds/temp"
mqtt_time = "kk2314/feeds/time"
mqtt_rawdata = "kk2314/feeds/rawdata"
mqtt_control = "kk2314/feeds/control"
mqtt_stat = "kk2314/feeds/stat"
mqtt_debug = "kk2314/feeds/debug"
mqtt_tempalert = "kk2314/feeds/tempalert"
"""
Define pins for LED and buzzer
"""
red = Pin(0, Pin.OUT)
blue = Pin(2, Pin.OUT)
p12 = machine.Pin(12)
buzz = machine.PWM(p12)
#function to blink LED
def blink_LED(colour):
colour.off()
time.sleep_ms(50)
colour.on()
time.sleep_ms(50)
#setting up I2C for range finder/ set up ADC
i2c = machine.I2C(scl=machine.Pin(5), sda=machine.Pin(4), freq=100000)
adc = ads1x15.ADS1115(i2c)
adc.gain = 1 #ADS1015_REG_CONFIG_PGA_4_096V
#setting up I2C for temp sens
i2c_temp = machine.I2C(scl=machine.Pin(14), sda=machine.Pin(13), freq=100000)
#Received messages from subscriptions will be delivered to this callback
def sub_cb(topic, msg):
global state
global run
global lapnr
global temp
print((topic, msg))
#Check for messages only for the control topic
if topic == b"kk2314/feeds/control":
if msg == b"start":
run = True
elif msg.decode() == "temp":
get_temp()
payload_temp = "{}".format(temp)
c.publish(mqtt_temp,payload_temp)
print(temp)
else:
lapnr = int(msg)
print(lapnr)
"""
Connect to the wifi
"""
sta_if = network.WLAN(network.STA_IF)
sta_if.active(True)
sta_if.scan()
sta_if.connect(wifi_ssid, wifi_pswd)
print('Connecting to Wi-Fi')
#while connecting blink LED and wait
while not sta_if.isconnected():
blink_LED(red)
pass
print('Wifi connected')
#Turn red LED on (active-low)
red.off()
# Turn off ESP8266's AP
ap_if = network.WLAN(network.AP_IF)
ap_if.active(False)
#Converts the data received from ultrasonic sensor into meters
def convert(data):
global distance
distance = data/10000
distance = distance/0.000976562 #vcc/(1025*5)
distance = (distance/1000)+0.16 #distance offset
#Send a read request and read information of temp sensor as well as convert temp into degree celcius
def get_temp():
global temp
i2c_temp.writeto(0x40, bytearray([0xf3]))
time.sleep(0.5)
data=i2c_temp.readfrom(0x40, 2)
tempraw=int.from_bytes(data, "big")
temp = 175.72 * tempraw / 65536
temp = temp - 46.85
#sets up the buzzer to run a countdown composed of 3 short beeps and a long one
def countdown():
count = 0
freq = 300
while count < 3:
buzz.freq(400)
buzz.duty(512)
time.sleep(0.7)
buzz.duty(1023)
time.sleep(0.7)
count = count + 1
buzz.freq(500)
buzz.duty(512)
time.sleep(1.25)
buzz.duty(1023)
#converts secs into min and seconds
def format(sec):
sec = sec/1000
mins, secs = divmod( sec, 60)
secs = round(secs, 3)
return (mins, secs)
#main() function which executes sensing and mqtt push
def main(server):
global run
global lapnr
global nr
global c
global mqttConnected
"""
Defines which client to connect to.
Using adafruit.io broker requires authentification
so we also set username and password
"""
c = MQTTClient("Sensor boards", server, user = user, password = passwd)
c.set_callback(sub_cb)
#sets flag for mqtt connected
if c.connect() == False:
mqttConnected = True
print('MQTT Connected')
#subscribe to the topic where controls are received
c.subscribe("kk2314/feeds/control")
while True:
if True:
c.wait_msg() #blocking check for message
#start timing laps
if run == True:
#reset the run flag
run = False
#do countdown
countdown()
c.publish(mqtt_debug,"Started countdown")
#start timer
start = time.ticks_ms()
c.publish(mqtt_debug,"Timer started")
print("go")
#wait for user to go away from sensor
time.sleep(5)
#resets statistical variables every beginning of run
lap_index = 0
best_lap= 0
avr_lap = 0
total_time= 0
worst_lap = 0
#main while loop which continues until lapnr goes to 0
while lapnr > 0:
blink_LED(blue)
data = adc.read(0)
convert(data)
#if sensor detects object within threshold it times a lap
if distance < 0.80:
lap_time_raw = time.ticks_diff(time.ticks_ms(), start)
#reset time measure
start = time.ticks_ms()
c.publish(mqtt_debug, "Lap end detected")
lap_index = lap_index + 1
total_time = total_time + lap_time_raw
#check if the lap is the slowest
if lap_time_raw > worst_lap:
worst_lap = lap_time_raw
worst_index = lap_index
#update average lap_time
avr_lap = total_time/lap_index
#check if lap is the fastest
if lap_index == 1:
best_lap = lap_time_raw
best_index = 1
elif lap_time_raw < best_lap:
best_lap = lap_time_raw
best_index = lap_index
#format all the statistical values in mins, secs
mins_av, secs_av = format(avr_lap)
mins_bs, secs_bs = format(best_lap)
mins_ws, secs_ws = format(worst_lap)
mins_to, secs_to = format(total_time)
mins, secs = format(lap_time_raw)
#read current temp
get_temp()
#send alert if temperature is outside ideal range
if temp > 21 and temp < 29:
c.publish(mqtt_tempalert, "Temperature is ideal for a splash, Happy Swimming!")
elif temp < 21:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too low)")
elif temp > 29:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too high)")
#encode all data to JSON - manually to save memory
payload_temp = "{}".format(temp)
payload = " Lap number {} was: {} m {} s. ".format( lap_index, mins, secs)
payload_raw = "{}".format(lap_time_raw/1000)
payload_stat_av = "Average lap time is : {} m {} s ".format(mins_av,secs_av)
payload_stat_bs = "Best lap was lap number {} : {} m {} s ".format(best_index,mins_bs,secs_bs)
payload_stat_ws = "Worst lap was lap number {} : {} m {} s ".format(worst_index,mins_ws,secs_ws)
payload_stat_to = "Total time is : {} m {} s ".format(mins_to,secs_to)
#publish converted and raw data to mqtt broker
c.publish(mqtt_time,payload)
c.publish(mqtt_rawdata, payload_raw)
c.publish(mqtt_temp,payload_temp)
c.publish(mqtt_stat,payload_stat_av)
c.publish(mqtt_stat,payload_stat_bs)
c.publish(mqtt_stat,payload_stat_ws)
c.publish(mqtt_stat,payload_stat_to)
c.publish(mqtt_debug, "Data published successfully")
lapnr = lapnr - 1
#wait for 10 sec for object to get out of range of sensor
if lapnr != 0:
time.sleep(10)
c.publish(mqtt_debug, "Done with current run") #debug messages
else:
c.check_msg() #non-blocking check for message
#start timing laps
if run == True:
#reset the run flag
run = False
#do countdown
countdown()
c.publish(mqtt_debug,"Started countdown")
#start timer
start = time.ticks_ms()
c.publish(mqtt_debug,"Timer started")
print("go")
#wait for user to go away from sensor
time.sleep(5)
#resets statistical variables every beginning of run
lap_index = 0
best_lap= 0
avr_lap = 0
total_time= 0
worst_lap = 0
#main while loop which continues until lapnr goes to 0
while lapnr > 0:
blink_LED(blue)
data = adc.read(0)
convert(data)
#if sensor detects object within threshold it times a lap
if distance < 0.80:
lap_time_raw = time.ticks_diff(time.ticks_ms(), start)
#reset time measure
start = time.ticks_ms()
c.publish(mqtt_debug, "Lap end detected")
lap_index = lap_index + 1
total_time = total_time + lap_time_raw
#check if the lap is the slowest
if lap_time_raw > worst_lap:
worst_lap = lap_time_raw
worst_index = lap_index
#update average lap_time
avr_lap = total_time/lap_index
#check if lap is the fastest
if lap_index == 1:
best_lap = lap_time_raw
best_index = 1
elif lap_time_raw < best_lap:
best_lap = lap_time_raw
best_index = lap_index
#format all the statistical values in mins, secs
mins_av, secs_av = format(avr_lap)
mins_bs, secs_bs = format(best_lap)
mins_ws, secs_ws = format(worst_lap)
mins_to, secs_to = format(total_time)
mins, secs = format(lap_time_raw)
#read current temp
get_temp()
#send alert if temperature is outside ideal range
if temp > 21 and temp < 29:
c.publish(mqtt_tempalert, "Temperature is ideal for a splash, Happy Swimming!")
elif temp < 21:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too low)")
elif temp > 29:
c.publish(mqtt_tempalert, "Careful! We have detected temperature is outside ideal range (Too high)")
#encode all data to JSON - manually to save memory
payload_temp = "{}".format(temp)
payload = " Lap number {} was: {} m {} s. ".format( lap_index, mins, secs)
payload_raw = "{}".format(lap_time_raw/1000)
payload_stat_av = "Average lap time is : {} m {} s ".format(mins_av,secs_av)
payload_stat_bs = "Best lap was lap number {} : {} m {} s ".format(best_index,mins_bs,secs_bs)
payload_stat_ws = "Worst lap was lap number {} : {} m {} s ".format(worst_index,mins_ws,secs_ws)
payload_stat_to = "Total time is : {} m {} s ".format(mins_to,secs_to)
#publish converted and raw data to mqtt broker
c.publish(mqtt_time,payload)
c.publish(mqtt_rawdata, payload_raw)
c.publish(mqtt_temp,payload_temp)
c.publish(mqtt_stat,payload_stat_av)
c.publish(mqtt_stat,payload_stat_bs)
c.publish(mqtt_stat,payload_stat_ws)
c.publish(mqtt_stat,payload_stat_to)
c.publish(mqtt_debug, "Data published successfully")
lapnr = lapnr - 1
#wait for 10 sec for object to get out of range of sensor
if lapnr != 0:
time.sleep(10)
c.publish(mqtt_debug, "Done with current run") #debug messages
c.disconnect()
if __name__ == "__main__":
main(server) | 28.448819 | 110 | 0.658456 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,863 | 0.356398 |
e44176bdde09e0e534875279d12d7f2e7e878bfb | 40,102 | py | Python | pyboto3/workdocs.py | thecraftman/pyboto3 | 653a0db2b00b06708334431da8f169d1f7c7734f | [
"MIT"
] | null | null | null | pyboto3/workdocs.py | thecraftman/pyboto3 | 653a0db2b00b06708334431da8f169d1f7c7734f | [
"MIT"
] | null | null | null | pyboto3/workdocs.py | thecraftman/pyboto3 | 653a0db2b00b06708334431da8f169d1f7c7734f | [
"MIT"
] | null | null | null | '''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def abort_document_version_upload(DocumentId=None, VersionId=None):
    """
    Aborts the upload of the specified document version that was previously initiated by InitiateDocumentVersionUpload . The client should make this call only when it no longer intends or fails to upload the document version.
    See also: AWS API Documentation
    
    :example: response = client.abort_document_version_upload(
        DocumentId='string',
        VersionId='string'
    )
    
    :type DocumentId: string
    :param DocumentId: [REQUIRED]
            The ID of the document.
            
    :type VersionId: string
    :param VersionId: [REQUIRED]
            The ID of the version.
            
    """
    # No-op stub (body intentionally empty).
    pass
def activate_user(UserId=None):
    """
    Activates the specified user. Only active users can access Amazon WorkDocs.
    See also: AWS API Documentation
    
    :example: response = client.activate_user(
        UserId='string'
    )
    
    :type UserId: string
    :param UserId: [REQUIRED]
            The ID of the user.
            
    :rtype: dict
    :return: {
        'User': {
            'Id': 'string',
            'Username': 'string',
            'EmailAddress': 'string',
            'GivenName': 'string',
            'Surname': 'string',
            'OrganizationId': 'string',
            'RootFolderId': 'string',
            'RecycleBinFolderId': 'string',
            'Status': 'ACTIVE'|'INACTIVE'|'PENDING',
            'Type': 'USER'|'ADMIN',
            'CreatedTimestamp': datetime(2015, 1, 1),
            'ModifiedTimestamp': datetime(2015, 1, 1),
            'TimeZoneId': 'string',
            'Locale': 'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default',
            'Storage': {
                'StorageUtilizedInBytes': 123,
                'StorageRule': {
                    'StorageAllocatedInBytes': 123,
                    'StorageType': 'UNLIMITED'|'QUOTA'
                }
            }
        }
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def add_resource_permissions(ResourceId=None, Principals=None):
    """
    Creates a set of permissions for the specified folder or document. The resource permissions are overwritten if the principals already have different permissions.
    See also: AWS API Documentation
    
    :example: response = client.add_resource_permissions(
        ResourceId='string',
        Principals=[
            {
                'Id': 'string',
                'Type': 'USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION',
                'Role': 'VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER'
            },
        ]
    )
    
    :type ResourceId: string
    :param ResourceId: [REQUIRED]
            The ID of the resource.
            
    :type Principals: list
    :param Principals: [REQUIRED]
            The users, groups, or organization being granted permission.
            (dict) --Describes the recipient type and ID, if available.
            Id (string) -- [REQUIRED]The ID of the recipient.
            Type (string) -- [REQUIRED]The type of the recipient.
            Role (string) -- [REQUIRED]The role of the recipient.
            
    :rtype: dict
    :return: {
        'ShareResults': [
            {
                'PrincipalId': 'string',
                'Role': 'VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER',
                'Status': 'SUCCESS'|'FAILURE',
                'ShareId': 'string',
                'StatusMessage': 'string'
            },
        ]
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def can_paginate(operation_name=None):
    """
    Check if an operation can be paginated.
    
    :type operation_name: string
    :param operation_name: The operation name. This is the same name
            as the method name on the client. For example, if the
            method name is create_foo, and you'd normally invoke the
            operation as client.create_foo(**kwargs), if the
            create_foo operation can be paginated, you can use the
            call client.get_paginator('create_foo').
            
    """
    # No-op stub (body intentionally empty).
    pass
def create_folder(Name=None, ParentFolderId=None):
    """
    Creates a folder with the specified name and parent folder.
    See also: AWS API Documentation
    
    :example: response = client.create_folder(
        Name='string',
        ParentFolderId='string'
    )
    
    :type Name: string
    :param Name: The name of the new folder.
    
    :type ParentFolderId: string
    :param ParentFolderId: [REQUIRED]
            The ID of the parent folder.
            
    :rtype: dict
    :return: {
        'Metadata': {
            'Id': 'string',
            'Name': 'string',
            'CreatorId': 'string',
            'ParentFolderId': 'string',
            'CreatedTimestamp': datetime(2015, 1, 1),
            'ModifiedTimestamp': datetime(2015, 1, 1),
            'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED',
            'Signature': 'string'
        }
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def create_notification_subscription(OrganizationId=None, Endpoint=None, Protocol=None, SubscriptionType=None):
    """
    Configure WorkDocs to use Amazon SNS notifications.
    The endpoint receives a confirmation message, and must confirm the subscription. For more information, see Confirm the Subscription in the Amazon Simple Notification Service Developer Guide .
    See also: AWS API Documentation
    
    :example: response = client.create_notification_subscription(
        OrganizationId='string',
        Endpoint='string',
        Protocol='HTTPS',
        SubscriptionType='ALL'
    )
    
    :type OrganizationId: string
    :param OrganizationId: [REQUIRED]
            The ID of the organization.
            
    :type Endpoint: string
    :param Endpoint: [REQUIRED]
            The endpoint to receive the notifications. If the protocol is HTTPS, the endpoint is a URL that begins with 'https://'.
            
    :type Protocol: string
    :param Protocol: [REQUIRED]
            The protocol to use. The supported value is https, which delivers JSON-encoded messages using HTTPS POST.
            
    :type SubscriptionType: string
    :param SubscriptionType: [REQUIRED]
            The notification type.
            
    :rtype: dict
    :return: {
        'Subscription': {
            'SubscriptionId': 'string',
            'EndPoint': 'string',
            'Protocol': 'HTTPS'
        }
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def create_user(OrganizationId=None, Username=None, GivenName=None, Surname=None, Password=None, TimeZoneId=None, StorageRule=None):
    """
    Creates a user in a Simple AD or Microsoft AD directory. The status of a newly created user is "ACTIVE". New users can access Amazon WorkDocs.
    See also: AWS API Documentation
    
    :example: response = client.create_user(
        OrganizationId='string',
        Username='string',
        GivenName='string',
        Surname='string',
        Password='string',
        TimeZoneId='string',
        StorageRule={
            'StorageAllocatedInBytes': 123,
            'StorageType': 'UNLIMITED'|'QUOTA'
        }
    )
    
    :type OrganizationId: string
    :param OrganizationId: The ID of the organization.
    
    :type Username: string
    :param Username: [REQUIRED]
            The login name of the user.
            
    :type GivenName: string
    :param GivenName: [REQUIRED]
            The given name of the user.
            
    :type Surname: string
    :param Surname: [REQUIRED]
            The surname of the user.
            
    :type Password: string
    :param Password: [REQUIRED]
            The password of the user.
            
    :type TimeZoneId: string
    :param TimeZoneId: The time zone ID of the user.
    
    :type StorageRule: dict
    :param StorageRule: The amount of storage for the user.
            StorageAllocatedInBytes (integer) --The amount of storage allocated, in bytes.
            StorageType (string) --The type of storage.
            
    :rtype: dict
    :return: {
        'User': {
            'Id': 'string',
            'Username': 'string',
            'EmailAddress': 'string',
            'GivenName': 'string',
            'Surname': 'string',
            'OrganizationId': 'string',
            'RootFolderId': 'string',
            'RecycleBinFolderId': 'string',
            'Status': 'ACTIVE'|'INACTIVE'|'PENDING',
            'Type': 'USER'|'ADMIN',
            'CreatedTimestamp': datetime(2015, 1, 1),
            'ModifiedTimestamp': datetime(2015, 1, 1),
            'TimeZoneId': 'string',
            'Locale': 'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default',
            'Storage': {
                'StorageUtilizedInBytes': 123,
                'StorageRule': {
                    'StorageAllocatedInBytes': 123,
                    'StorageType': 'UNLIMITED'|'QUOTA'
                }
            }
        }
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def deactivate_user(UserId=None):
    """
    Deactivates the specified user, which revokes the user's access to Amazon WorkDocs.
    See also: AWS API Documentation
    
    :example: response = client.deactivate_user(
        UserId='string'
    )
    
    :type UserId: string
    :param UserId: [REQUIRED]
            The ID of the user.
            
    """
    # No-op stub (body intentionally empty).
    pass
def delete_document(DocumentId=None):
    """
    Permanently deletes the specified document and its associated metadata.
    See also: AWS API Documentation
    
    :example: response = client.delete_document(
        DocumentId='string'
    )
    
    :type DocumentId: string
    :param DocumentId: [REQUIRED]
            The ID of the document.
            
    """
    # No-op stub (body intentionally empty).
    pass
def delete_folder(FolderId=None):
    """
    Permanently deletes the specified folder and its contents.
    See also: AWS API Documentation
    
    :example: response = client.delete_folder(
        FolderId='string'
    )
    
    :type FolderId: string
    :param FolderId: [REQUIRED]
            The ID of the folder.
            
    """
    # No-op stub (body intentionally empty).
    pass
def delete_folder_contents(FolderId=None):
    """
    Deletes the contents of the specified folder.
    See also: AWS API Documentation
    
    :example: response = client.delete_folder_contents(
        FolderId='string'
    )
    
    :type FolderId: string
    :param FolderId: [REQUIRED]
            The ID of the folder.
            
    """
    # No-op stub (body intentionally empty).
    pass
def delete_notification_subscription(SubscriptionId=None, OrganizationId=None):
    """
    Deletes the specified subscription from the specified organization.
    See also: AWS API Documentation
    
    :example: response = client.delete_notification_subscription(
        SubscriptionId='string',
        OrganizationId='string'
    )
    
    :type SubscriptionId: string
    :param SubscriptionId: [REQUIRED]
            The ID of the subscription.
            
    :type OrganizationId: string
    :param OrganizationId: [REQUIRED]
            The ID of the organization.
            
    """
    # No-op stub (body intentionally empty).
    pass
def delete_user(UserId=None):
    """
    Deletes the specified user from a Simple AD or Microsoft AD directory.
    See also: AWS API Documentation
    
    :example: response = client.delete_user(
        UserId='string'
    )
    
    :type UserId: string
    :param UserId: [REQUIRED]
            The ID of the user.
            
    """
    # No-op stub (body intentionally empty).
    pass
def describe_document_versions(DocumentId=None, Marker=None, Limit=None, Include=None, Fields=None):
    """
    Retrieves the document versions for the specified document.
    By default, only active versions are returned.
    See also: AWS API Documentation
    
    :example: response = client.describe_document_versions(
        DocumentId='string',
        Marker='string',
        Limit=123,
        Include='string',
        Fields='string'
    )
    
    :type DocumentId: string
    :param DocumentId: [REQUIRED]
            The ID of the document.
            
    :type Marker: string
    :param Marker: The marker for the next set of results. (You received this marker from a previous call.)
    
    :type Limit: integer
    :param Limit: The maximum number of versions to return with this call.
    
    :type Include: string
    :param Include: A comma-separated list of values. Specify 'INITIALIZED' to include incomplete versions.
    
    :type Fields: string
    :param Fields: Specify 'SOURCE' to include initialized versions and a URL for the source document.
    
    :rtype: dict
    :return: {
        'DocumentVersions': [
            {
                'Id': 'string',
                'Name': 'string',
                'ContentType': 'string',
                'Size': 123,
                'Signature': 'string',
                'Status': 'INITIALIZED'|'ACTIVE',
                'CreatedTimestamp': datetime(2015, 1, 1),
                'ModifiedTimestamp': datetime(2015, 1, 1),
                'ContentCreatedTimestamp': datetime(2015, 1, 1),
                'ContentModifiedTimestamp': datetime(2015, 1, 1),
                'CreatorId': 'string',
                'Thumbnail': {
                    'string': 'string'
                },
                'Source': {
                    'string': 'string'
                }
            },
        ],
        'Marker': 'string'
    }
    
    :returns: 
    (string) --
    (string) --
    
    """
    # No-op stub (body intentionally empty).
    pass
def describe_folder_contents(FolderId=None, Sort=None, Order=None, Limit=None, Marker=None, Type=None, Include=None):
    """
    Describes the contents of the specified folder, including its documents and sub-folders.
    By default, Amazon WorkDocs returns the first 100 active document and folder metadata items. If there are more results, the response includes a marker that you can use to request the next set of results. You can also request initialized documents.
    See also: AWS API Documentation
    
    :example: response = client.describe_folder_contents(
        FolderId='string',
        Sort='DATE'|'NAME',
        Order='ASCENDING'|'DESCENDING',
        Limit=123,
        Marker='string',
        Type='ALL'|'DOCUMENT'|'FOLDER',
        Include='string'
    )
    
    :type FolderId: string
    :param FolderId: [REQUIRED]
            The ID of the folder.
            
    :type Sort: string
    :param Sort: The sorting criteria.
    
    :type Order: string
    :param Order: The order for the contents of the folder.
    
    :type Limit: integer
    :param Limit: The maximum number of items to return with this call.
    
    :type Marker: string
    :param Marker: The marker for the next set of results. (You received this marker from a previous call.)
    
    :type Type: string
    :param Type: The type of items.
    
    :type Include: string
    :param Include: The contents to include. Specify 'INITIALIZED' to include initialized documents.
    
    :rtype: dict
    :return: {
        'Folders': [
            {
                'Id': 'string',
                'Name': 'string',
                'CreatorId': 'string',
                'ParentFolderId': 'string',
                'CreatedTimestamp': datetime(2015, 1, 1),
                'ModifiedTimestamp': datetime(2015, 1, 1),
                'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED',
                'Signature': 'string'
            },
        ],
        'Documents': [
            {
                'Id': 'string',
                'CreatorId': 'string',
                'ParentFolderId': 'string',
                'CreatedTimestamp': datetime(2015, 1, 1),
                'ModifiedTimestamp': datetime(2015, 1, 1),
                'LatestVersionMetadata': {
                    'Id': 'string',
                    'Name': 'string',
                    'ContentType': 'string',
                    'Size': 123,
                    'Signature': 'string',
                    'Status': 'INITIALIZED'|'ACTIVE',
                    'CreatedTimestamp': datetime(2015, 1, 1),
                    'ModifiedTimestamp': datetime(2015, 1, 1),
                    'ContentCreatedTimestamp': datetime(2015, 1, 1),
                    'ContentModifiedTimestamp': datetime(2015, 1, 1),
                    'CreatorId': 'string',
                    'Thumbnail': {
                        'string': 'string'
                    },
                    'Source': {
                        'string': 'string'
                    }
                },
                'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'
            },
        ],
        'Marker': 'string'
    }
    
    :returns: 
    (string) --
    (string) --
    
    """
    # No-op stub (body intentionally empty).
    pass
def describe_notification_subscriptions(OrganizationId=None, Marker=None, Limit=None):
    """
    Lists the specified notification subscriptions.
    See also: AWS API Documentation
    
    :example: response = client.describe_notification_subscriptions(
        OrganizationId='string',
        Marker='string',
        Limit=123
    )
    
    :type OrganizationId: string
    :param OrganizationId: [REQUIRED]
            The ID of the organization.
            
    :type Marker: string
    :param Marker: The marker for the next set of results. (You received this marker from a previous call.)
    
    :type Limit: integer
    :param Limit: The maximum number of items to return with this call.
    
    :rtype: dict
    :return: {
        'Subscriptions': [
            {
                'SubscriptionId': 'string',
                'EndPoint': 'string',
                'Protocol': 'HTTPS'
            },
        ],
        'Marker': 'string'
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def describe_resource_permissions(ResourceId=None, Limit=None, Marker=None):
    """
    Describes the permissions of a specified resource.
    See also: AWS API Documentation
    
    :example: response = client.describe_resource_permissions(
        ResourceId='string',
        Limit=123,
        Marker='string'
    )
    
    :type ResourceId: string
    :param ResourceId: [REQUIRED]
            The ID of the resource.
            
    :type Limit: integer
    :param Limit: The maximum number of items to return with this call.
    
    :type Marker: string
    :param Marker: The marker for the next set of results. (You received this marker from a previous call.)
    
    :rtype: dict
    :return: {
        'Principals': [
            {
                'Id': 'string',
                'Type': 'USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION',
                'Roles': [
                    {
                        'Role': 'VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER',
                        'Type': 'DIRECT'|'INHERITED'
                    },
                ]
            },
        ],
        'Marker': 'string'
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def describe_users(OrganizationId=None, UserIds=None, Query=None, Include=None, Order=None, Sort=None, Marker=None, Limit=None, Fields=None):
    """
    Describes the specified users. You can describe all users or filter the results (for example, by status or organization).
    By default, Amazon WorkDocs returns the first 24 active or pending users. If there are more results, the response includes a marker that you can use to request the next set of results.
    See also: AWS API Documentation
    
    :example: response = client.describe_users(
        OrganizationId='string',
        UserIds='string',
        Query='string',
        Include='ALL'|'ACTIVE_PENDING',
        Order='ASCENDING'|'DESCENDING',
        Sort='USER_NAME'|'FULL_NAME'|'STORAGE_LIMIT'|'USER_STATUS'|'STORAGE_USED',
        Marker='string',
        Limit=123,
        Fields='string'
    )
    
    :type OrganizationId: string
    :param OrganizationId: The ID of the organization.
    
    :type UserIds: string
    :param UserIds: The IDs of the users.
    
    :type Query: string
    :param Query: A query to filter users by user name.
    
    :type Include: string
    :param Include: The state of the users. Specify 'ALL' to include inactive users.
    
    :type Order: string
    :param Order: The order for the results.
    
    :type Sort: string
    :param Sort: The sorting criteria.
    
    :type Marker: string
    :param Marker: The marker for the next set of results. (You received this marker from a previous call.)
    
    :type Limit: integer
    :param Limit: The maximum number of items to return.
    
    :type Fields: string
    :param Fields: A comma-separated list of values. Specify 'STORAGE_METADATA' to include the user storage quota and utilization information.
    
    :rtype: dict
    :return: {
        'Users': [
            {
                'Id': 'string',
                'Username': 'string',
                'EmailAddress': 'string',
                'GivenName': 'string',
                'Surname': 'string',
                'OrganizationId': 'string',
                'RootFolderId': 'string',
                'RecycleBinFolderId': 'string',
                'Status': 'ACTIVE'|'INACTIVE'|'PENDING',
                'Type': 'USER'|'ADMIN',
                'CreatedTimestamp': datetime(2015, 1, 1),
                'ModifiedTimestamp': datetime(2015, 1, 1),
                'TimeZoneId': 'string',
                'Locale': 'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default',
                'Storage': {
                    'StorageUtilizedInBytes': 123,
                    'StorageRule': {
                        'StorageAllocatedInBytes': 123,
                        'StorageType': 'UNLIMITED'|'QUOTA'
                    }
                }
            },
        ],
        'TotalNumberOfUsers': 123,
        'Marker': 'string'
    }
    
    """
    # No-op stub (body intentionally empty).
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """
    Generate a presigned url given a client, its method, and arguments
    
    :type ClientMethod: string
    :param ClientMethod: The client method to presign for
    
    :type Params: dict
    :param Params: The parameters normally passed to
            ClientMethod.
            
    :type ExpiresIn: int
    :param ExpiresIn: The number of seconds the presigned url is valid
            for. By default it expires in an hour (3600 seconds)
            
    :type HttpMethod: string
    :param HttpMethod: The http method to use on the generated url. By
            default, the http method is whatever is used in the method's model.
            
    """
    # No-op stub (body intentionally empty).
    pass
def get_document(DocumentId=None):
    """
    Retrieves the specified document object.
    See also: AWS API Documentation
    
    :example: response = client.get_document(
        DocumentId='string'
    )
    
    :type DocumentId: string
    :param DocumentId: [REQUIRED]
            The ID of the document object.
            
    :rtype: dict
    :return: {
        'Metadata': {
            'Id': 'string',
            'CreatorId': 'string',
            'ParentFolderId': 'string',
            'CreatedTimestamp': datetime(2015, 1, 1),
            'ModifiedTimestamp': datetime(2015, 1, 1),
            'LatestVersionMetadata': {
                'Id': 'string',
                'Name': 'string',
                'ContentType': 'string',
                'Size': 123,
                'Signature': 'string',
                'Status': 'INITIALIZED'|'ACTIVE',
                'CreatedTimestamp': datetime(2015, 1, 1),
                'ModifiedTimestamp': datetime(2015, 1, 1),
                'ContentCreatedTimestamp': datetime(2015, 1, 1),
                'ContentModifiedTimestamp': datetime(2015, 1, 1),
                'CreatorId': 'string',
                'Thumbnail': {
                    'string': 'string'
                },
                'Source': {
                    'string': 'string'
                }
            },
            'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'
        }
    }
    
    :returns: 
    (string) --
    (string) --
    
    """
    # No-op stub (body intentionally empty).
    pass
def get_document_path(DocumentId=None, Limit=None, Fields=None, Marker=None):
    """Retrieve the folder hierarchy (path) of the requested document.

    By default at most 100 levels upwards are returned, identified by the
    parent folders' IDs only; names can be requested via ``Fields``.

    :type DocumentId: string
    :param DocumentId: [REQUIRED] The ID of the document.
    :type Limit: integer
    :param Limit: Maximum number of hierarchy levels to return.
    :type Fields: string
    :param Fields: Comma-separated values; 'NAME' adds parent-folder names.
    :type Marker: string
    :param Marker: Not supported.
    :rtype: dict
    :return: ``{'Path': {'Components': [{'Id': ..., 'Name': ...}, ...]}}``
    """
def get_document_version(DocumentId=None, VersionId=None, Fields=None):
    """Retrieve version metadata for the specified document (documentation stub).

    :type DocumentId: string
    :param DocumentId: [REQUIRED] The ID of the document.
    :type VersionId: string
    :param VersionId: [REQUIRED] The version ID of the document.
    :type Fields: string
    :param Fields: Comma-separated values; 'SOURCE' includes a URL for the
        source document.
    :rtype: dict
    :return: ``{'Metadata': {...}}`` with the version's name, content type,
        size, signature, status ('INITIALIZED'|'ACTIVE'), timestamps,
        creator ID and thumbnail/source URL maps.
    """
def get_folder(FolderId=None):
    """Retrieve the metadata of the specified folder (documentation stub).

    :type FolderId: string
    :param FolderId: [REQUIRED] The ID of the folder.
    :rtype: dict
    :return: ``{'Metadata': {...}}`` with the folder's ID, name, creator,
        parent folder, timestamps, resource state
        ('ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED') and signature.
    """
def get_folder_path(FolderId=None, Limit=None, Fields=None, Marker=None):
    """Retrieve the folder hierarchy (path) of the specified folder.

    By default at most 100 levels upwards are returned, identified by the
    parent folders' IDs only; names can be requested via ``Fields``.

    :type FolderId: string
    :param FolderId: [REQUIRED] The ID of the folder.
    :type Limit: integer
    :param Limit: Maximum number of hierarchy levels to return.
    :type Fields: string
    :param Fields: Comma-separated values; 'NAME' adds parent-folder names.
    :type Marker: string
    :param Marker: Not supported.
    :rtype: dict
    :return: ``{'Path': {'Components': [{'Id': ..., 'Name': ...}, ...]}}``
    """
def get_paginator(operation_name=None):
    """Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: Same name as the client method: if the method is
        ``create_foo`` and the operation is pageable, use
        ``client.get_paginator('create_foo')``.
    :rtype: L{botocore.paginate.Paginator}
    """
def get_waiter():
    """Return a waiter object (stub; no waiters are documented for this client)."""
def initiate_document_version_upload(Id=None, Name=None, ContentCreatedTimestamp=None, ContentModifiedTimestamp=None, ContentType=None, DocumentSizeInBytes=None, ParentFolderId=None):
    """Create a new document object and version object.

    First step of a document upload: call this, upload the file to the
    returned URL, then call UpdateDocumentVersion. Cancel with
    AbortDocumentVersionUpload.

    :type Id: string
    :param Id: The document ID (only when versioning an existing document).
    :type Name: string
    :param Name: The name of the document.
    :type ContentCreatedTimestamp: datetime
    :param ContentCreatedTimestamp: When the content was originally created.
    :type ContentModifiedTimestamp: datetime
    :param ContentModifiedTimestamp: When the content was modified.
    :type ContentType: string
    :param ContentType: The content type of the document.
    :type DocumentSizeInBytes: integer
    :param DocumentSizeInBytes: The size of the document, in bytes.
    :type ParentFolderId: string
    :param ParentFolderId: [REQUIRED] The ID of the parent folder.
    :rtype: dict
    :return: ``{'Metadata': {...}, 'UploadMetadata': {'UploadUrl': ...,
        'SignedHeaders': {...}}}`` — the new document/version metadata plus
        the presigned upload target.
    """
def remove_all_resource_permissions(ResourceId=None):
    """Remove all permissions from the specified resource.

    :type ResourceId: string
    :param ResourceId: [REQUIRED] The ID of the resource.
    """
def remove_resource_permission(ResourceId=None, PrincipalId=None, PrincipalType=None):
    """Remove one principal's permission from the specified resource.

    :type ResourceId: string
    :param ResourceId: [REQUIRED] The ID of the resource.
    :type PrincipalId: string
    :param PrincipalId: [REQUIRED] The principal ID of the resource.
    :type PrincipalType: string
    :param PrincipalType: One of
        'USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION'.
    """
def update_document(DocumentId=None, Name=None, ParentFolderId=None, ResourceState=None):
    """Update attributes of a document.

    The user must have access to both the document and, where applicable,
    its parent folder.

    :type DocumentId: string
    :param DocumentId: [REQUIRED] The ID of the document.
    :type Name: string
    :param Name: The new name of the document.
    :type ParentFolderId: string
    :param ParentFolderId: The ID of the parent folder.
    :type ResourceState: string
    :param ResourceState: Only 'ACTIVE' and 'RECYCLED' are supported.
    """
def update_document_version(DocumentId=None, VersionId=None, VersionStatus=None):
    """Set a document version's status to ACTIVE.

    Last step of a document upload, after the client has uploaded the file
    to the S3-presigned URL returned by InitiateDocumentVersionUpload; the
    containing document is also set to ACTIVE.

    :type DocumentId: string
    :param DocumentId: [REQUIRED] The ID of the document.
    :type VersionId: string
    :param VersionId: [REQUIRED] The version ID of the document.
    :type VersionStatus: string
    :param VersionStatus: The status of the version ('ACTIVE').
    """
def update_folder(FolderId=None, Name=None, ParentFolderId=None, ResourceState=None):
    """Update attributes of a folder.

    The user must have access to both the folder and, where applicable,
    its parent folder.

    :type FolderId: string
    :param FolderId: [REQUIRED] The ID of the folder.
    :type Name: string
    :param Name: The new name of the folder.
    :type ParentFolderId: string
    :param ParentFolderId: The ID of the parent folder.
    :type ResourceState: string
    :param ResourceState: Only 'ACTIVE' and 'RECYCLED' are accepted by the API.
    """
def update_user(UserId=None, GivenName=None, Surname=None, Type=None, StorageRule=None, TimeZoneId=None, Locale=None):
    """Update attributes of a user and grant or revoke site admin privileges.

    :type UserId: string
    :param UserId: [REQUIRED] The ID of the user.
    :type GivenName: string
    :param GivenName: The given name of the user.
    :type Surname: string
    :param Surname: The surname of the user.
    :type Type: string
    :param Type: 'USER' or 'ADMIN'.
    :type StorageRule: dict
    :param StorageRule: ``{'StorageAllocatedInBytes': int,
        'StorageType': 'UNLIMITED'|'QUOTA'}``.
    :type TimeZoneId: string
    :param TimeZoneId: The time zone ID of the user.
    :type Locale: string
    :param Locale: One of
        'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default'.
    :rtype: dict
    :return: ``{'User': {...}}`` with the updated user metadata (IDs, name,
        status, type, timestamps, locale and storage usage/rule).
    """
| 29.143895 | 310 | 0.573039 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 37,179 | 0.927111 |
e4423151d9e155eac596c2c27348cae0215b843a | 983 | py | Python | binding/python/ddls/feeder/feeder.py | huzelin/ddls | 3333a669c59ce2e525945f814a54784dafc6191b | [
"MIT"
] | 3 | 2019-01-03T07:34:01.000Z | 2020-02-13T19:53:35.000Z | binding/python/ddls/feeder/feeder.py | huzelin/ddls | 3333a669c59ce2e525945f814a54784dafc6191b | [
"MIT"
] | null | null | null | binding/python/ddls/feeder/feeder.py | huzelin/ddls | 3333a669c59ce2e525945f814a54784dafc6191b | [
"MIT"
] | 1 | 2020-05-06T11:08:07.000Z | 2020-05-06T11:08:07.000Z | """ Feeder for batch production"""
from __future__ import absolute_import
import ctypes
from ddls.base import check_call, LIB, c_str, c_array
from ddls.feeder.batch_iterator import BatchIterator
class Feeder(object):
    """Drives batch production through the native HPPS feeder library."""

    def __init__(self):
        """No native state is created here; work starts in schedule()/start()."""
        pass

    def start(self, thread_num):
        """Start the feeder's native worker threads.

        Note: currently this must be called after schedule(); for dynamic
        scheduling that ordering constraint should be fixed.
        """
        check_call(LIB.HPPS_FeederStart(thread_num))

    def schedule(self, plan, max_queue_size=1):
        """Schedule `plan` and return a BatchIterator over its output queue."""
        batch_handle = ctypes.c_void_p()
        check_call(LIB.HPPS_FeederSchedule(plan.handle,
                                           max_queue_size,
                                           ctypes.byref(batch_handle)))
        return BatchIterator(batch_handle)

    def stop(self):
        """Stop the feeder's native worker threads."""
        check_call(LIB.HPPS_FeederStop())
| 27.305556 | 84 | 0.580875 | 784 | 0.797558 | 0 | 0 | 0 | 0 | 0 | 0 | 274 | 0.278739 |
e44339ec7d8d98173878c5ddc15f39e511c628ec | 258 | py | Python | tests/test_example.py | akoul1/mvlearn | 177d391bb12c6e94335720d9af3608bd719d8be1 | [
"Apache-2.0"
] | null | null | null | tests/test_example.py | akoul1/mvlearn | 177d391bb12c6e94335720d9af3608bd719d8be1 | [
"Apache-2.0"
] | null | null | null | tests/test_example.py | akoul1/mvlearn | 177d391bb12c6e94335720d9af3608bd719d8be1 | [
"Apache-2.0"
] | null | null | null | import pytest
from mvlearn.example.example import example_function
def test_example_function():
    """
    Check that example_function echoes its argument and defaults to 'param'.
    """
    assert example_function("hello") == "hello"
    assert example_function() == "param"
| 21.5 | 53 | 0.713178 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 86 | 0.333333 |
e443a35a02a890811a35899fe38cc7d3bb4c7d5c | 2,155 | py | Python | api/resources/resources.py | arkhn/fhirball-server | b4d1a1c29dfff5ba60bfbb6b291f6bdb6e6ccd6e | [
"Apache-2.0"
] | 5 | 2018-12-21T13:20:12.000Z | 2019-11-20T23:58:06.000Z | api/resources/resources.py | arkhn/fhir-ball-server | b4d1a1c29dfff5ba60bfbb6b291f6bdb6e6ccd6e | [
"Apache-2.0"
] | null | null | null | api/resources/resources.py | arkhn/fhir-ball-server | b4d1a1c29dfff5ba60bfbb6b291f6bdb6e6ccd6e | [
"Apache-2.0"
] | null | null | null | from flask_restful import Resource
import requests
from api.common.utils import file_response
ENCODING = 'utf-8'
SCHEMA_URL = 'http://127.0.0.1:8422'
STORE_URL = 'http://127.0.0.1:8423'
class FhirDatatypes(Resource):
    """REST resource exposing the FHIR datatypes available in the store."""

    @staticmethod
    def get():
        """Fetch datatypes.json from the store and wrap it in a file response."""
        raw = requests.get('{}/datatypes.json'.format(STORE_URL))
        content = raw.content.decode(ENCODING)
        return file_response(content, 'json')
class FhirResources(Resource):
    """REST resource exposing the list of available FHIR resources."""

    @staticmethod
    def get():
        """Fetch resource_list.json from the store and wrap it in a file response."""
        raw = requests.get('{}/resource_list.json'.format(STORE_URL))
        content = raw.content.decode(ENCODING)
        return file_response(content, 'json')
class FhirResource(Resource):
    """REST resource exposing a single FHIR resource definition by name."""

    @staticmethod
    def get(resource_name):
        """Fetch fhirResources/<resource_name>.json from the store."""
        url = '{}/fhirResources/{}.json'.format(STORE_URL, resource_name)
        content = requests.get(url).content.decode(ENCODING)
        return file_response(content, 'json')
class Schemas(Resource):
    """REST resource listing the available database schemas."""

    @staticmethod
    def get():
        """Fetch databases.json from the schema service as a JSON file response."""
        raw = requests.get('{}/databases.json'.format(SCHEMA_URL))
        content = raw.content.decode(ENCODING)
        return file_response(content, 'json')
class Schema(Resource):
    """REST resource serving one database schema file."""

    @staticmethod
    def get(database_name, extension):
        """Fetch <database_name>.<extension> from the schema service and
        parse it according to its extension."""
        url = '{}/{}.{}'.format(SCHEMA_URL, database_name, extension)
        content = requests.get(url).content.decode(ENCODING)
        return file_response(content, extension)
class Store(Resource):
    """REST resource serving one file from the FHIR store."""

    @staticmethod
    def get(resource_name, extension):
        """Fetch <resource_name>.<extension> from the store and parse it
        according to its extension."""
        url = '{}/{}.{}'.format(STORE_URL, resource_name, extension)
        content = requests.get(url).content.decode(ENCODING)
        return file_response(content, extension)
| 24.770115 | 87 | 0.624594 | 1,948 | 0.903944 | 0 | 0 | 1,760 | 0.816705 | 0 | 0 | 543 | 0.251972 |
e445d667e0d2518eeb5e300fca8baeaa532b0501 | 427 | py | Python | t_mongo.py | iloghyr/easy_python | b750f6817d54562b23630e2419bace19da0abf8b | [
"Apache-2.0"
] | 1 | 2018-03-01T02:42:52.000Z | 2018-03-01T02:42:52.000Z | t_mongo.py | iloghyr/easy_python | b750f6817d54562b23630e2419bace19da0abf8b | [
"Apache-2.0"
] | null | null | null | t_mongo.py | iloghyr/easy_python | b750f6817d54562b23630e2419bace19da0abf8b | [
"Apache-2.0"
] | null | null | null | #!/bin/env python
#coding: utf-8
import pymongo
print [x for x in range(2)]
con = pymongo.MongoClient("localhost", 27017)
db = con.mars
collection = db.users
data = collection.find_one({"username":"hyr"})
print data
data['age'] = 225
print collection.update({"_idd":data['_id']}, data)
print collection.find_one({"username":"hyr"})
# for i in collection.find().sort('_id', pymongo.DESCENDING).limit(1):
# print i
| 17.08 | 70 | 0.683841 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 0.400468 |
e4466c3b9ecc29dbb105b55c4d10907897f3d25c | 742 | py | Python | ArtificialData/RhoAndBeta.py | AlfLobos/DSP | 1e1073c6b0da562b0aea3dec9d62bc563a3b46f5 | [
"CNRI-Python"
] | null | null | null | ArtificialData/RhoAndBeta.py | AlfLobos/DSP | 1e1073c6b0da562b0aea3dec9d62bc563a3b46f5 | [
"CNRI-Python"
] | null | null | null | ArtificialData/RhoAndBeta.py | AlfLobos/DSP | 1e1073c6b0da562b0aea3dec9d62bc563a3b46f5 | [
"CNRI-Python"
] | null | null | null | import numpy as np
def CalcRhoAndBetaVectors(bid_vec, UB_bid, num_edges, index_Imps, adverPerImp, firstPrice):
    """Evaluate rho and beta for the full bid vector.

    For each edge, looks up the impression type's bid upper bound and
    advertiser count, computes (rho, beta) via RhoBetaValue, and returns
    the two columns as ``[rho_vector, beta_vector]`` (numpy arrays).
    """
    rho_beta = np.zeros((num_edges, 2))
    for edge, imp_type in enumerate(index_Imps):
        rho_beta[edge, :] = RhoBetaValue(
            bid_vec[edge], UB_bid[imp_type], adverPerImp[imp_type], firstPrice)
    return [rho_beta[:, 0], rho_beta[:, 1]]
def CalcBeta(bid, num_adv, firstPrice):
    """Expected payment ('beta') for a bid.

    First-price auction: pay the bid itself. Otherwise: scale the bid by
    num_adv / (num_adv + 1.0).
    """
    if firstPrice:
        return bid
    return (num_adv / (num_adv + 1.0)) * bid
def RhoBetaValue(bid, ub, n, firstPrice):
    """Return [rho, beta] for a single bid.

    rho = (bid/ub)**n; beta is the expected payment from CalcBeta.
    """
    # Original note (meaning unconfirmed): "For rho_beta_Type=0, args[0]=adv".
    ratio = bid / ub
    rho = np.power(ratio, n)
    beta = CalcBeta(bid, n, firstPrice)
    return [rho, beta]
e44985df33485739c9a738d44c1ed72af3c01cd0 | 3,208 | py | Python | src/utils/greedy.py | vmgabriel/tabu-base | 615c45e4d6b6fdb1c85c8fbaa316a1e6ce829fcd | [
"Apache-2.0"
] | null | null | null | src/utils/greedy.py | vmgabriel/tabu-base | 615c45e4d6b6fdb1c85c8fbaa316a1e6ce829fcd | [
"Apache-2.0"
] | null | null | null | src/utils/greedy.py | vmgabriel/tabu-base | 615c45e4d6b6fdb1c85c8fbaa316a1e6ce829fcd | [
"Apache-2.0"
] | null | null | null | """
Greedy Module Solution for Utils control
"""
# Libraries
from typing import List
from functools import reduce
# Modules
from src.utils.math import (
list_negative,
invert_positions,
evaluate_fo
)
# Constants
COMPARE_VALUE = 99999999
def worst_solution(distance_matrix: List[List[float]]) -> List[int]:
    """Build the worst tour by running nearest-neighbour on the negated
    distance matrix (so the 'nearest' city is actually the farthest)."""
    negated = [list_negative(row) for row in distance_matrix]
    return neghbord_most_near(negated)
def neghbord_most_near(
distance_matrix: List[List[float]],
start_city: int = 0
) -> List[int]:
"""
get the city most near in distance
"""
neghbord_used = [start_city]
def city_most_near(line: int) -> int:
"""
Get City most near
"""
compare_value = COMPARE_VALUE
most_near = -1
for key, value in enumerate(distance_matrix[line]):
if (
line != key and
value < compare_value and
key not in neghbord_used
):
compare_value = value
most_near = key
neghbord_used.append(most_near)
return most_near
return list(map(
lambda x: city_most_near(x) if x != start_city else start_city,
range(len(distance_matrix))
))
def best_change_not_tabu(
        matrix_distance: List[List[float]],
        solution: List[int]
) -> (float, tuple):
    """
    Evaluate every pairwise swap of `solution` and return the best one.

    matrix_distance: List[List[float]] -> matrix of distances
    solution: List[int] -> current solution
    return (float, (posx, posy)) -> best objective value and the swap
        producing it ((-1, -1) when fewer than two positions exist)
    """
    best_value = 1E+100
    best_swap = (-1, -1)
    size = len(solution)
    for first in range(size - 1):
        # The original inner bound `first+1 if first+1 != size else size` was
        # a no-op: first <= size-2, so first+1 < size always holds.
        for second in range(first + 1, size):
            candidate = evaluate_fo(
                matrix_distance,
                invert_positions(solution, first, second)
            )
            if candidate < best_value:
                best_value = candidate
                best_swap = (first, second)
    return (best_value, best_swap)
def generate_local_search(
        matrix_distance: List[List[float]],
        solution: List[int]
) -> (int, List[int]):
    """
    Greedy local search: repeatedly apply the best pairwise swap while the
    objective function value keeps improving.

    matrix_distance: List[List[float]] -> matrix of distances
    solution: List[int] -> initial solution
    Returns a 3-tuple (iterations, best objective value, final solution).
    NOTE(review): the declared return annotation (int, List[int]) does not
    match the actual 3-tuple — confirm which is intended.
    """
    counter = 0
    manage = True
    best_change = best_change_not_tabu(matrix_distance, solution)
    prev_change = (1E+100,)  # sentinel so the first comparison always continues
    while manage:
        if prev_change[0] < best_change[0]:
            # The latest best move is worse than the previous one: stop.
            manage = False
        else:
            prev_change = best_change
            # NOTE(review): best_change is recomputed on the *unmodified*
            # solution and then its swap is applied — confirm this ordering
            # (recompute-then-apply) is intended.
            best_change = best_change_not_tabu(matrix_distance, solution)
            solution = invert_positions(
                solution,
                origin=best_change[1][0],
                destiny=best_change[1][1]
            )
            counter += 1
    return (
        counter,
        # Report the better of the last two objective values; a previous
        # value of exactly 0 is skipped in favour of the latest one.
        (
            prev_change[0]
            if prev_change[0] < best_change[0] and
            prev_change[0] != 0
            else best_change[0]
        ),
        solution
    )
| 25.870968 | 73 | 0.5798 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 625 | 0.194825 |
e45010e55211f1d8b353af0fb64ccf62757ae1c3 | 5,649 | py | Python | codes/models/modules/Inv_arch.py | lin-zhao-resoLve/Symmetric-Enhancement | 11c1a662020582d1333d11cf5f9c99556ec0f427 | [
"Apache-2.0"
] | 14 | 2021-09-30T07:05:04.000Z | 2022-03-31T08:22:39.000Z | codes/models/modules/Inv_arch.py | lin-zhao-resoLve/Symmetric-Enhancement | 11c1a662020582d1333d11cf5f9c99556ec0f427 | [
"Apache-2.0"
] | 3 | 2021-11-09T06:52:13.000Z | 2021-11-20T08:00:46.000Z | codes/models/modules/Inv_arch.py | lin-zhao-resoLve/Symmetric-Enhancement | 11c1a662020582d1333d11cf5f9c99556ec0f427 | [
"Apache-2.0"
] | null | null | null | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from models.modules.model.vgg16 import Vgg16
import os
# Load a VGG-16 and extract fixed convolution kernels that the
# Downsampling/Upsampling modules below use as frozen "encoding" filters.
# NOTE(review): the weight path is resolved relative to the current working
# directory, so this module only imports when the process starts from the
# expected root — confirm.
vgg = Vgg16()
vgg.load_state_dict(torch.load(os.path.join(os.path.abspath('.'), 'models/modules/model/', 'vgg16.weight')))
params = list(vgg.named_parameters())
# params[0] / params[2] are presumably the weight tensors of the first two
# conv layers (the odd indices being their biases) — TODO confirm against Vgg16.
encoding1 = params[0][1].data
encoding2 = params[2][1].data
class InvBlockExp(nn.Module):
    """Invertible affine coupling block (RealNVP-style).

    The channel dimension is split into two parts of sizes
    ``channel_split_num`` and ``channel_num - channel_split_num``. Three
    subnetworks built by ``subnet_constructor(in_ch, out_ch)`` parameterise
    an affine coupling that can be run exactly in reverse with ``rev=True``.
    """

    def __init__(self, subnet_constructor, channel_num, channel_split_num, clamp=1.):
        super(InvBlockExp, self).__init__()
        self.split_len1 = channel_split_num
        self.split_len2 = channel_num - channel_split_num
        self.clamp = clamp
        # F: second half -> additive update of the first half.
        self.F = subnet_constructor(self.split_len2, self.split_len1)
        # G: first half -> additive update of the second half.
        self.G = subnet_constructor(self.split_len1, self.split_len2)
        # H: first half -> log-scale, bounded to (-clamp, clamp) via sigmoid.
        self.H = subnet_constructor(self.split_len1, self.split_len2)

    def forward(self, x, rev=False):
        """Apply the coupling (or its exact inverse when ``rev`` is True)."""
        part1 = x.narrow(1, 0, self.split_len1)
        part2 = x.narrow(1, self.split_len1, self.split_len2)
        if rev:
            # Recover the scale from part1, then undo the scale and shifts.
            self.s = self.clamp * (torch.sigmoid(self.H(part1)) * 2 - 1)
            out2 = (part2 - self.G(part1)) / torch.exp(self.s)
            out1 = part1 - self.F(out2)
        else:
            out1 = part1 + self.F(part2)
            self.s = self.clamp * (torch.sigmoid(self.H(out1)) * 2 - 1)
            out2 = part2 * torch.exp(self.s) + self.G(out1)
        return torch.cat((out1, out2), 1)

    def jacobian(self, x, rev=False):
        """Per-batch-element log-determinant term from the last forward call."""
        total = torch.sum(self.s)
        return (total if not rev else -total) / x.shape[0]
# class encoder(nn.Module):
# def __init__(self, in_channels, out_channels, num_features):
# super(encoder, self).__init__()
# stride = 1
# padding = 1
# kernel_size = 3
# self.conv1 = nn.Conv2d(in_channels, 2*num_features, kernel_size, stride=stride, padding=padding)
# self.conv2 = nn.Conv2d(2*num_features, num_features, kernel_size, stride=stride, padding=padding)
# self.conv3 = nn.Conv2d(num_features, out_channels, kernel_size=1, stride=1)
# self.prelu = nn.PReLU(num_parameters=1, init=0.2)
#
# def forward(self, x, rev=False):
# x1 = self.prelu(self.conv1(x))
# x2 = self.prelu(self.conv2(x1))
# x3 = self.prelu(self.conv3(x2))
# return x3
class Downsampling(nn.Module):
    """Fixed transform applying two frozen convolution kernels taken from
    VGG-16 (despite the 'haar' attribute names, these are the VGG
    ``encoding1``/``encoding2`` kernels defined at module level); the
    reverse pass applies the matching transposed convolutions.
    """

    def __init__(self, channel_in):
        super(Downsampling, self).__init__()
        self.channel_in = channel_in
        # Registered as parameters (so .to()/.cuda() move them) but frozen.
        self.haar_weights1 = nn.Parameter(encoding1, requires_grad=False)
        self.haar_weights2 = nn.Parameter(encoding2, requires_grad=False)

    def forward(self, x, rev=False):
        """Forward: conv w1 then w2. Reverse: transposed conv w2 then w1."""
        if rev:
            features = F.conv_transpose2d(x, self.haar_weights2, bias=None, stride=1, padding=1, groups=1)
            return F.conv_transpose2d(features, self.haar_weights1, bias=None, stride=1, padding=1, groups=1)
        features = F.conv2d(x, self.haar_weights1, bias=None, stride=1, padding=1, groups=1)
        return F.conv2d(features, self.haar_weights2, bias=None, stride=1, padding=1, groups=1)

    def jacobian(self, x, rev=False):
        # NOTE(review): self.last_jac is never assigned anywhere in this
        # class, so calling this raises AttributeError — confirm intent.
        return self.last_jac
class Upsampling(nn.Module):
    """Inverse counterpart of Downsampling: transposed convolutions with the
    same frozen VGG-16 kernels on the forward pass, plain convolutions when
    run in reverse.
    """

    def __init__(self, channel_in):
        super(Upsampling, self).__init__()
        self.channel_in = channel_in
        # Registered as parameters (so .to()/.cuda() move them) but frozen.
        self.haar_weights1 = nn.Parameter(encoding1, requires_grad=False)
        self.haar_weights2 = nn.Parameter(encoding2, requires_grad=False)

    def forward(self, x, rev=False):
        """Forward: transposed conv w2 then w1. Reverse: conv w1 then w2."""
        if rev:
            features = F.conv2d(x, self.haar_weights1, bias=None, stride=1, padding=1, groups=1)
            return F.conv2d(features, self.haar_weights2, bias=None, stride=1, padding=1, groups=1)
        features = F.conv_transpose2d(x, self.haar_weights2, bias=None, stride=1, padding=1, groups=1)
        return F.conv_transpose2d(features, self.haar_weights1, bias=None, stride=1, padding=1, groups=1)

    def jacobian(self, x, rev=False):
        # NOTE(review): self.last_jac is never assigned anywhere in this
        # class, so calling this raises AttributeError — confirm intent.
        return self.last_jac
class InvRescaleNet(nn.Module):
    """Invertible rescaling network: per scale, a fixed Downsampling layer
    followed by ``block_num[i]`` InvBlockExp coupling blocks, closed by a
    final Upsampling layer.

    :param channel_in: number of input channels.
    :param channel_out: channel split size handed to each InvBlockExp.
    :param subnet_constructor: callable ``(in_ch, out_ch) -> nn.Module``
        used by the coupling blocks.
    :param block_num: per-scale number of coupling blocks; needs at least
        ``down_num`` entries when ``down_num > 0``.
    :param down_num: number of Downsampling stages.
    """

    def __init__(self, channel_in=3, channel_out=3, subnet_constructor=None, block_num=None, down_num=2):
        super(InvRescaleNet, self).__init__()
        # Fixed: the original signature used a mutable default (block_num=[]),
        # which is shared between calls; None keeps the interface compatible.
        if block_num is None:
            block_num = []
        operations = []
        current_channel = channel_in
        for i in range(down_num):
            operations.append(Downsampling(current_channel))
            current_channel = 64  # Downsampling emits 64 feature maps (VGG conv kernels).
            for j in range(block_num[i]):
                operations.append(InvBlockExp(subnet_constructor, current_channel, channel_out))
        operations.append(Upsampling(current_channel))
        self.operations = nn.ModuleList(operations)

    def forward(self, x, rev=False, cal_jacobian=False):
        """Run all layers forward, or in reverse order when ``rev`` is True.

        When ``cal_jacobian`` is True, also accumulate every layer's
        jacobian term and return ``(out, jacobian)``; otherwise return the
        output tensor alone.
        """
        out = x
        jacobian = 0
        ops = self.operations if not rev else reversed(self.operations)
        for op in ops:
            out = op.forward(out, rev)
            if cal_jacobian:
                jacobian += op.jacobian(out, rev)
        if cal_jacobian:
            return out, jacobian
        return out
| 36.211538 | 108 | 0.615861 | 4,512 | 0.798725 | 0 | 0 | 0 | 0 | 0 | 0 | 777 | 0.137546 |
e450e0a78fcbebd70da772f87d262f552594b525 | 56 | py | Python | FrontRunner.py | mmaist/FrontRunner | 05095421b69a0a5ccf4ef53ae3dc35b8e8b926b7 | [
"MIT"
] | 1 | 2021-02-18T10:41:36.000Z | 2021-02-18T10:41:36.000Z | FrontRunner.py | mmaist/FrontRunner | 05095421b69a0a5ccf4ef53ae3dc35b8e8b926b7 | [
"MIT"
] | null | null | null | FrontRunner.py | mmaist/FrontRunner | 05095421b69a0a5ccf4ef53ae3dc35b8e8b926b7 | [
"MIT"
] | null | null | null | import time
import random
import json
import requests
| 8 | 15 | 0.821429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
e4520356b6e60cb7ea00f5353a2466e715bcd995 | 1,642 | py | Python | py_algo/dynamic_programming/introduction/equal_array.py | Sk0uF/Algorithms | 236cc5b056ce2637d5d947c5fc1e3367cde886bf | [
"MIT"
] | 1 | 2021-07-05T15:39:04.000Z | 2021-07-05T15:39:04.000Z | py_algo/dynamic_programming/introduction/equal_array.py | Sk0uF/Algorithms | 236cc5b056ce2637d5d947c5fc1e3367cde886bf | [
"MIT"
] | null | null | null | py_algo/dynamic_programming/introduction/equal_array.py | Sk0uF/Algorithms | 236cc5b056ce2637d5d947c5fc1e3367cde886bf | [
"MIT"
] | 1 | 2021-09-02T21:31:34.000Z | 2021-09-02T21:31:34.000Z | """
Codemonk link: https://www.hackerearth.com/practice/algorithms/dynamic-programming/introduction-to-dynamic-programming-1/practice-problems/algorithm/equal-array-84cf6c5f/
You are given an array A of size N. Find the minimum non negative number X such that there exists an index j that when
you can replace Aj by Aj+X, the sum of elements of the array from index 1 to j and j+1 to N become equal where
1 <= j <= N-1. Assume array to be 1-indexed. If there is no possible X print -1 in a separate line.
Input - Output:
The first line contains the number of test cases.
The first line of each test case contains an integer N,which denotes the size of the array.
The second line contains N space-separated integers where the ith integer denotes Ai.
Sample input:
1
5
1 2 3 2 1
Sample Output:
3
"""
"""
We can simply find the partial sums array, iterate throught the array end at each step check for the minimum X number
that is required.
Final complexity: O(N)
"""
t = int(input())  # number of test cases
for _ in range(t):
    n = int(input())  # size of the array
    array = list(map(int, input().split()))
    # partial_sums[i] = array[0] + ... + array[i] (0-indexed prefix sums).
    partial_sums = [array[0]]
    for i in range(1, n):
        partial_sums.append(array[i]+partial_sums[i-1])
    # Splitting after index i and adding X to the prefix balances the halves
    # when prefix + X == total - prefix, i.e. X = total - 2*prefix.  Since X
    # must be non-negative, only prefixes below half the total qualify.
    ans = float("inf")
    stop = False
    # NOTE(review): i runs up to n-1 (a split after the last element), while
    # the problem restricts j <= N-1 — confirm this edge case is harmless
    # under the input constraints.
    for i in range(n):
        if partial_sums[i] < partial_sums[-1] - partial_sums[i]:
            val = partial_sums[-1] - 2*partial_sums[i]
            ans = min(ans, val)
        if partial_sums[i] == partial_sums[-1] - partial_sums[i]:
            # Already balanced at this split: X = 0 is optimal, stop early.
            print(0)
            stop = True
            break
    if not stop:
        if ans != float("inf"):
            print(ans)
        else:
            # No prefix can be raised to match its suffix.
            print(-1)
| 30.407407 | 170 | 0.658343 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 980 | 0.596833 |
e4526af2d705bb3c47b1ba3a6b79144d1876aeeb | 1,331 | py | Python | model.py | mollikka/Penrose | 6d9870f54e9810f7e2f4ea82bb619424785a65db | [
"MIT"
] | 1 | 2019-07-17T02:46:45.000Z | 2019-07-17T02:46:45.000Z | model.py | mollikka/Penrose | 6d9870f54e9810f7e2f4ea82bb619424785a65db | [
"MIT"
] | null | null | null | model.py | mollikka/Penrose | 6d9870f54e9810f7e2f4ea82bb619424785a65db | [
"MIT"
] | null | null | null | from itertools import chain
phi = 1.61803398875
class PenroseModel:
    """Penrose tiling state with undoable subdivision steps."""

    def __init__(self, start_state):
        # Current tiles plus a stack of previous generations for undo.
        self.tiles = start_state
        self.history = []

    def split(self):
        """Subdivide every tile once, remembering the previous generation."""
        self.history.append(list(self.tiles))
        self.tiles = [piece for tile in self.tiles for piece in tile.split()]

    def desplit(self):
        """Undo the most recent split(); no-op when no history remains."""
        if self.history:
            self.tiles = self.history.pop()

    def get_tiles(self):
        """Return the current list of tiles."""
        return self.tiles
class HalfDart:
    """Half of a Penrose 'dart', stored as three 2-D corner points a, b, c."""

    def __init__(self, A, B, C):
        self.a = A
        self.b = B
        self.c = C

    def split(self):
        """Deflate this half-dart into one half-kite and one smaller half-dart."""
        corner_a, corner_b, corner_c = self.a, self.b, self.c
        ax, ay = corner_a
        cx, cy = corner_c
        # f lies on segment c->a, a fraction 1/phi of the way along.
        f = (cx + (ax - cx) * (1 / phi), cy + (ay - cy) * (1 / phi))
        return [HalfKite(f, corner_c, corner_b), HalfDart(corner_b, f, corner_a)]
class HalfKite:
    """Half of a Penrose 'kite', stored as three 2-D corner points a, b, c."""

    def __init__(self, A, B, C):
        self.a = A
        self.b = B
        self.c = C

    def split(self):
        """Deflate this half-kite into one half-dart and two half-kites."""
        corner_a, corner_b, corner_c = self.a, self.b, self.c
        ax, ay = corner_a
        bx, by = corner_b
        cx, cy = corner_c
        # g lies on segment b->c at a fraction 1/phi; f on b->a at 1/phi**2.
        g = (bx + (cx - bx) * (1 / phi), by + (cy - by) * (1 / phi))
        f = (bx + (ax - bx) * (1 / (phi ** 2)), by + (ay - by) * (1 / (phi ** 2)))
        return [HalfDart(g, f, corner_b),
                HalfKite(corner_c, corner_a, g),
                HalfKite(f, corner_a, g)]
| 22.183333 | 72 | 0.480841 | 1,276 | 0.958678 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
e45397111350f9273e2cc86843e6973c134d6e85 | 1,465 | py | Python | src/tests/unittests/configuration_helper/adapters/test_keysight_e8267d_instrument_adapter.py | QuTech-Delft/qilib | a87892f8a9977ed338c36e8fb1e262b47449cf44 | [
"MIT"
] | 1 | 2019-02-20T16:56:30.000Z | 2019-02-20T16:56:30.000Z | src/tests/unittests/configuration_helper/adapters/test_keysight_e8267d_instrument_adapter.py | QuTech-Delft/qilib | a87892f8a9977ed338c36e8fb1e262b47449cf44 | [
"MIT"
] | 22 | 2019-02-16T06:10:55.000Z | 2022-02-15T18:52:34.000Z | src/tests/unittests/configuration_helper/adapters/test_keysight_e8267d_instrument_adapter.py | QuTech-Delft/qilib | a87892f8a9977ed338c36e8fb1e262b47449cf44 | [
"MIT"
] | 2 | 2020-02-04T08:46:21.000Z | 2020-10-18T16:31:58.000Z | import unittest
from unittest.mock import call, patch, Mock, MagicMock
from qilib.configuration_helper import InstrumentAdapterFactory
class TestKeysightE8267DInstrumentAdapter(unittest.TestCase):
    """Unit test for the Keysight E8267D instrument adapter."""

    def test_read_filter_out_val_mapping(self):
        # Patch the underlying driver class so no real instrument is needed.
        with patch('qilib.configuration_helper.adapters.keysight_e8267d_instrument_adapter.Keysight_E8267D') \
                as mock_instrument:
            mock_instrument_instance = MagicMock()
            mock_instrument.return_value = mock_instrument_instance
            # Snapshot with one plain parameter and two parameters carrying
            # mapping dictionaries that read() is expected to strip out.
            mock_instrument_instance.snapshot.return_value = {
                'name': 'some_keysight',
                'parameters': {
                    'good_parameter': {'value': 42},
                    'filtered_parameter_1': {'val_mapping': {1: True, 0: False}, 'value': False},
                    'filtered_parameter_2': {'on_off_mapping': {1: 'ON', 0: 'OFF'}, 'value': 'OFF'}
                }
            }
            adapter = InstrumentAdapterFactory.get_instrument_adapter('KeysightE8267DInstrumentAdapter', 'fake')
            config = adapter.read()
            # Mapping keys must be filtered out while the values survive intact.
            self.assertNotIn('val_mapping', config['filtered_parameter_1'])
            self.assertNotIn('on_off_mapping', config['filtered_parameter_2'])
            self.assertEqual(42, config['good_parameter']['value'])
            self.assertFalse(config['filtered_parameter_1']['value'])
            self.assertEqual('OFF', config['filtered_parameter_2']['value'])
            adapter.close_instrument()
| 47.258065 | 112 | 0.661433 | 1,325 | 0.904437 | 0 | 0 | 0 | 0 | 0 | 0 | 443 | 0.302389 |
e455b64eee36fc129ded8331905ce5976719baa2 | 1,364 | py | Python | scripts/mint.py | tomazmm/artsyapes-contract | 95b10e1c73aa4e0712ff8d5162271e84aec91810 | [
"Apache-2.0"
] | null | null | null | scripts/mint.py | tomazmm/artsyapes-contract | 95b10e1c73aa4e0712ff8d5162271e84aec91810 | [
"Apache-2.0"
] | null | null | null | scripts/mint.py | tomazmm/artsyapes-contract | 95b10e1c73aa4e0712ff8d5162271e84aec91810 | [
"Apache-2.0"
] | null | null | null | import json
import pprint
import random
from terra_sdk.core import AccAddress, Coins
from terra_sdk.core.auth import StdFee
from terra_sdk.core.broadcast import BlockTxBroadcastResult
from scripts.deploy import owner, lt
from terra_sdk.core.wasm import MsgExecuteContract
def mint(contract_address: str):
mint_msg = MsgExecuteContract(
owner.key.acc_address,
AccAddress(contract_address),
{
"mint": {
"token_id": str(random.randint(1, 1000000)),
"owner": owner.key.acc_address,
"token_uri": "www.ipfs_link"
}
}
)
mint_tx = owner.create_and_sign_tx(msgs=[mint_msg], fee=StdFee(1000000, Coins(uluna=1000000)))
mint_tx_result = lt.tx.broadcast(mint_tx)
# print_tx_result(mint_tx_result)
def print_tx_result(tx_result: BlockTxBroadcastResult):
print(f"Height: {tx_result.height}")
print(f"TxHash: {tx_result.txhash}")
for event in tx_result.logs[0].events:
print(f"{event['type']} : {pprint.pformat(event['attributes'])}")
def main():
try:
with open("contract.json", "r") as f:
data = json.load(f)
mint(data['contract_address'])
except FileNotFoundError:
print("Contract.json file not found.\nDeploy contract before minting NFTs.")
if __name__ == '__main__':
main()
| 28.416667 | 98 | 0.662023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 313 | 0.229472 |
e45875441dea1d18e8ce1f3858f85bde9799b868 | 281 | py | Python | url_shortener/exceptions.py | alena-kono/simple-shortener | d1549b342e190ff70509ce5b442cb31376f2a07a | [
"MIT"
] | null | null | null | url_shortener/exceptions.py | alena-kono/simple-shortener | d1549b342e190ff70509ce5b442cb31376f2a07a | [
"MIT"
] | null | null | null | url_shortener/exceptions.py | alena-kono/simple-shortener | d1549b342e190ff70509ce5b442cb31376f2a07a | [
"MIT"
] | null | null | null | from core.exceptions import BaseProjectException
class InvalidUrlError(BaseProjectException):
"""Exception raised when invalid url is passed."""
pass
class MissingPrimaryKeyError(BaseProjectException):
"""Exception raised when primary key is missing."""
pass
| 20.071429 | 55 | 0.761566 | 226 | 0.80427 | 0 | 0 | 0 | 0 | 0 | 0 | 101 | 0.359431 |
e4589a7ec39dfb446ef1fe4c8fd01bbb42b8704d | 1,507 | py | Python | enbios/processing/indicators/__init__.py | ENVIRO-Module/enbios | 10e93df9a168627833eca6d04e4e2b864de8e8d9 | [
"BSD-3-Clause"
] | 2 | 2022-01-28T09:38:28.000Z | 2022-01-28T09:38:32.000Z | enbios/processing/indicators/__init__.py | ENVIRO-Module/enbios | 10e93df9a168627833eca6d04e4e2b864de8e8d9 | [
"BSD-3-Clause"
] | 1 | 2022-01-27T21:42:42.000Z | 2022-01-27T21:42:42.000Z | enbios/processing/indicators/__init__.py | ENVIRO-Module/enbios | 10e93df9a168627833eca6d04e4e2b864de8e8d9 | [
"BSD-3-Clause"
] | null | null | null | import math
from nexinfosys.model_services import State
materials = {
"Aluminium",
"Antimony",
"Arsenic",
"Baryte",
"Beryllium",
"Borates",
"Cadmium",
"Cerium",
"Chromium",
"Cobalt",
"Copper",
"Diatomite",
"Dysprosium",
"Europium",
"Fluorspar",
"Gadolinium",
"Gallium",
"Gold",
"Gypsum",
"IronOre",
"KaolinClay",
"Lanthanum",
"Lead",
"Lithium",
"Magnesite",
"Magnesium",
"Manganese",
"Molybdenum",
"NaturalGraphite",
"Neodymium",
"Nickel",
"Palladium",
"Perlite",
"Phosphorus",
"Platinum",
"Praseodymium",
"Rhenium",
"Rhodium",
"Samarium",
"Selenium",
"SiliconMetal",
"Silver",
"Strontium",
"Sulphur",
"Talc",
"Tantalum",
"Tellurium",
"Terbium",
"Tin",
"Titanium",
"Tungsten",
"Vanadium",
"Yttrium",
"Zinc",
"Zirconium"
}
def supply_risk(state: State):
sr = 0
for i in materials:
ri = state.get(i)
if ri is not None:
SRi = state.get(f"sr{i}")
ci = state.get(f"c{i}")
sr += ri*SRi/ci
return sr
def recycling_rate(state: State):
rr_num = 0
rr_denom = 0
for i in materials:
ri = state.get(i)
if ri is not None:
RRi = state.get(f"rr{i}")
rr_num += ri*RRi
rr_denom += ri
if rr_denom != 0.0:
return rr_num / rr_denom
else:
return -1.0
| 17.125 | 43 | 0.50564 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 564 | 0.374253 |
e45a47a7a23107da9b1e4e894dbe004e6d56eaf1 | 2,933 | py | Python | Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py | mrankitgupta/PythonLessons | 119efc58518c5b35c6647009c74ff96728f851fa | [
"MIT"
] | null | null | null | Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py | mrankitgupta/PythonLessons | 119efc58518c5b35c6647009c74ff96728f851fa | [
"MIT"
] | null | null | null | Python Exercises/Exercise 8 - Functions/Functions- Decorators & Generators.py | mrankitgupta/PythonLessons | 119efc58518c5b35c6647009c74ff96728f851fa | [
"MIT"
] | null | null | null | # defining a decorator
def hello_decorator(func):
# inner1 is a Wrapper function in which the argument is called inner function can access the outer local functions like in this case "func"
def inner1():
print("Hello, this is before function execution")
# calling the actual function now inside the wrapper function.
func()
print("This is after function execution")
return inner1
# defining a function, to be called inside wrapper
def function_to_be_used():
print("This is inside the function !!")
# passing 'function_to_be_used' inside the decorator to control its behaviour
function_to_be_used = hello_decorator(function_to_be_used)
# calling the function
function_to_be_used()
# find out the execution time of a function using a decorator
# importing libraries
import time
import math
# decorator to calculate duration # taken by any function.
def calculate_time(func):
# added arguments inside the inner1, if function takes any arguments, can be added like this.
def inner1(*args, **kwargs):
# storing time before function execution
begin = time.time()
func(*args, **kwargs)
# storing time after function execution
end = time.time()
print("Total time taken in : ", func.__name__, end - begin)
return inner1
# this can be added to any function present, in this case to calculate a factorial
@calculate_time
def factorial(num):
# sleep 2 seconds because it takes very less time so that you can see the actual difference
time.sleep(2)
print(math.factorial(num))
# calling the function.
factorial(10)
# Chaining Decorators
# code for testing decorator chaining
def decor1(func):
def inner():
x = func()
return x * x
return inner
def decor(func):
def inner():
x = func()
return 2 * x
return inner
@decor1
@decor
def num():
return 10
print(num())
# Decorators with parameters in Python
def decorator(*args, **kwargs):
print("Inside decorator")
def inner(func):
# code functionality here
print("Inside inner function")
print("I am", kwargs['name'])
func()
# returning inner function
return inner
@decorator(name="Ankit Gupta")
def my_func():
print("Inside actual function")
# Generator Function
# A generator function that yields 1 for first time, 2 second time and 3 third time
def simpleGeneratorFun():
yield 1
yield 2
yield 3
# Driver code to check above generator function
for value in simpleGeneratorFun():
print(value)
# A Python program to demonstrate use of generator object with next()
# A generator function
def simpleGeneratorFun():
yield 1
yield 2
yield 3
# x is a generator object
x = simpleGeneratorFun()
# Iterating over the generator object using next
print(x.next()) # In Python 3, __next__()
print(x.next())
print(x.next())
| 30.237113 | 144 | 0.691101 | 0 | 0 | 110 | 0.037504 | 306 | 0.10433 | 0 | 0 | 1,616 | 0.550972 |
e45a7bbe70e7b8614eb0c9109018644cf05fb490 | 24,654 | py | Python | src/1-topicmodeling.py | sofieditmer/topic_modeling | edfff3c4d45c932562f796cc81e9ce9fe35f8e4b | [
"MIT"
] | null | null | null | src/1-topicmodeling.py | sofieditmer/topic_modeling | edfff3c4d45c932562f796cc81e9ce9fe35f8e4b | [
"MIT"
] | null | null | null | src/1-topicmodeling.py | sofieditmer/topic_modeling | edfff3c4d45c932562f796cc81e9ce9fe35f8e4b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Info: This script performs topic modeling on the clean tweets by Donald Trump. The number of topics is estimated by computing coherence values for different number of topics, and an LDA model is constructed with the number of topics with the highest coherence value. Visualizations of the topics are created relying on pyLDAvis and wordcloud and these visualizations are saved in the output directory.
Parameters:
(optional) input_file: str <name-of-input-file>, default = clean_trump_tweets.csv
(optional) chunk_size: int <size-of-chunks>, default = 10
(optional) passes: int <number-of-passes>, default = 10
(optional) min_count: int <minimum-count-bigrams>, default = 2
(optional) threshold: int <threshold-for-keeping-phrases>, default = 100
(optional) iterations: int <number-of-iterations>, default = 100
(optional) rolling_mean: int <rolling-mean>, default = 50
(optional) step_size: int <size-of-steps>, default = 5
Usage:
$ python 1-topicmodeling.py
Output:
- topics.txt: overview of topics generated by the LDA model
- dominant_topic.csv: table showing the most dominant topics and their associated keywords as well as how much each topic contributes.
- topic_contributions.csv: a dataframe showing the most contributing keywords for each topic.
- topics_over_time.jpg: visualization of the topic contributions over time.
- topic_wordclouds.png: the topics visualized as word clouds.
"""
### DEPENDENCIES ###
# core libraries
import sys
import os
sys.path.append(os.path.join(".."))
# numpy, pandas, pyplot
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
# spaCy
import spacy
nlp = spacy.load("en_core_web_sm", disable=["ner"])
nlp.max_length = 68000000 # increasing maximum length
# pyLDAvis and seaborn for vizualisations
import pyLDAvis.gensim
import seaborn as sns
# matplotlib colors
import matplotlib.colors as mcolors
# wordcloud tools
from wordcloud import WordCloud
# LDA tools
import gensim
import gensim.corpora as corpora
from gensim.models import CoherenceModel
from utils import lda_utils
# Ignore warnings
import logging, warnings
warnings.filterwarnings('ignore')
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.ERROR)
# argparse
import argparse
### MAIN FUNCTION ###
def main():
### ARGPARSE ###
# Initialize ArgumentParser class
ap = argparse.ArgumentParser()
# Argument 1: Input file
ap.add_argument("-i", "--input_filename",
type = str,
required = False, # not required argument
help = "Name of input file",
default = "clean_trump_tweets.csv") # default
# Argument 2: Number of passes
ap.add_argument("-p", "--n_passes",
type = int,
required = False, # not required argument
help = "Define the number of passes which is the number of times you want the model to go through the entire corpus.",
default = 10) # default number of passes
# Argument 3: Minimum count for bigrams
ap.add_argument("-m", "--min_count",
type = int,
required = False, # not required argument
help = "Define the minimum count for bigrams to occur to be included",
default = 2) # default minimum count
# Argument 4: Threshold
ap.add_argument("-th", "--threshold",
type = int,
required = False, # not required argument
help = "Define the threshold which determines which phrases to include and which to exlude. The higher the threshold, the fewer the number of phrases are included.",
default = 100) # default threshold
# Argument 5: Iterations
ap.add_argument("-it", "--n_iterations",
type = int,
required = False, # not required argument
help = "Define the number of iterations through each document in the corpus",
default = 100) # default number of iterations
# Argument 6: Rolling mean size
ap.add_argument("-r", "--rolling_mean",
type = int,
required = False, # not required argument
help = "Define the rolling mean which is the number of chunks of tweets to calculate contribution of at a time",
default = 50) # default
# Argument 7: Step size
ap.add_argument("-s", "--step_size",
type = int,
required = False, # not required argument
help = "Define the step size",
default = 5) # default step size
# Argument 8: Chunk size
ap.add_argument("-c", "--chunk_size",
type = int,
required = False, # not required argument
help = "Define the size of the chunks, i.e. how many tweets one chunk should consist of.",
default = 10) # default chunk size
# Parse arguments
args = vars(ap.parse_args())
# Save input parameters
input_file = os.path.join("..", "data", args["input_filename"])
n_passes = args["n_passes"]
min_count = args["min_count"]
threshold = args["threshold"]
n_iterations = args["n_iterations"]
rolling_mean = args["rolling_mean"]
step_size = args["step_size"]
chunk_size = args["chunk_size"]
# Create output directory if it does not already exist
if not os.path.exists(os.path.join("..", "output")):
os.mkdir(os.path.join("..", "output"))
# Start message to user
print("\n[INFO] Initializing topic modeling on all Donald Trump tweets from May 2009 to June 2020...")
# Instantiate the topic modeling class
topic_modeling = Topic_modeling(input_file)
# Load and prepare data
print(f"\n[INFO] Loading '{input_file}'...")
clean_tweets_df = topic_modeling.load_data()
# Chunk data
print(f"\n[INFO] Chunking the data into chunks of {chunk_size}...")
chunks = topic_modeling.chunk_tweets(clean_tweets_df, chunk_size)
# Process data
print("\n[INFO] Creating bigram and trigram models and performing lemmatization and part-of-speech-tagging...")
processed_data = topic_modeling.process_data(chunks, min_count, threshold)
# Create bag of words
print("\n[INFO] Creating dictionary and word corpus...")
id2word, corpus = topic_modeling.create_dict_corpus(processed_data)
# Estimate the optimal number of topics
print("\n[INFO] Finding the optimal number of topics...")
optimal_n_topics = topic_modeling.find_optimal_n_topics(processed_data, corpus, id2word, step_size)
# Print the optimal number of topics to the screen
print(f"\nThe optimal number of topics is {optimal_n_topics}")
# Create LDA model and compute perplexity and coherence scores
print("\n[INFO] Creating LDA model...")
lda_model, perplexity_score, coherence_score = topic_modeling.create_lda(processed_data, id2word, corpus, optimal_n_topics, chunk_size, n_passes, n_iterations)
# Create outputs
print("\n[INFO] Producing outputs and saving to 'output' directory...")
# Output 1
topic_modeling.create_output_1(lda_model, perplexity_score, coherence_score, optimal_n_topics)
print("\n[INFO] A txt-file containing the topics has been saved to output directory...")
# Output 2
df_dominant_topic, df_topic_keywords = topic_modeling.create_output_2(lda_model, corpus, processed_data, optimal_n_topics, )
print("\n[INFO] A dataframe showing the most dominant topic for each chunk has been saved to output directory...")
# Output 3
topic_modeling.create_output_3(df_dominant_topic, lda_model, corpus, processed_data, optimal_n_topics, df_topic_keywords)
print("\n[INFO] A dataframe showing the most contributing keywords for each topic has been saved to output directory...")
# Create visualization: topics over time with rolling mean
print("\n[INFO] Creating visualization of topic contributions over time...")
topic_modeling.visualize_topics(processed_data, rolling_mean, lda_model, corpus)
# Create word clouds of topics
print("\n[INFO] Creating word clouds of topics...")
topic_modeling.create_wordcloud(lda_model, optimal_n_topics)
# User message
print("\n[INFO] Done! You have now performed topic modeling on all of Donald Trump tweets from May 2009 to June 2020. The results have been saved in the 'output' folder.\n")
### TOPIC MODELING ###
# Creating Topic modeling class
class Topic_modeling:
# Intialize Preprocessing class
def __init__(self, input_file):
# Receive input
self.input_file = input_file
def load_data(self):
"""
This method loads the preprocessed data from the data folder.
"""
# Load data into dataframe with pandas
clean_tweets_df = pd.read_csv(self.input_file, lineterminator = "\n")
# Take only relevant columns
clean_tweets_df = clean_tweets_df.loc[:, ("id", "date", "clean_tweets")]
# Drop rows with missing values
clean_tweets_df = clean_tweets_df.dropna(subset=['clean_tweets'])
return clean_tweets_df
def chunk_tweets(self, clean_tweets_df, chunk_size):
"""
This method creates chunks of tweets and saves the chunks in a new column in the dataframe. Chuking the tweets
as opposed to having individual tweets is performed to ensure that clear topics are found. By chunking the tweets,
the topics become more interpretable.
"""
# Create empty list for chunks of tweets
chunks = []
# Create chunks of tweets
for i in range(0, len(clean_tweets_df["clean_tweets"]), chunk_size):
chunks.append(' '.join(clean_tweets_df["clean_tweets"][i:i+chunk_size]))
return chunks
def process_data(self, chunks, min_count, threshold):
"""
This method creates bigram and trigram models, and performs lemmatization and part-of-speech-tagging.
The threshold value determines which phrases to include. The higher the threshold, the fewer phrases are included,
because the most frequent bigrams are excluded. Removing the most frequent phrases ensures that only the most
semantically meaningful phrases are kept, and potential noise is filtered out.
The bigrams are created based on the words that appear one after another most frequently, and the bigrams are then
fed into a trigram generator which creates the trigrams based on the bigrams.
The output of this method is a list of the nouns, verbs, and adjectives within the data.
"""
# Create model of bigrams and trigrams
bigram = gensim.models.Phrases(chunks, min_count = min_count, threshold = threshold) # higher threshold fewer phrases. The min_count is the minimum number of times the bigram should occur to be included
trigram = gensim.models.Phrases(bigram[chunks], threshold = threshold) # the trigram model is based on the bigram model
# Fit the models to the data
bigram_mod = gensim.models.phrases.Phraser(bigram)
trigram_mod = gensim.models.phrases.Phraser(trigram)
# Lemmatize and part-of-speech tag
processed_data = lda_utils.process_words(chunks,
nlp,
bigram_mod,
trigram_mod,
allowed_postags=["NOUN", "VERB", "ADJ"]) # nouns, verbs, and adjectives
return processed_data
def create_dict_corpus(self, processed_data):
"""
This method creates the dictionary and corpus. In other words, it creates a representation of words within a document
in terms of how often an indivudal word occurs in each document. Hence, the documents are conceptualized as a
bag-of-words model. This means that we are no longer dealing with words, but rather we with distributions of word
frequencies (i.e., a numerical representation).
The dictionary is created by converting each word into an integer value. The corpus is created by converting the
documents to a "bag of words" model.
"""
# Create dictionary
id2word = corpora.Dictionary(processed_data)
# Create Ccrpus: Term Document Frequency
corpus = [id2word.doc2bow(text) for text in processed_data]
return id2word, corpus
def find_optimal_n_topics(self, processed_data, corpus, id2word, step_size):
"""
This method runs the model multiple times with different numbers of topics and find the optimal number based
on the maximum coherence value. Hence, the number of topics with the highest coherence value is chosen as the
most optimal number of topics. A high coherence value ensures that the topics are "coherent", i.e., meaningful.
"""
# Run model multiple times
model_list, coherence_values = lda_utils.compute_coherence_values(texts = processed_data,
corpus = corpus,
dictionary = id2word,
start = 5,
limit = 15,
step = step_size)
# Find the maximum coherence value
max_coherence = np.argmax(coherence_values)
# Find the number of topics corresponding to the maximum coherence value
optimal_n_topics = model_list[max_coherence].num_topics
return optimal_n_topics
def create_lda(self, processed_data, id2word, corpus, optimal_n_topics, chunk_size, n_passes, n_iterations):
"""
This method builds the LDA model using gensim's multicore function, and computes perplexity and coherence scores.
When we are calculating perplexity we measure how well the model is performing, i.e. the amount of error.
Ideally, error ("surprise") should be low, because this implies that when the model encounters new data,
it is less "surprised". Hence, the perplexity score should be minimized. Ideally, the coherence value should be high.
A high coherence value means that the topics are very coherent, which means that the topics actually correspond/relate
to something in the data.
"""
# Define and run LDA model
lda_model = gensim.models.LdaMulticore(corpus=corpus, # vectorized corpus (list of lists of tuples)
id2word=id2word, # gensim dictionary (mapping words to IDs)
num_topics=optimal_n_topics, # number of topics
random_state=100, # random state for reproducibility
chunksize=chunk_size, # the number of chunks to process at a time. Rather than processing one chunk at a time, we process batches of 10 chunkes which is more efficient. Increasing the chunk/batch size means that the model will train quicker.
passes=n_passes, # passes/epochs is the number of times the model should go through the entire corpus.
iterations=n_iterations, # the number of iterations is how often the model go through the single document in the corpus
per_word_topics=True, # defining word distributions for greater interpretatbility
minimum_probability=0.0) # in some cases a topic does not appear at all in any document, and I do not want to exclude these topics but rather keep them, which is why I set it to return 0 instead of nothing which is the default
# Calculate perplexity score
perplexity_score = lda_model.log_perplexity(corpus)
# Calculate coherence score
coherence_model_lda = CoherenceModel(model=lda_model,
texts=processed_data,
dictionary=id2word,
coherence='c_v')
coherence_score = coherence_model_lda.get_coherence()
return lda_model, perplexity_score, coherence_score
def create_output_1(self, lda_model, perplexity_score, coherence_score, optimal_n_topics):
"""
This method creates a txt-file containing the topics, perplexity, and coherence scors and saves it to
the 'output' directory.
"""
# Extract the topics
topics = lda_model.print_topics()
# Define path
out_path = os.path.join("..", "output", "topics.txt")
# Write txt-file containing the topics, perplexity, and coherence scores
with open(out_path, "w+") as f:
# Print how many topics the model has
f.writelines(f"The model has {optimal_n_topics} topics.\n")
# Print perplexity and coherence scores
f.writelines(f"Perplexity score: {perplexity_score}, Coherence score: {coherence_score} \n")
# Print topics
f.writelines(f"\nOverview of topics: \n {topics}")
def create_output_2(self, lda_model, corpus, processed_data, optimal_n_topics):
"""
This method creates a dataframe showing the most dominant topic for each chunk and saves it to the 'output' directory.
"""
# Find keywords for each topic
df_topic_keywords = lda_utils.format_topics_sentences(ldamodel=lda_model,
corpus=corpus,
texts=processed_data)
# Find the most dominant topic per chunk
df_dominant_topic = df_topic_keywords.reset_index()
df_dominant_topic.columns = ['Chunk_No', 'Dominant_Topic', 'Topic_Perc_Contrib', 'Keywords', 'Text']
# Save dataframe to output folder
output_path = os.path.join("..", "output", "dominant_topic.csv")
df_dominant_topic.to_csv(output_path, index = False)
return df_dominant_topic, df_topic_keywords
def create_output_3(self, df_dominant_topic, lda_model, corpus, processed_data, optimal_n_topics, df_topic_keywords):
"""
This method creates a dataframe showing the most contributing keywords for each topic and saves it to
the 'output' directory.
"""
# Display setting to show more characters in column
pd.options.display.max_colwidth = 100
# Create dataframe
sentence_topics_sorted_df = pd.DataFrame()
# Group keywords by the most dominant topic
sentence_topics_grouped = df_topic_keywords.groupby('Dominant_Topic')
# Compute how much each topic contribtues in percentage
for i, grp in sentence_topics_grouped:
sentence_topics_sorted_df = pd.concat([sentence_topics_sorted_df, grp.sort_values(['Perc_Contribution'], ascending=False).head(1)], axis=0)
# Reset index
sentence_topics_sorted_df.reset_index(drop=True, inplace=True)
# Define columns in dataframe
sentence_topics_sorted_df.columns = ['Topic_Num', "Topic_Perc_Contrib", "Keywords", "Representative Text"]
# Save dataframe to output-folder
output_path = os.path.join("..", "output", "topic_contributions.csv")
sentence_topics_sorted_df.to_csv(output_path, index = False)
def visualize_topics(self, processed_data, rolling_mean, lda_model, corpus):
"""
This method creates visualizations using pyLDAvis and seaborn and saves these in the 'output' folder.
"""
# Create viz object
viz = pyLDAvis.gensim.prepare(lda_model,
corpus,
dictionary = lda_model.id2word)
# Save visualization as html-file
out_path = os.path.join("..", "output", "lda_topics.html")
pyLDAvis.save_html(viz, out_path)
# Create list of values. The first entry is the topic, and the second entry is how much it contributes (percentage)
values = list(lda_model.get_document_topics(corpus))
# Split the values and keep only the values per topic
split = []
for entry in values:
topic_prevelance = []
for topic in entry:
topic_prevelance.append(topic[1])
split.append(topic_prevelance)
# Create document-topic matrix
matrix = pd.DataFrame(map(list,zip(*split)))
# Create plot with rolling mean
lineplot = sns.lineplot(data=matrix.T.rolling(rolling_mean).mean())
# Set axes labels
lineplot.set(xlabel="Tweet Chunks/batches", ylabel = "Topic Percentage Contribution")
# Set title of plot
lineplot.set_title('Topic Contribution Over Time', size = 20)
# Set title of legend
lineplot.legend(title='Topic', loc='upper right')
# Get figure to be able to save
fig = lineplot.get_figure()
# Specifiy outputpath
out_path = os.path.join("..", "output", "topics_over_time.jpg")
# Save lineplot to output directory
fig.savefig(out_path)
def create_wordcloud(self, lda_model, optimal_n_topics):
"""
This method takes the topics and creates word clouds to make the overview of the topics easier.
This method was inspired by the following article, but modified to fit to the this particular project:
https://www.machinelearningplus.com/nlp/topic-modeling-visualization-how-to-present-results-lda-models/
"""
# Create list of colors from the matplotlib.colors
cols = [color for name, color in mcolors.TABLEAU_COLORS.items()]
# Define word cloud
cloud = WordCloud(background_color='white',
width=2500,
height=2500,
max_words=10,
colormap='tab10',
color_func=lambda *args, **kwargs: cols[i],
prefer_horizontal=1.0)
# LDA topics
topics = lda_model.show_topics(num_topics = optimal_n_topics, formatted=False)
# Define subplots. Since the number of subplots depends on the number of topics found I use an if-statement
if (optimal_n_topics == 5):
fig, axes = plt.subplots(1, 5, figsize=(20,20), sharex=True, sharey=True)
if (optimal_n_topics == 10):
fig, axes = plt.subplots(2, 5, figsize=(20,20), sharex=True, sharey=True)
if (optimal_n_topics == 15):
fig, axes = plt.subplots(3, 5, figsize=(20,20), sharex=True, sharey=True)
# Generate a word cloud for each topic
for i, ax in enumerate(axes.flatten()):
fig.add_subplot(ax)
topic_words = dict(topics[i][1])
cloud.generate_from_frequencies(topic_words, max_font_size=300)
plt.gca().imshow(cloud)
plt.gca().set_title('Topic ' + str(i))
plt.gca().axis('off')
# Additional adjusting
plt.subplots_adjust(wspace=0, hspace=0)
plt.axis('off')
plt.margins(x=0, y=0)
plt.tight_layout()
# Save word clouds to visualization folder
output_path = os.path.join("..", "output", "topic_wordclouds.png")
plt.savefig(output_path)
# Define behaviour when called from command line
if __name__=="__main__":
main() | 46.693182 | 402 | 0.626511 | 15,751 | 0.638882 | 0 | 0 | 0 | 0 | 0 | 0 | 12,377 | 0.502028 |
e45a8dc57b1450e18797d47ff570959f3d7e2d31 | 15,086 | py | Python | EEG_Lightning/dassl/data/datasets/ProcessDataBase_v1.py | mcd4874/NeurIPS_competition | 4df1f222929e9824a55c9c4ae6634743391b0fe9 | [
"MIT"
] | 23 | 2021-10-14T02:31:06.000Z | 2022-01-25T16:26:44.000Z | EEG_Lightning/dassl/data/datasets/ProcessDataBase_v1.py | mcd4874/NeurIPS_competition | 4df1f222929e9824a55c9c4ae6634743391b0fe9 | [
"MIT"
] | null | null | null | EEG_Lightning/dassl/data/datasets/ProcessDataBase_v1.py | mcd4874/NeurIPS_competition | 4df1f222929e9824a55c9c4ae6634743391b0fe9 | [
"MIT"
] | 1 | 2022-03-05T06:54:11.000Z | 2022-03-05T06:54:11.000Z | """
William DUong
"""
import os.path as osp
import os
import errno
from .build import DATASET_REGISTRY
from .base_dataset import Datum, DatasetBase,EEGDatum
from scipy.io import loadmat
import numpy as np
from collections import defaultdict
class ProcessDataBase(DatasetBase):
dataset_dir = None
file_name = None
def __init__(self, cfg):
# self.check_dataInfo()
self._n_domain = 0
self.domain_class_weight = None
self.whole_class_weight = None
self.root = osp.abspath(osp.expanduser(cfg.DATASET.ROOT))
self.dataset_dir = self.dataset_dir if not cfg.DATASET.DIR else cfg.DATASET.DIR
self.file_name = self.file_name if not cfg.DATASET.FILENAME else cfg.DATASET.FILENAME
self.cfg = cfg
# self.dataset_dir = osp.join(root, self.dataset_dir)
data_path = osp.join(self.root,self.dataset_dir, self.file_name)
if not osp.isfile(data_path):
raise FileNotFoundError(
errno.ENOENT, os.strerror(errno.ENOENT), data_path)
self.check_dataInfo()
total_data,total_label,test_data,test_lbl = self._read_data(data_path)
train, train_target, val, test = self.process_data_format((total_data,total_label),(test_data,test_lbl),cfg)
print("target domain : ", cfg.DATASET.TARGET_DOMAINS)
super().__init__(train_x=train, val=val, test=test, train_u=train_target)
@property
def data_domains(self):
return self._n_domain
def _read_data(self,data_path):
raise NotImplementedError
def check_dataInfo(self):
return
# def _read_data(self,data_path):
# """
# Process data from .mat file
# Re-implement this function to process new dataset
# Generate train data and test data with shape (subjects,trials,channels,frequency)
# """
# temp = loadmat(data_path)
# total_data = []
# total_label = []
# print(temp.keys())
# for idx in range(len(temp['data'][0])):
# total_data.append(temp['data'][0][idx])
# total_label.append(temp['labels'][0][idx])
# total_data = np.array(total_data) # (subjects,trials,channels,frequency)
# total_label = np.array(total_label)
# total_label = np.squeeze(total_label)
# total_label = total_label.astype(int)
#
# total_test_data = []
# total_test_label = []
# for idx in range(len(temp['testdata'][0])):
# total_test_data.append(temp['testdata'][0][idx])
# total_test_label.append(temp['testlabels'][0][idx])
#
# # test_data = np.array(temp['testdata']) # shape (trials,channels,frequency
# # test_lbl = np.array(temp['testlabels']) # trial,)
# # test_lbl = np.squeeze(test_lbl)
#
# test_data = np.array(total_test_data) # (subjects,trials,channels,frequency)
# test_lbl = np.array(total_test_label)
# test_lbl = np.squeeze(test_lbl)
# test_lbl = test_lbl.astype(int)
#
# print("BCI_IV data shape : ", total_data.shape)
# print("BCI_IV test shape : ", test_data.shape)
#
# return [total_data,total_label,test_data,test_lbl]
# def setup_within_subject_experiment(self,total_data,total_label,test_data,test_lbl,cfg):
# folds = cfg.DATASET.K_FOLD
# valid_fold = cfg.DATASET.VALID_FOLD
# train_data, train_label, valid_data, valid_label = self._pick_train_valid_same_set(total_data, total_label,
# folds=folds,
# valid_fold=valid_fold)
# return train_data, train_label, valid_data, valid_label,test_data,test_lbl
# def setup_cross_subject_experiment(self,total_data,total_label,test_data,test_lbl,cfg):
# cross_subject_seed = cfg.DATASET.RANDOM_SEED
#
# pick_data_subject_ids, pick_test_subject_ids = self._pick_leave_N_out_ids(total_subject=total_data.shape[0],
# seed=cross_subject_seed,
# given_subject_idx=None, num_subjects=3)
# # use the provided train subjects and target subjects id
# if len(cfg.DATASET.SOURCE_DOMAINS) > 0 and len(cfg.DATASET.TARGET_DOMAINS) > 0:
# pick_data_subject_ids = cfg.DATASET.SOURCE_DOMAINS
# pick_test_subject_ids = cfg.DATASET.TARGET_DOMAINS
#
# train_data = total_data[pick_data_subject_ids,]
# train_label = total_label[pick_data_subject_ids,]
# valid_data = test_data[pick_data_subject_ids,]
# valid_label = test_lbl[pick_data_subject_ids,]
# test_data = np.concatenate((total_data[pick_test_subject_ids,], test_data[pick_test_subject_ids,]), axis=1)
# test_lbl = np.concatenate((total_label[pick_test_subject_ids,], test_lbl[pick_test_subject_ids,]), axis=1)
# print("Pick subject to trian/valid : ", pick_data_subject_ids)
# print("Pick subject to test : ", pick_test_subject_ids)
# print("Train data, valid data, test data shape : ", (train_data.shape, valid_data.shape, test_data.shape))
# print("Train label, valid label, test label shape : ", (train_label.shape, valid_label.shape, test_lbl.shape))
# return train_data, train_label, valid_data, valid_label,test_data,test_lbl
def setup_within_subject_experiment(self,total_data,total_label,test_data,test_lbl,cfg):
"""
Split the total data set into k_folds. Each fold contains data from every subjects
pick 1 fold to be valid data
"""
folds = cfg.DATASET.K_FOLD
valid_fold = cfg.DATASET.VALID_FOLD
train_data, train_label, valid_data, valid_label = self._pick_train_valid_same_set(total_data, total_label,
folds=folds,
valid_fold=valid_fold)
print("train data within subjects shape : {} from k={} split".format(train_data.shape, folds))
print("valid data within subjects shape : {} from k={} split".format(valid_data.shape, folds))
return train_data, train_label, valid_data, valid_label,test_data,test_lbl
def setup_cross_subject_experiment(self,total_data,total_label,test_data,test_lbl,cfg):
"""
Split the total dataset into k folds. Each fold contains some subjects
Pick 1 folds to be valid data
"""
folds = cfg.DATASET.K_FOLD
valid_fold = cfg.DATASET.VALID_FOLD
train_data, train_label, valid_data, valid_label = self._pick_train_valid_cross_set(total_data,total_label,folds=folds,valid_fold=valid_fold)
return train_data, train_label, valid_data, valid_label, test_data, test_lbl
def _pick_train_valid_cross_set(self, total_data, total_label, folds, valid_fold):
if valid_fold > folds:
raise ValueError("can not assign fold identity outside of total cv folds")
total_subjects = np.arange(total_data.shape[0])
# total_subjects = [i for i in range(total_data.shape[0])]
split_folds = [list(x) for x in np.array_split(total_subjects, folds)]
pick_valid_subjects = split_folds[valid_fold - 1]
pick_train_subjects = []
for i in range(folds):
if i != valid_fold - 1:
for subject in split_folds[i]:
pick_train_subjects.append(subject)
# subject_train_folds = for subject in [ ]
# print("train subjects : {} from k={} split".format(pick_valid_subjects, folds))
# print("valid subjects : {} from k={} split".format(pick_valid_subjects, folds))
valid_data = total_data[pick_valid_subjects,]
valid_label = total_label[pick_valid_subjects,]
train_data = total_data[pick_train_subjects,]
train_label = total_label[pick_train_subjects,]
return train_data, train_label,pick_train_subjects, valid_data, valid_label,pick_valid_subjects
# valid_mark_start = (valid_fold - 1) * fold_trial
# valid_mark_end = valid_fold * fold_trial
#
# train_data = np.concatenate((data[:, :valid_mark_start, :, :], data[:, valid_mark_end:, :, :]), axis=1)
# train_label = np.concatenate((label[:, :valid_mark_start], label[:, valid_mark_end:]), axis=1)
#
# valid_data = data[:, valid_mark_start:valid_mark_end, :, :]
# valid_label = label[:, valid_mark_start:valid_mark_end]
#
#
# # if len(total_subjects)%folds == 0:
# train_folds = [i for i in range(1,folds+1) if i !=valid_fold]
# subject_split_folds = np.split(total_subjects,folds)
# print("subject splits : ",subject_split_folds)
#
# validation_subject_fold = subject_split_folds[valid_fold-1]
# train_subject_fold = np.concatenate(subject_split_folds[train_folds])
#
#
#
# "still need to implement"
# return None,None,None,None
def generate_class_weight(self,label):
if len(label.shape) == 2:
#label shall have shape (subjects,trials)
label = label.reshape(label.shape[0] * label.shape[1])
#data need to be shape (trials)
total = label.shape[0]
labels = np.unique(label)
list_ratio = []
for current_label in labels:
current_ratio = total / len(np.where(label == current_label)[0])
list_ratio.append(current_ratio)
return list_ratio
def generate_domain_class_weight(self,label):
"""
assume the label has shape (subjects,trials)
"""
if len(label.shape) != 2:
raise ValueError("domain labels does not have correct data format")
domain_class_weight = defaultdict()
for domain in range(label.shape[0]):
current_domain_class_weight = self.generate_class_weight(label[domain])
domain_class_weight[domain] = current_domain_class_weight
return domain_class_weight
# def _expand_data_dim(self,data):
# i
def process_data_format(self, data,test,cfg):
CROSS_SUBJECTS = cfg.DATASET.CROSS_SUBJECTS
WITHIN_SUBJECTS = cfg.DATASET.WITHIN_SUBJECTS
total_data,total_label = data
test_data,test_lbl = test
if WITHIN_SUBJECTS:
train_data, train_label, valid_data, valid_label,test_data,test_lbl = self.setup_within_subject_experiment(total_data,total_label,test_data,test_lbl,cfg)
elif CROSS_SUBJECTS:
train_data, train_label, valid_data, valid_label, test_data, test_lbl = self.setup_cross_subject_experiment(total_data,total_label,test_data,test_lbl,cfg)
else:
raise ValueError("need to specify to create train/valid for cross subjects or within subject experiments")
"""Create class weight for dataset"""
if cfg.DATASET.DOMAIN_CLASS_WEIGHT:
self.domain_class_weight =self.generate_domain_class_weight(train_label)
if cfg.DATASET.TOTAL_CLASS_WEIGHT:
self.whole_class_weight = self.generate_class_weight(train_label)
#assume the number of subjects represent number of domains
self._n_domain = train_data.shape[0]
train_data = np.expand_dims(train_data,axis=2)
valid_data = np.expand_dims(valid_data,axis=2)
test_data = np.expand_dims(test_data, axis=2)
train_items = self._generate_datasource(train_data,train_label)
valid_items = self._generate_datasource(valid_data,valid_label)
test_items = self._generate_datasource(test_data,test_lbl)
train_target_items = test_items.copy()
return train_items,train_target_items,valid_items,test_items
@classmethod
def _pick_train_valid_same_set(self,data, label, folds=4, valid_fold=1):
if valid_fold > folds:
print("can not assign fold identity outside of total cv folds")
return
total_trials = data.shape[1]
fold_trial = int(total_trials / folds)
valid_mark_start = (valid_fold - 1) * fold_trial
valid_mark_end = valid_fold * fold_trial
# print("valid mark start : ", valid_mark_start)
# print("valid_mark_end : ", valid_mark_end)
train_data = np.concatenate((data[:, :valid_mark_start, :, :], data[:, valid_mark_end:, :, :]), axis=1)
train_label = np.concatenate((label[:, :valid_mark_start], label[:, valid_mark_end:]), axis=1)
valid_data = data[:, valid_mark_start:valid_mark_end, :, :]
valid_label = label[:, valid_mark_start:valid_mark_end]
# print("train data shape : ", train_data.shape)
# print("valid data shape : ", valid_data.shape)
return train_data, train_label, valid_data, valid_label
# @classmethod
def _leave_N_out(self,data, label, seed=None, num_subjects=1, given_subject_idx=None):
"""PICK valid num subjects out"""
pick_valid_subjects_idx,pick_train_subjects_idx = self._pick_leave_N_out_ids(data.shape[0], seed, given_subject_idx,num_subjects)
subjects = np.arange(data.shape[0])
pick_train_subjects = subjects[pick_train_subjects_idx]
pick_valid_subjects = subjects[pick_valid_subjects_idx]
train_data = data[pick_train_subjects_idx,]
valid_data = data[pick_valid_subjects_idx,]
train_label = label[pick_train_subjects_idx,]
valid_label = label[pick_valid_subjects_idx,]
return train_data, train_label, pick_train_subjects, valid_data, valid_label, pick_valid_subjects
@classmethod
def _pick_leave_N_out_ids(self,total_subject, seed=None, given_subject_idx=None, num_subjects=1):
if seed is None:
np.random.choice(1)
else:
np.random.choice(seed)
subjects_idx = np.arange(total_subject) if given_subject_idx is None else given_subject_idx
pick_subjects_idx = np.random.choice(subjects_idx, num_subjects, replace=False)
pick_subjects_idx = np.sort(pick_subjects_idx)
remain_subjects_idx = subjects_idx[~np.isin(subjects_idx, pick_subjects_idx)]
return pick_subjects_idx, remain_subjects_idx
@classmethod
def _generate_datasource(self,data, label, test_data=False):
items = []
total_subjects = 1
if not test_data:
total_subjects = len(data)
for subject in range(total_subjects):
current_subject_data = data[subject]
current_subject_label = label[subject]
domain = subject
for i in range(current_subject_data.shape[0]):
item = EEGDatum(eeg_data=current_subject_data[i], label=int(current_subject_label[i]), domain=domain)
items.append(item)
return items
| 45.032836 | 166 | 0.65173 | 14,838 | 0.983561 | 0 | 0 | 2,310 | 0.153122 | 0 | 0 | 5,958 | 0.394936 |
e45ad99677d6577af2671852ef4f62636067fd15 | 9,321 | py | Python | pywolf3d/level_editor/app.py | jammers-ach/pywolf3d | 3e305d7bdb9aa4f38ae5cf460ed22c54efe8980c | [
"MIT"
] | null | null | null | pywolf3d/level_editor/app.py | jammers-ach/pywolf3d | 3e305d7bdb9aa4f38ae5cf460ed22c54efe8980c | [
"MIT"
] | null | null | null | pywolf3d/level_editor/app.py | jammers-ach/pywolf3d | 3e305d7bdb9aa4f38ae5cf460ed22c54efe8980c | [
"MIT"
] | null | null | null | import argparse
import json
from ursina import load_texture, Ursina, Entity, color, camera, Quad, mouse, time, window, invoke, WindowPanel, \
Text, InputField, Space, scene, Button, Draggable, Tooltip, Scrollable
from pywolf3d.games.wolf3d import WALL_DEFS, WallDef, OBJECT_DEFS
Z_GRID = 0
Z_OBJECT = 2
Z_WALL = 3
class LevelEditor():
def __init__(self, fname, path_to_game):
level_data = None
self.fname = fname
with open(fname) as f:
level_data = json.load(f)
w = len(level_data['level'])
h = len(level_data['level'][0])
self.cursor = Grid(self, parent=scene)
self.grid = []
y = 0
for row in level_data['level']:
tile_row = []
x = 0
for wall_code in row:
tile_row.append(Tile(self, position=(x,y), cursor=self.cursor, wall_code=wall_code, parent=scene))
x += 1
self.grid.append(tile_row)
y += 1
self.object_grid = [[None for y in range(y) ] for x in range(x)]
for coord, code in level_data['object_list']:
if code in OBJECT_DEFS:
y, x = coord
self.update_object_grid(x, y, code)
else:
print(f"ignoring object {code} at {coord}")
camera.orthographic = True
camera.fov = 5
camera.position = (w/2,h/2)
def wall_inventory_click(code):
print(f"clicked tile {code}")
self.current_tile = code
self.mode = "tile"
self.wall_holder = Inventory(wall_inventory_click, cursor=self.cursor)
self.wall_holder.add_script(Scrollable())
for _,w in WALL_DEFS.items():
self.wall_holder.append(w)
def object_inventory_click(code):
print(f"clicked object {code}")
self.current_tile = code
self.mode = "object"
self.object_holder = Inventory(object_inventory_click, cursor=self.cursor, visible=False)
self.object_holder.add_script(Scrollable())
for _,w in OBJECT_DEFS.items():
self.object_holder.append(w)
self.current_tile = 1
self.mode = "tile"
def objects(self):
'''switches between tile and object mode'''
if self.mode == "tile":
self.mode = "object"
self.current_tile = 25
self.object_holder.toggle_visibility(True)
self.wall_holder.toggle_visibility(False)
elif self.mode == "object":
self.mode = "tile"
self.current_tile = 1
self.object_holder.toggle_visibility(False)
self.wall_holder.toggle_visibility(True)
def update_object_grid(self, x, y, code):
x,y = int(x), int(y)
if self.object_grid[x][y]:
self.object_grid[x][y].set_obj_tile(OBJECT_DEFS[code])
else:
self.object_grid[x][y] = ObjectTile(self, OBJECT_DEFS[code], position=(x,y), cursor=self.cursor, parent=scene)
def save(self):
json_data = {"object_list": [],
"name": "test level",
"size": []}
level = []
for r in self.grid:
row = []
for col in r:
row.append(col.wall_code)
level.append(row)
json_data["level"] = level
for r in self.object_grid:
row = []
for col in r:
if col:
json_data["object_list"].append([(col.y, col.x), col.obj_tile.code])
with open(self.fname, 'w') as f:
json.dump(json_data, f)
print(f"written to {self.fname}")
class Inventory(Entity):
def __init__(self, make_click, rows=2, cols=5, full_size=120, scrollable=True, **kwargs):
super().__init__(
parent = camera.ui,
model = Quad(radius=.015),
texture = 'white_cube',
texture_scale = (rows,cols),
scale = (.1 * rows, .1 * cols),
origin = (-.5, .5),
position = (-0.9,.5),
color = color.color(0,0,.1,.9),
)
self.make_click = make_click
self.rows = rows
self.cols = cols
self.full_cols = full_size - cols
self.icons = []
for key, value in kwargs.items():
setattr(self, key, value)
self.used_spots = []
def find_free_spot(self):
for y in range(self.cols+self.full_cols):
for x in range(self.rows):
if not (x,y) in self.used_spots:
self.used_spots.append((x,y))
return x, y
raise Exception("No free spots")
def append(self, wall_def, x=0, y=0):
x, y = self.find_free_spot()
def clicked():
self.make_click(wall_def.code)
icon = Button(
parent = self,
model = 'quad',
icon = wall_def.editor_texture,
color = color.white,
scale_x = 1/self.texture_scale[0],
scale_y = 1/self.texture_scale[1],
origin = (-.5,.5),
x = x * 1/self.texture_scale[0],
y = -y * 1/self.texture_scale[1],
z = -.5,
on_click = clicked,
)
icon.tooltip = Tooltip(wall_def.description)
icon.tooltip.background.color = color.color(0,0,0,.8)
self.icons.append(icon)
def item_clicked(self, item):
self.selected.deselect()
self.selected = item
def toggle_visibility(self, visible):
self.visible = visible
self.z = 0 if visible else 10
for x in self.icons:
x.visible = visible
x.disabled = not visible
class Grid(Entity):
fov_step = 20
move_step = 10
hold_step = 20
def __init__(self, editor, **kwargs):
super().__init__()
self.model=Quad(mode='line')
self.color = color.red
self.z = Z_GRID
self.current_tile = 5
self.editor = editor
for key, value in kwargs.items():
setattr(self, key, value)
def update(self):
self.position = mouse.world_point
self.x = round(self.x, 0)
self.y = round(self.y, 0)
def input(self, key):
if key == "up arrow":
camera.y += self.move_step * time.dt
elif key == "down arrow":
camera.y -= self.move_step * time.dt
elif key == "left arrow":
camera.x -= self.move_step * time.dt
elif key == "right arrow":
camera.x += self.move_step * time.dt
elif key == "up arrow hold":
camera.y += self.hold_step * time.dt
elif key == "down arrow hold":
camera.y -= self.hold_step * time.dt
elif key == "left arrow hold":
camera.x -= self.hold_step * time.dt
elif key == "right arrow hold":
camera.x += self.hold_step * time.dt
elif key == "=" or key == "= hold":
camera.fov -= self.fov_step * time.dt
elif key == "-" or key == "- hold":
camera.fov += self.fov_step * time.dt
elif key == "s":
self.editor.save()
elif key == "o":
self.editor.objects()
class Tile(Entity):
def __init__(self, editor, **kwargs):
super().__init__()
self.model='quad'
self.z = Z_WALL
self.collider='box'
self.editor = editor
self.set_texture(kwargs['wall_code'])
for key, value in kwargs.items():
setattr(self, key, value)
def set_texture(self, wall_code):
txt = WALL_DEFS[wall_code].editor_texture
self.wall_code = wall_code
self.texture = txt
def update(self):
if self.hovered:
self.cursor.x = self.position.x
self.cursor.y = self.position.y
self.cursor.z = Z_GRID
def input(self, key):
if key == 'left mouse down' and self.hovered:
print("down", self.x, self.y, ' - ', self.wall_code)
if self.editor.mode == "tile":
self.set_texture(self.editor.current_tile)
elif self.editor.mode == "object":
self.editor.update_object_grid(self.x, self.y, self.editor.current_tile)
class ObjectTile(Entity):
def __init__(self, editor, obj_tile, **kwargs):
super().__init__()
self.model='quad'
self.z = Z_OBJECT
self.collider=None
self.editor = editor
self.texture = obj_tile.editor_texture
self.obj_tile = obj_tile
for key, value in kwargs.items():
setattr(self, key, value)
def set_obj_tile(self, obj_tile):
self.texture = obj_tile.editor_texture
self.obj_tile = obj_tile
def start_editor(fname, path_to_game):
app = Ursina()
editor = LevelEditor(fname, path_to_game)
app.run()
def run():
parser = argparse.ArgumentParser(description='Mapmaker for pywolf3d')
parser.add_argument('level', help='path to level to load')
parser.add_argument('--path', help='path to wolf3d datafiles (default ./wolfdata)',
default="./wolfdata/")
args = parser.parse_args()
start_editor(args.level, args.path)
if __name__ == '__main__':
run()
| 29.590476 | 122 | 0.55037 | 8,471 | 0.908808 | 0 | 0 | 0 | 0 | 0 | 0 | 675 | 0.072417 |
e45ba78572ce87d65bc9fa965f1a8af3685baf94 | 3,404 | py | Python | code/data_mgmt.py | TomDonoghue/EEGparam | a3e747094617479122900688643fa396ecbf8bab | [
"MIT"
] | 8 | 2021-08-17T05:22:40.000Z | 2022-03-23T02:03:48.000Z | code/data_mgmt.py | TomDonoghue/EEGparam | a3e747094617479122900688643fa396ecbf8bab | [
"MIT"
] | 1 | 2020-12-09T13:22:03.000Z | 2021-01-27T01:56:09.000Z | code/data_mgmt.py | TomDonoghue/EEGparam | a3e747094617479122900688643fa396ecbf8bab | [
"MIT"
] | 4 | 2021-06-20T14:44:38.000Z | 2021-12-11T11:21:26.000Z | """Functions for loading and data management for EEG-FOOOF."""
from os.path import join as pjoin
import numpy as np
from fooof import FOOOFGroup
from fooof.analysis import get_band_peak_fg
from settings import BANDS, YNG_INDS, OLD_INDS, N_LOADS, N_SUBJS, N_TIMES
###################################################################################################
###################################################################################################
def reshape_data(data):
"""Reshape loaded data objects into subsets for YNG and OLD groups."""
yng_data = np.vstack([data[0, YNG_INDS, :], data[1, YNG_INDS, :], data[2, YNG_INDS, :]])
old_data = np.vstack([data[0, OLD_INDS, :], data[1, OLD_INDS, :], data[2, OLD_INDS, :]])
return yng_data, old_data
def load_fooof_task_md(data_path, side='Contra', folder='FOOOF'):
"""Load task data in for all subjects, selects & return metadata."""
# Collect measures together from FOOOF results into matrices
all_r2s = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
all_errs = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
for li, load in enumerate(['Load1', 'Load2', 'Load3']):
pre, early, late = _load_fgs(data_path, folder, side, load)
for ind, fg in enumerate([pre, early, late]):
all_r2s[li, :, ind] = fg.get_params('r_squared')
all_errs[li, :, ind] = fg.get_params('error')
return all_r2s, all_errs
def load_fooof_task_ap(data_path, side='Contra', folder='FOOOF'):
"""Loads task data in for all subjects, selects and return aperiodic FOOOF outputs.
data_path : path to where data
side: 'Ipsi' or 'Contra'
"""
# Collect measures together from FOOOF results into matrices
all_exps = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
all_offsets = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
for li, load in enumerate(['Load1', 'Load2', 'Load3']):
pre, early, late = _load_fgs(data_path, folder, side, load)
for ind, fg in enumerate([pre, early, late]):
all_exps[li, :, ind] = fg.get_params('aperiodic_params', 'exponent')
all_offsets[li, :, ind] = fg.get_params('aperiodic_params', 'offset')
return all_offsets, all_exps
def load_fooof_task_pe(data_path, side='Contra', param_ind=1, folder='FOOOF'):
"""Loads task data for all subjects, selects and return periodic FOOOF outputs.
data_path : path to where data
side: 'Ipsi' or 'Contra'
"""
# Collect measures together from FOOOF results into matrices
all_alphas = np.zeros(shape=[N_LOADS, N_SUBJS, N_TIMES])
for li, load in enumerate(['Load1', 'Load2', 'Load3']):
pre, early, late = _load_fgs(data_path, folder, side, load)
for ind, fg in enumerate([pre, early, late]):
temp_alphas = get_band_peak_fg(fg, BANDS.alpha)
all_alphas[li, :, ind] = temp_alphas[:, param_ind]
return all_alphas
def _load_fgs(data_path, folder, side, load):
"""Helper to load FOOOFGroups."""
# Load the FOOOF analyses of the average
pre, early, late = FOOOFGroup(), FOOOFGroup(), FOOOFGroup()
pre.load('Group_' + load + '_' + side + '_Pre', pjoin(data_path, folder))
early.load('Group_' + load + '_' + side + '_Early', pjoin(data_path, folder))
late.load('Group_' + load + '_' + side + '_Late', pjoin(data_path, folder))
return pre, early, late
| 35.831579 | 99 | 0.623384 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,193 | 0.35047 |
e45c0f05cdc7fe7a2e45a2f57230877bc9ba6968 | 413 | py | Python | match_shapes.py | KyojiOsada/Python-Library | b06e50454c56c84c2abb96e6f68d35117ea5f4b5 | [
"Apache-2.0"
] | null | null | null | match_shapes.py | KyojiOsada/Python-Library | b06e50454c56c84c2abb96e6f68d35117ea5f4b5 | [
"Apache-2.0"
] | null | null | null | match_shapes.py | KyojiOsada/Python-Library | b06e50454c56c84c2abb96e6f68d35117ea5f4b5 | [
"Apache-2.0"
] | null | null | null | import sys
import cv2
import numpy as np
img1 = cv2.imread('source1.jpg',0)
img2 = cv2.imread('source2.jpg',0)
ret, thresh = cv2.threshold(img1, 127, 255,0)
ret, thresh2 = cv2.threshold(img2, 127, 255,0)
contours,hierarchy,a = cv2.findContours(thresh,2,1)
cnt1 = contours[0]
contours,hierarchy,a = cv2.findContours(thresh2,2,1)
cnt2 = contours[0]
ret = cv2.matchShapes(cnt1,cnt2,1,0.0)
print(ret)
sys.exit()
| 20.65 | 52 | 0.72155 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.062954 |
e45c3482ede83aa24d104869dacc8d42f601273f | 25,556 | py | Python | SlicerModules/SegmentConnectedParzenPDF/SegmentConnectedParzenPDF.py | jcfr/TubeTK | 3791790e206b5627a35c46f86eeb9671c8d4190f | [
"Apache-2.0"
] | 1 | 2019-07-19T09:27:37.000Z | 2019-07-19T09:27:37.000Z | SlicerModules/SegmentConnectedParzenPDF/SegmentConnectedParzenPDF.py | jcfr/TubeTK | 3791790e206b5627a35c46f86eeb9671c8d4190f | [
"Apache-2.0"
] | null | null | null | SlicerModules/SegmentConnectedParzenPDF/SegmentConnectedParzenPDF.py | jcfr/TubeTK | 3791790e206b5627a35c46f86eeb9671c8d4190f | [
"Apache-2.0"
] | 1 | 2019-07-19T09:28:56.000Z | 2019-07-19T09:28:56.000Z | import os
from __main__ import vtk, qt, ctk, slicer
import EditorLib
from EditorLib.EditOptions import HelpButton
from EditorLib.EditOptions import EditOptions
from EditorLib import EditUtil
from EditorLib import LabelEffect
class InteractiveConnectedComponentsUsingParzenPDFsOptions(EditorLib.LabelEffectOptions):
""" Editor Effect gui
"""
def __init__(self, parent=0):
super(InteractiveConnectedComponentsUsingParzenPDFsOptions,self).__init__(parent)
self.attributes = ('PaintTool')
self.displayName = 'Interactive Connected Components using Parzen PDFs'
self.undoRedo = EditorLib.EditUtil.UndoRedo()
# Create the normal PDF segmenter node cli if it doesn't exists yet.
# This is because we want the normal module's cli to be selected
# when opening the cli module.
module = slicer.modules.segmentconnectedcomponentsusingparzenpdfs
self.logic.getCLINode(module, module.title)
  def __del__(self):
    # Delegate all teardown to the superclass destructor.
    super(InteractiveConnectedComponentsUsingParzenPDFsOptions,self).__del__()
def create(self):
super(InteractiveConnectedComponentsUsingParzenPDFsOptions,self).create()
ioCollapsibleButton = ctk.ctkCollapsibleGroupBox()
ioCollapsibleButton.title = "IO"
ioCollapsibleButton.collapsed = 0
self.frame.layout().addWidget(ioCollapsibleButton)
# Layout within the io collapsible button
ioFormLayout = qt.QFormLayout(ioCollapsibleButton)
self.additionalInputNodeSelectors = []
for i in range(0,2):
self.additionalInputNodeSelectors.append(self.addInputNodeSelector(i, ioFormLayout))
self.additionalInputNodeSelectors[0].toolTip = "Select the 1st additional input volume to be segmented"
self.additionalInputNodeSelectors[1].toolTip = "Select the 2nd additional input volume to be segmented"
# Objects
objectCollapsibleGroupBox = ctk.ctkCollapsibleGroupBox()
objectCollapsibleGroupBox.title = "Objects"
self.frame.layout().addWidget(objectCollapsibleGroupBox)
# Layout within the io collapsible button
objectFormLayout = qt.QFormLayout(objectCollapsibleGroupBox)
foregroundLayout = qt.QHBoxLayout()
foregroundLabel = slicer.qMRMLLabelComboBox()
foregroundLabel.objectName = 'Foreground label'
foregroundLabel.setMRMLScene(slicer.mrmlScene)
foregroundLabel.setMRMLColorNode(self.editUtil.getColorNode())
foregroundLabel.labelValueVisible = True
foregroundLabel.currentColor = 1
self.foregroundLabel = foregroundLabel
self.connections.append( (self.foregroundLabel, 'currentColorChanged(int)', self.updateMRMLFromGUI ) )
foregroundWeightSpinBox = qt.QDoubleSpinBox(foregroundLabel)
self.foregroundWeightSpinBox = foregroundWeightSpinBox
foregroundWeightSpinBox.setRange(0.0, 1.0)
foregroundWeightSpinBox.setSingleStep(0.1)
foregroundWeightSpinBox.value = 1.0
foregroundPopup = ctk.ctkPopupWidget( foregroundWeightSpinBox )
foregroundPopupLayout = qt.QHBoxLayout( foregroundPopup )
foregroundPopupSlider = ctk.ctkDoubleSlider( foregroundPopup )
self.foregroundPopupSlider = foregroundPopupSlider
foregroundPopupSlider.maximum = 1.0
foregroundPopupSlider.minimum = 0.0
foregroundPopupSlider.singleStep = 0.1
foregroundPopupSlider.connect('valueChanged(double)', self.foregroundWeightSpinBox.setValue)
foregroundWeightSpinBox.connect('valueChanged(double)', self.foregroundPopupSlider.setValue)
self.connections.append( (self.foregroundWeightSpinBox, 'valueChanged(double)', self.updateMRMLFromGUI ) )
foregroundLayout.addWidget( foregroundLabel )
foregroundLayout.addWidget( foregroundWeightSpinBox )
foregroundPopupLayout.addWidget( foregroundPopupSlider )
objectFormLayout.addRow("Foreground Label:", foregroundLayout )
self.objectLabel = foregroundLabel
# http://qt-project.org/doc/qt-4.7/qt.html
foregroundPopup.alignment = 0x0082 # Qt::AlignVCenter | Qt::AlignRight
foregroundPopup.horizontalDirection = 0 # Qt::LeftToRight
foregroundPopup.verticalDirection = 0 # Qt::TopToBottom
foregroundPopup.animationEffect = 1 # Qt::ScrollEffect
backgroundLayout = qt.QHBoxLayout()
backgroundLabel = slicer.qMRMLLabelComboBox()
backgroundLabel.objectName = 'Background label'
backgroundLabel.setMRMLScene(slicer.mrmlScene)
backgroundLabel.setMRMLColorNode(self.editUtil.getColorNode())
backgroundLabel.labelValueVisible = True
backgroundLabel.currentColor = 2
self.backgroundLabel = backgroundLabel
self.connections.append( (self.backgroundLabel, 'currentColorChanged(int)', self.updateMRMLFromGUI ) )
backgroundWeightSpinBox = qt.QDoubleSpinBox(backgroundLabel)
self.backgroundWeightSpinBox = backgroundWeightSpinBox
backgroundWeightSpinBox.setRange(0.0, 1.0)
backgroundWeightSpinBox.setSingleStep(0.1)
backgroundWeightSpinBox.value = 1.0
backgroundPopup = ctk.ctkPopupWidget( backgroundWeightSpinBox )
backgroundPopupLayout = qt.QHBoxLayout( backgroundPopup )
backgroundPopupSlider = ctk.ctkDoubleSlider( backgroundPopup )
self.backgroundPopupSlider = backgroundPopupSlider
backgroundPopupSlider.maximum = 1.0
backgroundPopupSlider.minimum = 0.0
backgroundPopupSlider.singleStep = 0.1
backgroundPopupSlider.connect('valueChanged(double)', self.backgroundWeightSpinBox.setValue)
backgroundWeightSpinBox.connect('valueChanged(double)', self.backgroundPopupSlider.setValue)
self.connections.append( (self.backgroundWeightSpinBox, 'valueChanged(double)', self.updateMRMLFromGUI ) )
backgroundLayout.addWidget( backgroundLabel )
backgroundLayout.addWidget( backgroundWeightSpinBox )
backgroundPopupLayout.addWidget( backgroundPopupSlider )
objectFormLayout.addRow("Background Label:", backgroundLayout)
self.backgroundLabel = backgroundLabel
# http://qt-project.org/doc/qt-4.7/qt.html
backgroundPopup.alignment = 0x0082 # Qt::AlignVCenter | Qt::AlignRight
backgroundPopup.horizontalDirection = 0 # Qt::LeftToRight
backgroundPopup.verticalDirection = 0 # Qt::TopToBottom
backgroundPopup.animationEffect = 1 # Qt::ScrollEffect
# Presets
# Placeholder
presetsCollapsibleGroupBox = ctk.ctkCollapsibleGroupBox()
presetsCollapsibleGroupBox.title = "Preset"
self.frame.layout().addWidget(presetsCollapsibleGroupBox)
presetComboBox = slicer.qSlicerPresetComboBox()
# Advanced Parameters
paramsCollapsibleGroupBox = ctk.ctkCollapsibleGroupBox()
paramsCollapsibleGroupBox.title = "Advanced Parameters"
paramsCollapsibleGroupBox.collapsed = 1
self.frame.layout().addWidget(paramsCollapsibleGroupBox)
# Layout within the io collapsible button
paramsFormLayout = qt.QFormLayout(paramsCollapsibleGroupBox)
erosionSpinBox = qt.QSpinBox()
erosionSpinBox.objectName = 'erosionSpinBox'
erosionSpinBox.toolTip = "Set the erosion radius."
erosionSpinBox.setMinimum(0)
erosionSpinBox.setValue(5) # Default
paramsFormLayout.addRow("Erosion Radius:", erosionSpinBox)
self.erosionSpinBox = erosionSpinBox
self.connections.append( (self.erosionSpinBox, "valueChanged(int)", self.updateMRMLFromGUI ) )
holeFillSpinBox = qt.QSpinBox()
holeFillSpinBox.objectName = 'holeFillSpinBox'
holeFillSpinBox.toolTip = "Set the hole fill iterations."
holeFillSpinBox.setMinimum(0)
holeFillSpinBox.setValue(5) #Default
paramsFormLayout.addRow("Hole Fill Iterations:", holeFillSpinBox)
self.holeFillSpinBox = holeFillSpinBox
self.connections.append( (self.holeFillSpinBox, "valueChanged(int)", self.updateMRMLFromGUI ) )
# probabilitySmoothingStandardDeviation spin box
probabilitySmoothingStdDevSpinBox = qt.QDoubleSpinBox()
probabilitySmoothingStdDevSpinBox.objectName = 'probabilitySmoothingStdDevSpinBox'
probabilitySmoothingStdDevSpinBox.toolTip = "Standard deviation of blur applied to probability images prior to computing maximum likelihood of each class at each pixel."
probabilitySmoothingStdDevSpinBox.setMinimum(0.0)
probabilitySmoothingStdDevSpinBox.setValue(1.0) # Default
probabilitySmoothingStdDevSpinBox.setSingleStep(0.5)
paramsFormLayout.addRow("Probability Smoothing Standard Deviation:", probabilitySmoothingStdDevSpinBox)
self.probabilitySmoothingStdDevSpinBox = probabilitySmoothingStdDevSpinBox
self.connections.append( (self.probabilitySmoothingStdDevSpinBox, "valueChanged(double)", self.updateMRMLFromGUI ) )
# histogramSmoothingStandardDeviation spin box
histogramSmoothingStdDevSpinBox = qt.QDoubleSpinBox()
histogramSmoothingStdDevSpinBox.objectName = 'histogramSmoothingStdDevSpinBox'
histogramSmoothingStdDevSpinBox.toolTip = "Standard deviation of blur applied to histograms to convert them to probability density function estimates."
histogramSmoothingStdDevSpinBox.setMinimum(0.0)
histogramSmoothingStdDevSpinBox.setValue(5.0) # Default
histogramSmoothingStdDevSpinBox.setSingleStep(0.5)
paramsFormLayout.addRow("Probability Smoothing Standard Deviation:", histogramSmoothingStdDevSpinBox)
self.histogramSmoothingStdDevSpinBox = histogramSmoothingStdDevSpinBox
self.connections.append( (self.histogramSmoothingStdDevSpinBox, "valueChanged(double)", self.updateMRMLFromGUI ) )
# draft check box
draftCheckBox = qt.QCheckBox()
draftCheckBox.objectName = 'draftCheckBox'
draftCheckBox.toolTip = "Downsamples results by a factor of 4."
paramsFormLayout.addRow("Draft Mode:", draftCheckBox)
self.draftCheckBox = draftCheckBox
self.connections.append( (self.draftCheckBox, "stateChanged(int)", self.updateMRMLFromGUI ) )
# force classification check box
forceClassificationCheckBox = qt.QCheckBox()
forceClassificationCheckBox.objectName = 'forceClassificationCheckBox'
forceClassificationCheckBox.toolTip = "Perform the classification of all voxels?"
forceClassificationCheckBox.setChecked(False)
paramsFormLayout.addRow("Classify all voxels: ", forceClassificationCheckBox)
self.forceClassificationCheckBox = forceClassificationCheckBox
self.connections.append( (self.forceClassificationCheckBox, "stateChanged(int)", self.updateMRMLFromGUI ) )
# dilate first check box
dilateFirstCheckBox = qt.QCheckBox()
dilateFirstCheckBox.objectName = 'dilateFirstCheckBox'
dilateFirstCheckBox.toolTip = "Dilate and then erode so as to fill-in holes?"
dilateFirstCheckBox.setChecked(False)
paramsFormLayout.addRow("Dilate First: ", dilateFirstCheckBox)
self.dilateFirstCheckBox = dilateFirstCheckBox
self.connections.append( (self.dilateFirstCheckBox, "stateChanged(int)", self.updateMRMLFromGUI ) )
self.helpLabel = qt.QLabel("Run the PDF Segmentation on the current label map.", self.frame)
self.frame.layout().addWidget(self.helpLabel)
self.apply = qt.QPushButton("Apply", self.frame)
self.apply.setToolTip("Apply to run segmentation.\nCreates a new label volume using the current volume as input")
self.frame.layout().addWidget(self.apply)
self.widgets.append(self.apply)
EditorLib.HelpButton(self.frame, "Use this tool to apply segmentation using Parzen windowed PDFs.\n\n Select different label colors and paint on the foreground or background voxels using the paint effect.\nTo run segmentation correctly, you need to supply a minimum or two class labels.")
self.connections.append( (self.apply, 'clicked()', self.onApply) )
  def destroy(self):
    # Nothing effect-specific to tear down; defer entirely to the superclass.
    super(InteractiveConnectedComponentsUsingParzenPDFsOptions,self).destroy()
  # note: this method needs to be implemented exactly as-is
  # in each leaf subclass so that "self" in the observer
  # is of the correct type
  def updateParameterNode(self, caller, event):
    """Re-point our observation at the current editor parameter node.

    Called when the edit parameter node may have been replaced; swaps the
    ModifiedEvent observer so updateGUIFromMRML tracks the active node.
    """
    node = EditUtil.EditUtil().getParameterNode()
    if node != self.parameterNode:
      if self.parameterNode:
        # NOTE(review): this removes the observer from the *new* node using
        # the old tag; the old self.parameterNode looks like the intended
        # target -- confirm against upstream EditorLib before changing.
        node.RemoveObserver(self.parameterNodeTag)
      self.parameterNode = node
      self.parameterNodeTag = node.AddObserver(vtk.vtkCommand.ModifiedEvent, self.updateGUIFromMRML)
def setMRMLDefaults(self):
super(InteractiveConnectedComponentsUsingParzenPDFsOptions,self).setMRMLDefaults()
disableState = self.parameterNode.GetDisableModifiedEvent()
self.parameterNode.SetDisableModifiedEvent(1)
defaults = [
("outputVolume", "0"),
("labelmap", "0"),
("objectId", "1,2"),
("erodeRadius", "5"),
("holeFillIterations", "5"),
("objectPDFWeight", "1.0,1.0"),
("probImageSmoothingStdDev", "1.0"),
("histogramSmoothingStdDev", "5.0"),
("draft", "0"),
("forceClassification", "0"),
("dilateFirst", "0"),
]
for i in range(0, 2):
defaults.append(("additionalInputVolumeID" + str(i), "0"))
# Set logic here because this function is called before the end
# of the superclass constructor
self.logic = InteractiveConnectedComponentsUsingParzenPDFsLogic(None)
for default in defaults:
pvalue = self.getParameter(default[0])
if pvalue == "":
self.setParameter(default[0], default[1])
self.parameterNode.SetDisableModifiedEvent(disableState)
def updateGUIFromMRML(self,caller,event):
parameters = ["objectId",
"erodeRadius",
"holeFillIterations",
"objectPDFWeight",
"probImageSmoothingStdDev",
"histogramSmoothingStdDev",
"draft",
"forceClassification",
"dilateFirst",
]
for i in range(0, 2):
parameters.append("additionalInputVolumeID" + str(i))
for parameter in parameters:
if self.getParameter(parameter) == "":
# don't update if the parameter node has not got all values yet
return
super(InteractiveConnectedComponentsUsingParzenPDFsOptions,self).updateGUIFromMRML(caller,event)
self.disconnectWidgets()
# Additional inputs
for i in range(0, 2):
self.additionalInputNodeSelectors[i].currentNodeID = self.getParameter("additionalInputVolumeID" + str(i))
# labels
objectIds = self.logic.listFromStringList(self.getParameter("objectId"))
self.foregroundLabel.currentColor = objectIds[0]
self.backgroundLabel.currentColor = objectIds[1]
# Parameters
self.erosionSpinBox.value = int(self.getParameter("erodeRadius"))
self.holeFillSpinBox.value = int(self.getParameter("holeFillIterations"))
self.probabilitySmoothingStdDevSpinBox.value = float(self.getParameter("probImageSmoothingStdDev"))
self.histogramSmoothingStdDevSpinBox.value = float(self.getParameter("histogramSmoothingStdDev"))
self.draftCheckBox.setChecked(int(self.getParameter("draft")))
self.forceClassificationCheckBox.setChecked(int(self.getParameter("forceClassification")))
self.dilateFirstCheckBox.setChecked(int(self.getParameter("dilateFirst")))
self.connectWidgets()
  def onApply(self):
    """Save an undo checkpoint, then run the PDF segmenter CLI."""
    self.undoRedo.saveState()
    self.logic.applyPDFSegmenter()
def updateMRMLFromGUI(self):
if self.updatingGUI:
return
disableState = self.parameterNode.GetDisableModifiedEvent()
self.parameterNode.SetDisableModifiedEvent(1)
super(InteractiveConnectedComponentsUsingParzenPDFsOptions,self).updateMRMLFromGUI()
# Input
for i in range(0, 2):
self.setParameter("additionalInputVolumeID" + str(i), self.additionalInputNodeSelectors[i].currentNodeID)
# Labels
objectIds = (str(self.foregroundLabel.currentColor) + ","
+ str(self.backgroundLabel.currentColor)
)
self.setParameter("objectId", objectIds)
# Parameters
self.setParameter("erodeRadius", self.erosionSpinBox.text)
self.setParameter("holeFillIterations", self.holeFillSpinBox.text)
self.setParameter("probImageSmoothingStdDev", self.probabilitySmoothingStdDevSpinBox.text)
self.setParameter("histogramSmoothingStdDev", self.histogramSmoothingStdDevSpinBox.text)
self.setParameter("draft", str(int(self.draftCheckBox.isChecked())))
self.setParameter("forceClassification", str(int(self.forceClassificationCheckBox.isChecked())))
self.setParameter("dilateFirst", str(int(self.dilateFirstCheckBox.isChecked())))
self.parameterNode.SetDisableModifiedEvent(disableState)
if not disableState:
self.parameterNode.InvokePendingModifiedEvent()
def addInputNodeSelector(self, index, layout):
inputNodeSelector = slicer.qMRMLNodeComboBox()
inputNodeSelector.objectName = 'additionalInputNodeSelector'+str(index+1)
inputNodeSelector.nodeTypes = ['vtkMRMLScalarVolumeNode']
inputNodeSelector.noneEnabled = True
inputNodeSelector.addEnabled = False
inputNodeSelector.removeEnabled = False
inputNodeSelector.editEnabled = True
inputNodeSelector.enabled = 1
inputNodeSelector.setMRMLScene(slicer.mrmlScene)
layout.addRow("Additional Input Volume "+str(index+1)+":", inputNodeSelector)
self.connections.append( (inputNodeSelector, "currentNodeChanged(vtkMRMLNode*)", self.updateMRMLFromGUI ) )
return inputNodeSelector
  def setParameter(self, parameterName, value):
    # Delegate to the logic, which namespaces the parameter under the effect name.
    self.logic.setParameter(parameterName, value)
  def getParameter(self, parameterName):
    # Delegate to the logic, which namespaces the parameter under the effect name.
    return self.logic.getParameter(parameterName)
#
# EditorEffectTemplateTool
#
class InteractiveConnectedComponentsUsingParzenPDFsTool(LabelEffect.LabelEffectTool):
  """
  One instance of this will be created per-view when the effect
  is selected. It is responsible for implementing feedback and
  label map changes in response to user input.
  This class observes the editor parameter node to configure itself
  and queries the current view for background and label volume
  nodes to operate on.
  """

  def __init__(self, sliceWidget):
    super(InteractiveConnectedComponentsUsingParzenPDFsTool,self).__init__(sliceWidget)
    # create a logic instance to do the non-gui work
    self.logic = InteractiveConnectedComponentsUsingParzenPDFsLogic(self.sliceWidget.sliceLogic())

  def cleanup(self):
    super(InteractiveConnectedComponentsUsingParzenPDFsTool,self).cleanup()

  def processEvent(self, caller=None, event=None):
    """
    handle events from the render window interactor
    """
    # let the superclass deal with the event if it wants to
    if super(InteractiveConnectedComponentsUsingParzenPDFsTool,self).processEvent(caller,event):
      return
    if event == "LeftButtonPressEvent":
      xy = self.interactor.GetEventPosition()
      sliceLogic = self.sliceWidget.sliceLogic()
      logic = InteractiveConnectedComponentsUsingParzenPDFsLogic(sliceLogic)
      logic.apply(xy)
      # Fixed: the original passed the tuple as a second positional argument
      # to print(), which printed the raw "%s" format string and the tuple
      # side by side instead of interpolating the values.
      print("Got a %s at %s in %s"
            % (event, str(xy),
               self.sliceWidget.sliceLogic().GetSliceNode().GetName()))
      self.abortEvent(event)
    # All other interactor and slice-node events are intentionally ignored.
#
# EditorEffectTemplateLogic
#
class InteractiveConnectedComponentsUsingParzenPDFsLogic(LabelEffect.LabelEffectLogic):
  """
  This class contains helper methods for a given effect
  type. It can be instanced as needed by an EditorEffectTemplateTool
  or EditorEffectTemplateOptions instance in order to compute intermediate
  results (say, for user feedback) or to implement the final
  segmentation editing operation. This class is split
  from the EditorEffectTemplateTool so that the operations can be used
  by other code without the need for a view context.
  """

  def __init__(self,sliceLogic):
    super(InteractiveConnectedComponentsUsingParzenPDFsLogic,self).__init__(sliceLogic)
    # All parameters are namespaced under this effect name in the shared node.
    self.effectName = 'InteractiveConnectedComponentsUsingParzenPDFsOptions'
    self.parameterNode = self.editUtil.getParameterNode()

  def getCLINode(self, module, nodeName):
    """Return the CLI command node named *nodeName*, creating it on demand.

    Returns None when the node does not exist and *module* is a scripted
    module, since scripted modules cannot back a CLI node.
    """
    cliNode = slicer.mrmlScene.GetFirstNodeByName(nodeName)
    # Also check path to make sure the CLI isn't a scripted module.
    # (Fixed: identity comparison with None instead of "== None".)
    if cliNode is None and "qt-scripted-modules" not in module.path:
      cliNode = slicer.cli.createNode(module)
      cliNode.SetName(nodeName)
    return cliNode

  def setParameter(self, parameterName, value):
    """Write a namespaced parameter into the shared editor parameter node."""
    self.parameterNode.SetParameter(self.getFullParameterName(parameterName), value)

  def getParameter(self, parameterName):
    """Read a namespaced parameter from the shared editor parameter node."""
    return self.parameterNode.GetParameter(self.getFullParameterName(parameterName))

  def getFullParameterName(self, parameterName):
    """Return '<effectName>,<parameterName>', the node's storage key."""
    return self.effectName + ',' + parameterName

  def listFromStringList(self, stringlist):
    '''Convert a comma-separated string such as '1,2.5,3' to a list of
    numbers; each entry is parsed as an int when possible, else a float.'''
    # Renamed locals: the originals shadowed the builtins "list" and "str"-like
    # names, which is error-prone.
    values = []
    for token in stringlist.split(","):
      try:
        values.append(int(token))
      except ValueError:
        values.append(float(token))
    return values

  def apply(self,xy):
    # Point-based application is not implemented for this effect.
    pass

  def applyPDFSegmenter(self):
    """Collect parameters from the parameter node and run the PDF segmenter
    CLI on the current background and label volumes."""
    if not self.sliceLogic:
      self.sliceLogic = self.editUtil.getSliceLogic()
    cliParameters = {}
    # IO
    cliParameters["inputVolume1"] = self.editUtil.getBackgroundVolume()
    for i in range(0,2):
      # Get optional additional input nodes by their stored IDs
      nodeID = self.getParameter("additionalInputVolumeID" + str(i))
      cliParameters["inputVolume"+str(i+2)] = slicer.mrmlScene.GetNodeByID(nodeID)
    # The label volume serves as both the input labelmap and the output.
    cliParameters["labelmap"] = self.editUtil.getLabelVolume()
    cliParameters["outputVolume"] = self.editUtil.getLabelVolume()
    # Labels
    cliParameters["objectId"] = self.listFromStringList(self.getParameter("objectId"))
    # Parameters
    cliParameters["erodeRadius"] = int(self.getParameter("erodeRadius"))
    cliParameters["holeFillIterations"] = int(self.getParameter("holeFillIterations"))
    cliParameters["objectPDFWeight"] = self.listFromStringList(self.getParameter("objectPDFWeight"))
    cliParameters["probImageSmoothingStdDev"] = float(self.getParameter("probImageSmoothingStdDev"))
    cliParameters["histogramSmoothingStdDev"] = float(self.getParameter("histogramSmoothingStdDev"))
    cliParameters["draft"] = int(self.getParameter("draft"))
    cliParameters["forceClassification"] = int(self.getParameter("forceClassification"))
    cliParameters["dilateFirst"] = int(self.getParameter("dilateFirst"))
    module = slicer.modules.segmentconnectedcomponentsusingparzenpdfs
    cliNode = self.getCLINode(module, "PDFSegmenterEditorEffect")
    slicer.cli.run(module, cliNode, cliParameters)
#
# The InteractiveConnectedComponentsUsingParzenPDFs Template class definition
#
class InteractiveConnectedComponentsUsingParzenPDFsExtension(LabelEffect.LabelEffect):
  """Organizes the Options, Tool, and Logic classes into a single instance
  that can be managed by the EditBox
  """
  def __init__(self):
    # name is used to define the name of the icon image resource (e.g. EditorEffectTemplate.png)
    self.name = "InteractiveConnectedComponentsUsingParzenPDFs"
    # title shown for this effect in the editor UI
    self.title = "InteractiveConnectedComponentsUsingParzenPDFs"
    # tool tip is displayed on mouse hover
    self.toolTip = "Perform PDF Segmentation"
    # The EditBox instantiates these classes on demand.
    self.options = InteractiveConnectedComponentsUsingParzenPDFsOptions
    self.tool = InteractiveConnectedComponentsUsingParzenPDFsTool
    self.logic = InteractiveConnectedComponentsUsingParzenPDFsLogic
#
# EditorEffectTemplate
#
class InteractiveConnectedComponentsUsingParzenPDFs:
  """
  This class is the 'hook' for slicer to detect and recognize the extension
  as a loadable scripted module
  """
  def __init__(self, parent):
    """Describe the module to Slicer and register the editor extension."""
    parent.title = "Editor InteractiveConnectedComponentsUsingParzenPDFs Effect"
    parent.categories = ["Developer Tools.Editor Extensions"]
    parent.contributors = [
      "Danielle Pace (Kitware)",
      "Christopher Mullins (Kitware)",
      "Stephen Aylward (Kitware)",
      "Johan Andruejol (Kitware)",
    ]
    parent.helpText = """
    The PDF Segmenter is a framework for using connected components in
    conjunction with intensity histograms for classifying images in pixel space.
    This module is available as an editor tool via the editor module in Slicer.
    This module cannot be run as a standard module in Slicer.
    """
    parent.acknowledgementText = """
    This work is part of the TubeTK project at Kitware.
    Module implemented by Danielle Pace. PDF Segmenter implemented by
    Stephen Aylward.
    """
    # TODO:
    # don't show this module - it only appears in the Editor module
    #parent.hidden = True
    # Register with the editor's extension registry for discovery when the
    # Editor module is created; create the registry if this module happens
    # to be discovered before the Editor itself.
    if not hasattr(slicer.modules, 'editorExtensions'):
      slicer.modules.editorExtensions = {}
    slicer.modules.editorExtensions['InteractiveConnectedComponentsUsingParzenPDFs'] = InteractiveConnectedComponentsUsingParzenPDFsExtension
#
# EditorEffectTemplateWidget
#
class InteractiveConnectedComponentsUsingParzenPDFsWidget:
  """Placeholder module GUI; the real UI lives inside the Editor module."""
  def __init__(self, parent = None):
    self.parent = parent
  def setup(self):
    # don't display anything for this widget - it will be hidden anyway
    pass
  def enter(self):
    # called when the user switches to this module; nothing to do
    pass
  def exit(self):
    # called when the user leaves this module; nothing to do
    pass
| 44.138169 | 292 | 0.755204 | 25,106 | 0.982392 | 0 | 0 | 0 | 0 | 0 | 0 | 7,704 | 0.301456 |
e45d9ac1d7f7347063075b259a658688aa945eb7 | 415 | py | Python | category/urls.py | amin-bahiraei-75/shop_back | afcc5907fe33de2db1615f14df71443d1a35bbd0 | [
"MIT"
] | 1 | 2021-12-24T15:20:37.000Z | 2021-12-24T15:20:37.000Z | category/urls.py | amin-bahiraei-75/shop_back | afcc5907fe33de2db1615f14df71443d1a35bbd0 | [
"MIT"
] | null | null | null | category/urls.py | amin-bahiraei-75/shop_back | afcc5907fe33de2db1615f14df71443d1a35bbd0 | [
"MIT"
] | null | null | null | from django.urls import path
from category.views import List,Detail,Create,Delete,Update,Search,All
urlpatterns = [
    # Collection and search endpoints.
    path('all', All.as_view()),
    path('list/<int:pk>', List.as_view()),
    path('search/<str:pk>', Search.as_view()),
    # Single-object CRUD endpoints, keyed by primary key.
    path('detail/<int:pk>', Detail.as_view()),
    path('create', Create.as_view()),
    path('delete/<int:pk>', Delete.as_view()),
    path('update/<int:pk>', Update.as_view()),
]
e460d64b915b9a1607000858e70b226926b3124a | 3,488 | py | Python | led_motor_switch.py | scarmel/iot-demo | 02c6d810098720803196bf32ee1780925011f57c | [
"Apache-2.0"
] | null | null | null | led_motor_switch.py | scarmel/iot-demo | 02c6d810098720803196bf32ee1780925011f57c | [
"Apache-2.0"
] | null | null | null | led_motor_switch.py | scarmel/iot-demo | 02c6d810098720803196bf32ee1780925011f57c | [
"Apache-2.0"
] | null | null | null | # ------------------------------------------
# Description: This python script will update AWS Thing Shadow for a Device/Thing
# ------------------------------------------
# Import package
import paho.mqtt.client as mqtt
import ssl, time, sys
# =======================================================
# Set Following Variables
# AWS IoT Endpoint
MQTT_HOST = "your aws iot endpoint"
# CA Root Certificate File Path
CA_ROOT_CERT_FILE = "path for the aws root certificate file"
# AWS IoT Thing Name
THING_NAME = "your thing name"
# AWS IoT Thing Certificate File Path
THING_CERT_FILE = "path for your device certificate file"
# AWS IoT Thing Private Key File Path
THING_PRIVATE_KEY_FILE = "path for your device private key"
# =======================================================
# =======================================================
# No need to change following variables
MQTT_PORT = 8883
MQTT_KEEPALIVE_INTERVAL = 45
SHADOW_UPDATE_TOPIC = "$aws/things/" + THING_NAME + "/shadow/update"
SHADOW_UPDATE_ACCEPTED_TOPIC = "$aws/things/" + THING_NAME + "/shadow/update/accepted"
SHADOW_UPDATE_REJECTED_TOPIC = "$aws/things/" + THING_NAME + "/shadow/update/rejected"
SHADOW_STATE_DOC_LED_ON = """{"state" : {"desired" : {"LED" : "ON"}}}"""
SHADOW_STATE_DOC_LED_OFF = """{"state" : {"desired" : {"LED" : "OFF"}}}"""
RESPONSE_RECEIVED = False
# =======================================================
# Initiate MQTT Client
mqttc = mqtt.Client("led_switch_client")
def on_connect(mosq, obj, rc, another):
    # On broker connect, subscribe (QoS 1) to the shadow update
    # accepted/rejected topics so on_message sees AWS IoT's verdict.
    for topic in (SHADOW_UPDATE_ACCEPTED_TOPIC, SHADOW_UPDATE_REJECTED_TOPIC):
        mqttc.subscribe(topic, 1)
# Define on_message event function.
# This function will be invoked every time,
# a new message arrives for the subscribed topic
def on_message(mosq, obj, msg):
if str(msg.topic) == SHADOW_UPDATE_ACCEPTED_TOPIC:
print "\n---SUCCESS---\nShadow State Doc Accepted by AWS IoT."
print "Response JSON:\n" + str(msg.payload)
elif str(msg.topic) == SHADOW_UPDATE_REJECTED_TOPIC:
print "\n---FAILED---\nShadow State Doc Rejected by AWS IoT."
print "Error Response JSON:\n" + str(msg.payload)
else:
print "AWS Response Topic: " + str(msg.topic)
print "QoS: " + str(msg.qos)
print "Payload: " + str(msg.payload)
# Disconnect from MQTT_Broker
mqttc.disconnect()
global RESPONSE_RECEIVED
RESPONSE_RECEIVED = True
# Register callback functions
mqttc.on_message = on_message
mqttc.on_connect = on_connect
# Configure TLS Set
mqttc.tls_set(CA_ROOT_CERT_FILE, certfile=THING_CERT_FILE, keyfile=THING_PRIVATE_KEY_FILE, cert_reqs=ssl.CERT_REQUIRED,
tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None)
# Connect with MQTT Broker
mqttc.connect(MQTT_HOST, MQTT_PORT, MQTT_KEEPALIVE_INTERVAL)
mqttc.loop_start()
print "Enter 1 to Turn On the LED"
print "Enter 2 to Turn OFF the LED"
print "Enter 3 to exit"
data = raw_input("Select an option:")
if data == "1":
mqttc.publish(SHADOW_UPDATE_TOPIC, SHADOW_STATE_DOC_LED_ON, qos=1)
elif data == "2":
mqttc.publish(SHADOW_UPDATE_TOPIC, SHADOW_STATE_DOC_LED_OFF, qos=1)
elif data == "3":
sys.exit()
else:
print("Invalid input try again...")
sys.exit()
# Wait for Response
Counter = 1
while True:
if RESPONSE_RECEIVED == True:
break
print "I have finished my work!!!"
# time.sleep(1)
# if Counter == 10:
# print "No response from AWS IoT. Check your Settings."
# break
# elif RESPONSE_RECEIVED == True:
# break
| 32.90566 | 119 | 0.65539 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,755 | 0.503154 |
e462bb80e8e5cfe48f10d58ffcdefb6c7a4fc2ec | 680 | py | Python | test.py | jsayles/LPD8806 | 6f13b65ae92f3bd903df684459964b8f5f621942 | [
"MIT"
] | null | null | null | test.py | jsayles/LPD8806 | 6f13b65ae92f3bd903df684459964b8f5f621942 | [
"MIT"
] | null | null | null | test.py | jsayles/LPD8806 | 6f13b65ae92f3bd903df684459964b8f5f621942 | [
"MIT"
] | null | null | null | import time
from lightpi.hardware import strip, string1, string2
# Pause between test steps, in seconds.
DELAY_SEC = 0.3
# Test the RGB Strip
# Cycle through the primary colours with a pause between each, then switch off.
strip.red()
time.sleep(DELAY_SEC)
strip.green()
time.sleep(DELAY_SEC)
strip.blue()
time.sleep(DELAY_SEC)
strip.off()
# Test the LED Strings
# Pulse each string on then off in turn (note: no pause after the final off).
string1.on()
time.sleep(DELAY_SEC)
string1.off()
time.sleep(DELAY_SEC)
string2.on()
time.sleep(DELAY_SEC)
string2.off()
################################################################################
# Helper Methods
################################################################################
def allOn():
    """Turn everything on: RGB strip to white plus both LED strings."""
    strip.white()
    for led_string in (string1, string2):
        led_string.on()
def allOff():
    """Turn everything off: the RGB strip and both LED strings."""
    strip.off()
    for led_string in (string1, string2):
        led_string.off()
| 17.435897 | 80 | 0.535294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 218 | 0.320588 |
e4634c0a0adb3cc0d16bbbb61f40f718de94ef2b | 3,141 | py | Python | wind_direction.py | simseve/weatherstation | 68196a032a2cd39062f3924ce6d386f5f54af393 | [
"MIT"
] | null | null | null | wind_direction.py | simseve/weatherstation | 68196a032a2cd39062f3924ce6d386f5f54af393 | [
"MIT"
] | null | null | null | wind_direction.py | simseve/weatherstation | 68196a032a2cd39062f3924ce6d386f5f54af393 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# wind_direction.py
#
# Copyright 2020 <Simone Severini>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import time
import board
import busio
import adafruit_ads1x15.ads1115 as ADS
from adafruit_ads1x15.analog_in import AnalogIn
class wind_direction():
    """Wind vane reader: samples an ADS1115 ADC over I2C and maps the
    analog level to a compass label and heading in degrees."""

    # (low, high, label, degrees): inclusive ADC reading window for each
    # calibrated vane position.  Windows do not overlap.
    _SECTORS = (
        (20000, 20500, "N", 0),
        (10000, 10500, "NNE", 22.5),
        (11500, 12000, "NE", 45),
        (2000, 2250, "ENE", 67.5),
        (2300, 2500, "E", 90),
        (1500, 1950, "ESE", 112.5),
        (4500, 4900, "SE", 135),
        (3000, 3500, "SSE", 157.5),
        (7000, 7500, "S", 180),
        (6000, 6500, "SSW", 202.5),
        (16000, 16500, "SW", 225),
        (15000, 15500, "WSW", 247.5),
        (24000, 24500, "W", 270),
        (21000, 21500, "WNW", 292.5),
        (22500, 23000, "NW", 315),
        (17500, 18500, "NNW", 337.5),
    )

    def __init__(self):
        # Create the I2C bus and the ADC object that reads the vane.
        self.i2c = busio.I2C(board.SCL, board.SDA)
        self.ads = ADS.ADS1115(self.i2c)
        self.ads.gain = 1

    def get_wind_dir(self):
        """Sample channel P0 and return (direction_label, degrees).

        A reading that falls outside every calibrated window yields
        ("Not Connected", 999).
        """
        self.chan = AnalogIn(self.ads, ADS.P0)
        self.val = self.chan.value
        self.windDir = "Not Connected"
        self.windDeg = 999
        for low, high, label, degrees in self._SECTORS:
            if low <= self.val <= high:
                self.windDir = label
                self.windDeg = degrees
        return self.windDir, self.windDeg
| 27.313043 | 71 | 0.561605 | 2,166 | 0.689589 | 0 | 0 | 0 | 0 | 0 | 0 | 1,009 | 0.321235 |
e4639c8948f8a93b0256a4c34b5d407b8adc42bc | 3,875 | py | Python | oswin_tempest_plugin/tests/_mixins/migrate.py | openstack/oswin-tempest-plugin | 59e6a14d01dda304c7d11fda1d35198f25799d6c | [
"Apache-2.0"
] | 6 | 2017-10-31T10:40:24.000Z | 2019-01-28T22:08:15.000Z | oswin_tempest_plugin/tests/_mixins/migrate.py | openstack/oswin-tempest-plugin | 59e6a14d01dda304c7d11fda1d35198f25799d6c | [
"Apache-2.0"
] | null | null | null | oswin_tempest_plugin/tests/_mixins/migrate.py | openstack/oswin-tempest-plugin | 59e6a14d01dda304c7d11fda1d35198f25799d6c | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.common import waiters
import testtools
from oswin_tempest_plugin import config
CONF = config.CONF
class _MigrateMixin(object):
    """Cold migration mixin.

    Adds a cold migration test that:

    * spawns an instance,
    * cold migrates it, and
    * checks the server connectivity afterwards.
    """

    def _migrate_server(self, server_tuple):
        """Cold-migrate the given server and confirm the resulting resize."""
        instance = server_tuple.server
        self.admin_servers_client.migrate_server(instance['id'])
        self._wait_for_server_status(instance, 'VERIFY_RESIZE')
        self.servers_client.confirm_resize_server(instance['id'])

    @testtools.skipUnless(CONF.compute.min_compute_nodes >= 2,
                          'Expected at least 2 compute nodes.')
    def test_migration(self):
        created = self._create_server()
        self._migrate_server(created)
        self._check_scenario(created)
class _LiveMigrateMixin(object):
    """Live migration mixin.
    This mixin will add a live migration test. It will perform the
    following operations:
    * Spawn instance.
    * Live migrate the instance.
    * Check the server connectivity.
    """
    # TODO(amuresan): Different mixins may be used at the same time.
    # Each of them may override some fields such as
    # 'max_microversion'. This has to be sorted out.
    max_microversion = '2.24'
    def _live_migrate_server(self, server_tuple, destination_host=None,
                             state='ACTIVE', volume_backed=False):
        """Live-migrate the server, then assert it actually moved.

        If destination_host is given, asserts the server landed exactly
        there; otherwise asserts it left its original host. On failure the
        assertion message includes this instance's migration records.
        """
        server = server_tuple.server
        admin_server = self._get_server_as_admin(server)
        current_host = admin_server['OS-EXT-SRV-ATTR:host']
        # Block migration is incompatible with volume-backed instances.
        block_migration = (CONF.compute_feature_enabled.
                           block_migration_for_live_migration
                           and not volume_backed)
        self.admin_servers_client.live_migrate_server(
            server['id'],
            host=destination_host,
            block_migration=block_migration,
            disk_over_commit=False)
        waiters.wait_for_server_status(self.admin_servers_client, server['id'],
                                       state)
        # Re-fetch to learn which host the server ended up on.
        admin_server = self._get_server_as_admin(server)
        after_migration_host = admin_server['OS-EXT-SRV-ATTR:host']
        migration_list = (self.admin_migrations_client.list_migrations()
                          ['migrations'])
        # Build a diagnostic message from this server's migration records.
        msg = ("Live Migration failed. Migrations list for Instance "
               "%s: [" % server['id'])
        for live_migration in migration_list:
            if live_migration['instance_uuid'] == server['id']:
                msg += "\n%s" % live_migration
        msg += "]"
        if destination_host:
            self.assertEqual(destination_host, after_migration_host, msg)
        else:
            self.assertNotEqual(current_host, after_migration_host, msg)
    @testtools.skipUnless(CONF.compute_feature_enabled.live_migration,
                          'Live migration option enabled.')
    def test_live_migration(self):
        """Spawn a server, live-migrate it, and re-check the scenario."""
        server_tuple = self._create_server()
        self._live_migrate_server(server_tuple)
        self._check_scenario(server_tuple)
| 35.87963 | 79 | 0.659613 | 3,117 | 0.804387 | 0 | 0 | 580 | 0.149677 | 0 | 0 | 1,515 | 0.390968 |
e466a39aa7123e6924bb036424ddce439a785489 | 2,572 | py | Python | articulation_structure/nodes/process_bags.py | tum-vision/articulation | 3bb714fcde14b8d47977bd3b3da2c2cd13ebe685 | [
"BSD-2-Clause"
] | 3 | 2017-03-15T16:50:05.000Z | 2021-02-28T05:27:24.000Z | articulation_structure/nodes/process_bags.py | AbdelrahmanElsaid/articulation | 3bb714fcde14b8d47977bd3b3da2c2cd13ebe685 | [
"BSD-2-Clause"
] | null | null | null | articulation_structure/nodes/process_bags.py | AbdelrahmanElsaid/articulation | 3bb714fcde14b8d47977bd3b3da2c2cd13ebe685 | [
"BSD-2-Clause"
] | 7 | 2015-07-14T14:47:51.000Z | 2018-04-02T16:22:23.000Z | #!/usr/bin/python
import rospy
import rosbag
import time
def callback(topic, msg):
global current_time
try:
header = getattr(msg, "header")
if header.stamp < current_time:
print "received old message, skipping '%s'" % topic
return
except AttributeError:
pass
global result
result[topic] = msg
def wait_for_result(timeout, out_topics):
    """Poll until every expected out-topic has replied, rospy shuts down,
    or *timeout* seconds elapse.  Returns True when all replies arrived."""
    elapsed = 0
    while len(result) != len(out_topics) and not rospy.is_shutdown() and timeout > elapsed:
        rospy.sleep(0.1)
        elapsed += 0.1
    return len(result) == len(out_topics)
def process_bags(infile, in_topics, in_types,
outfile, out_topics, out_types,
timeout):
publishers = dict([ (topic, rospy.Publisher(topic, t))
for topic, t in zip(in_topics, in_types) ])
# subscribers=[
# rospy.Subscriber(out_topic,out_type,lambda x:callback(out_topic,x))
# for out_topic,out_type in zip(out_topics,out_types) ]
if(len(out_topics) > 0):
rospy.Subscriber(out_topics[0], out_types[0], lambda x:callback(out_topics[0], x))
if(len(out_topics) > 1):
rospy.Subscriber(out_topics[1], out_types[1], lambda x:callback(out_topics[1], x))
if(len(out_topics) > 2):
rospy.Subscriber(out_topics[2], out_types[2], lambda x:callback(out_topics[2], x))
if(len(out_topics) > 3):
rospy.Subscriber(out_topics[3], out_types[3], lambda x:callback(out_topics[3], x))
if(len(out_topics) > 4):
print "lambda trouble, add more lines.."
return
global result
result = {}
outbag = rosbag.Bag(outfile, "w")
global current_time
current_time = rospy.get_rostime()
for topic, msg, t in rosbag.Bag(infile).read_messages(topics=in_topics):
if(t != current_time):
print "frame: ", current_time
current_time = t
in_msgs = {}
in_msgs[topic] = msg
if len(in_msgs) == len(in_topics):
result = {}
print " in topics:"
for topic, msg in in_msgs.iteritems():
print " %s" % topic
publishers[topic].publish(msg)
t0 = time.time()
complete = wait_for_result(timeout, out_topics)
print " out topics (time=%f):" % (time.time() - t0)
for topic, msg in result.iteritems():
print " %s" % topic
if complete:
if(time.time() - t0 > timeout * 0.5):
timeout = (time.time() - t0) * 2
print " increasing timeout to %f" % timeout
for topic, msg in in_msgs.iteritems():
outbag.write(topic, msg, t)
for topic, msg in result.iteritems():
outbag.write(topic, msg, t)
print " saving frame"
else:
print " skipping frame"
if rospy.is_shutdown():
break;
outbag.close()
| 25.72 | 84 | 0.66563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 419 | 0.162908 |