hexsha (string, len 40) | size (int64, 5-2.06M) | ext (string, 11 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 3-251) | max_stars_repo_name (string, len 4-130) | max_stars_repo_head_hexsha (string, len 40-78) | max_stars_repo_licenses (list, len 1-10) | max_stars_count (int64, 1-191k, nullable) | max_stars_repo_stars_event_min_datetime (string, len 24, nullable) | max_stars_repo_stars_event_max_datetime (string, len 24, nullable) | max_issues_repo_path (string, len 3-251) | max_issues_repo_name (string, len 4-130) | max_issues_repo_head_hexsha (string, len 40-78) | max_issues_repo_licenses (list, len 1-10) | max_issues_count (int64, 1-116k, nullable) | max_issues_repo_issues_event_min_datetime (string, len 24, nullable) | max_issues_repo_issues_event_max_datetime (string, len 24, nullable) | max_forks_repo_path (string, len 3-251) | max_forks_repo_name (string, len 4-130) | max_forks_repo_head_hexsha (string, len 40-78) | max_forks_repo_licenses (list, len 1-10) | max_forks_count (int64, 1-105k, nullable) | max_forks_repo_forks_event_min_datetime (string, len 24, nullable) | max_forks_repo_forks_event_max_datetime (string, len 24, nullable) | content (string, len 1-1.05M) | avg_line_length (float64, 1-1.02M) | max_line_length (int64, 3-1.04M) | alphanum_fraction (float64, 0-1)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3e105caf515da97595cf131c9228511ab5a47c2b | 313 | py | Python | 2-mouth02/socket/communnication.py | gary-gggggg/gary | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | ["Apache-2.0"] | 4 | 2021-02-01T10:28:11.000Z | 2021-02-01T10:34:40.000Z | 2-mouth02/socket/communnication.py | gary-gggggg/gary | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | ["Apache-2.0"] | null | null | null | 2-mouth02/socket/communnication.py | gary-gggggg/gary | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | ["Apache-2.0"] | null | null | null |
from socket import *
a=input("IP")
b=input("")
ADDR = ("176.17.12.178", 31414)
giao = socket(AF_INET, SOCK_DGRAM)
while 1:
m = input(":")
if not m:
break
else:
giao.sendto(m.encode(), ADDR)
d, a = giao.recvfrom(1024)
print("", d.decode())
giao.close()
| 18.411765 | 37 | 0.5623 |
3e11bd4f5fe50f533d78b84480d62520eb696807 | 151 | py | Python | NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | ["Apache-2.0"] | null | null | null | NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | ["Apache-2.0"] | null | null | null | NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | ["Apache-2.0"] | 1 | 2018-11-01T11:10:58.000Z | 2018-11-01T11:10:58.000Z |
from django.conf.urls import url
from django.conf import settings
from panel.views import *
urlpatterns = [
url(r'^$', index, name='index'),
]
| 13.727273 | 36 | 0.688742 |
3e11beb96e30d1e453934e9af1acf5d6478cd742 | 244 | py | Python | nice_paintig.py | rushdi21-meet/meet2019y1lab6 | e87c2f04593c8f7e3a5c1c66260c49a3690db90c | ["MIT"] | null | null | null | nice_paintig.py | rushdi21-meet/meet2019y1lab6 | e87c2f04593c8f7e3a5c1c66260c49a3690db90c | ["MIT"] | null | null | null | nice_paintig.py | rushdi21-meet/meet2019y1lab6 | e87c2f04593c8f7e3a5c1c66260c49a3690db90c | ["MIT"] | null | null | null |
import turtle
color = ["green", "yellow", "orange", "blue", "purple", "red", "pink"]
x=10
y= 270
i=0
turtle.bgcolor("black")
while True:
    turtle.color(color[i % len(color)])  # cycle through the palette instead of always drawing in the first color
turtle.forward(x)
turtle.left(y)
x+=10
y-=1
i+=1
turtle.mainloop()
| 16.266667 | 63 | 0.614754 |
3e1247da76756de4876b84765ac8609022ba7513 | 2,446 | py | Python | enzynet/models.py | gdarkwah/enzynet | 7367635ae73595822133577054743a4c4c327cf3 | ["MIT"] | 189 | 2017-07-20T22:16:22.000Z | 2022-02-21T17:57:41.000Z | enzynet/models.py | gdarkwah/enzynet | 7367635ae73595822133577054743a4c4c327cf3 | ["MIT"] | 16 | 2019-05-09T14:47:44.000Z | 2021-09-19T00:25:59.000Z | enzynet/models.py | gdarkwah/enzynet | 7367635ae73595822133577054743a4c4c327cf3 | ["MIT"] | 93 | 2017-07-20T22:55:41.000Z | 2022-03-12T19:42:14.000Z |
"""Model definitions."""
# Authors: Afshine Amidi <[email protected]>
# Shervine Amidi <[email protected]>
# MIT License
import numpy as np
from enzynet import constants
from keras import initializers
from keras import layers
from keras.layers import advanced_activations
from keras import models
from keras import regularizers
def enzynet(input_v_size: int, n_channels: int) -> models.Sequential:
"""Returns EnzyNet as a Keras model."""
# Parameters.
stddev_conv3d = np.sqrt(2.0/n_channels)
# Initialization.
model = models.Sequential()
# Add layers.
model.add(
layers.Conv3D(
filters=32,
kernel_size=9,
strides=2,
padding='valid',
kernel_initializer=initializers.RandomNormal(
mean=0.0,
stddev=stddev_conv3d * 9 ** (-3 / 2)),
bias_initializer='zeros',
kernel_regularizer=regularizers.l2(0.001),
bias_regularizer=None,
input_shape=(input_v_size,)*constants.N_DIMENSIONS + (n_channels,)))
model.add(advanced_activations.LeakyReLU(alpha=0.1))
model.add(layers.Dropout(rate=0.2))
model.add(
layers.Conv3D(
filters=64,
kernel_size=5,
strides=1,
padding='valid',
kernel_initializer=initializers.RandomNormal(
mean=0.0,
stddev=stddev_conv3d * 5 ** (-3 / 2)),
bias_initializer='zeros',
kernel_regularizer=regularizers.l2(0.001),
bias_regularizer=None))
model.add(advanced_activations.LeakyReLU(alpha=0.1))
model.add(layers.MaxPooling3D(pool_size=(2, 2, 2)))
model.add(layers.Dropout(rate=0.3))
model.add(layers.Flatten())
model.add(
layers.Dense(
units=128,
kernel_initializer=initializers.RandomNormal(mean=0.0, stddev=0.01),
bias_initializer='zeros',
kernel_regularizer=regularizers.l2(0.001),
bias_regularizer=None))
model.add(layers.Dropout(rate=0.4))
model.add(
layers.Dense(
units=constants.N_CLASSES,
kernel_initializer=initializers.RandomNormal(mean=0.0, stddev=0.01),
bias_initializer='zeros',
kernel_regularizer=regularizers.l2(0.001),
bias_regularizer=None))
model.add(layers.Activation('softmax'))
return model
| 28.114943 | 80 | 0.618561 |
3e13f1a614fdcd99556bcda63d31e15a470031fa | 998 | py | Python | entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | ["MIT"] | 1 | 2019-09-23T20:00:52.000Z | 2019-09-23T20:00:52.000Z | entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | ["MIT"] | null | null | null | entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | ["MIT"] | null | null | null |
from db import db
| 39.92 | 99 | 0.700401 |
3e140c63bd33992dd5d90e07a79edb1db5f260ce | 10,357 | py | Python | FeatureCloud/api/cli/test/commands.py | FeatureCloud/FeatureCloud | 3421bc9621201ae4a888192f09886122b0cb571a | ["Apache-2.0"] | null | null | null | FeatureCloud/api/cli/test/commands.py | FeatureCloud/FeatureCloud | 3421bc9621201ae4a888192f09886122b0cb571a | ["Apache-2.0"] | null | null | null | FeatureCloud/api/cli/test/commands.py | FeatureCloud/FeatureCloud | 3421bc9621201ae4a888192f09886122b0cb571a | ["Apache-2.0"] | null | null | null |
import os
import click
import requests
from FeatureCloud.api.imp.exceptions import FCException
from FeatureCloud.api.imp.test import commands
from FeatureCloud.api.cli.test.workflow.commands import workflow
test.add_command(workflow)
if __name__ == "__main__":
test()
| 51.527363 | 220 | 0.663609 |
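Note on the FeatureCloud snippet above: it registers `workflow` on a `test` object that the excerpt never defines; in context it is presumably a Click command group defined in the truncated part of the file. A minimal, purely illustrative sketch of such a group (the name handling and docstring are assumptions, not the project's actual code):

import click

@click.group()
def test():
    """Hypothetical parent group that the truncated module would define."""
    pass

With a group like this in place, `test.add_command(workflow)` and the `test()` call under `__main__` behave as an ordinary Click CLI.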
3e14c4fe464f76c3e655c88c87bd66bc84933f25 | 4,188 | py | Python | axi_plot/utils.py | zoso95/axi_plot | 1a8c1f601c75e149d60377ccc4a437c33b3620bb | ["MIT"] | null | null | null | axi_plot/utils.py | zoso95/axi_plot | 1a8c1f601c75e149d60377ccc4a437c33b3620bb | ["MIT"] | null | null | null | axi_plot/utils.py | zoso95/axi_plot | 1a8c1f601c75e149d60377ccc4a437c33b3620bb | ["MIT"] | null | null | null |
import subprocess
import logging
import os, time
from pathlib import Path
from shutil import copyfile
import pandas as pd
from datetime import datetime
def res_plot(filename, config, checkpoint_file):
"""
base_commands = ['axicli', filename, '--config', config, '--mode', 'res_plot']
end_commands = ['-o', checkpoint_file]
commands = base_commands + end_commands
process = subprocess.run(commands, stdout=subprocess.PIPE, universal_newlines=True)
return process.stdout
"""
raise Exception()
def backup_drawing(file):
"""
Check to see if $PLOTTER_BACKUP exists. If it does, then copy over the file
    if it doesn't exist, and add to the print logs that we are printing it.
"""
if 'PLOTTER_BACKUP' in os.environ:
logging.info("backing up {}".format(file))
filename = os.path.basename(file)
backup_dir = os.path.join(os.environ.get('PLOTTER_BACKUP'))
backup_path = os.path.join(backup_dir, filename)
if not os.path.exists(backup_path):
copyfile(file, backup_path)
print_logs = os.path.join(backup_dir, "print_logs.csv")
if os.path.exists(print_logs):
logs = pd.read_csv(print_logs)
else:
logs = pd.DataFrame({})
df = pd.DataFrame([{'name':filename, 'time_printed':datetime.now().strftime('%Y-%m-%d %H:%M')}], columns=['name', 'time_printed'])
logs = logs.append(df, sort=False)
logs.to_csv(print_logs, index=False)
else:
logging.info("Skipping backup for {}, no $PLOTTER_BACKUP path given".format(file))
| 36.417391 | 138 | 0.680755 |
3e14f76f2adf0f315a94c191c5946f1de65d9fa9 | 5,258 | py | Python | scripts/regions_optimize.py | jason-neal/Starfish | 4ffa45e0190fb6f3262511d57d1a563e5ee711de | ["BSD-3-Clause"] | 1 | 2017-07-10T00:06:36.000Z | 2017-07-10T00:06:36.000Z | scripts/regions_optimize.py | jason-neal/Starfish | 4ffa45e0190fb6f3262511d57d1a563e5ee711de | ["BSD-3-Clause"] | null | null | null | scripts/regions_optimize.py | jason-neal/Starfish | 4ffa45e0190fb6f3262511d57d1a563e5ee711de | ["BSD-3-Clause"] | 5 | 2016-06-11T09:48:16.000Z | 2019-08-07T19:52:41.000Z |
#!/usr/bin/env python
import argparse
parser = argparse.ArgumentParser(prog="region_optimize.py", description="Find the kernel parameters for Gaussian region zones.")
parser.add_argument("spectrum", help="JSON file containing the data, model, and residual.")
parser.add_argument("--sigma0", type=float, default=2, help="(AA) to use in fitting")
args = parser.parse_args()
import json
import numpy as np
from scipy.optimize import fmin
from scipy.linalg import cho_factor, cho_solve
from numpy.linalg import slogdet
import Starfish
from Starfish.model import PhiParam
from Starfish.covariance import get_dense_C, make_k_func
from Starfish import constants as C
# Load the spectrum and then take the data products.
f = open(args.spectrum, "r")
read = json.load(f) # read is a dictionary
f.close()
wl = np.array(read["wl"])
# data_full = np.array(read["data"])
# model = np.array(read["model"])
resid = np.array(read["resid"])
sigma = np.array(read["sigma"])
spectrum_id = read["spectrum_id"]
order = read["order"]
fname = Starfish.specfmt.format(spectrum_id, order) + "regions.json"
f = open(fname, "r")
read = json.load(f) # read is a dictionary
f.close()
mus = np.array(read["mus"])
assert spectrum_id == read["spectrum_id"], "Spectrum/Order mismatch"
assert order == read["order"], "Spectrum/Order mismatch"
# Load the guesses for the global parameters from the .json
# If the file exists, optionally initialize to the Chebyshev values
fname = Starfish.specfmt.format(spectrum_id, order) + "phi.json"
try:
phi = PhiParam.load(fname)
except FileNotFoundError:
print("No order parameter file found (e.g. sX_oXXphi.json), please run `star.py --initPhi` first.")
raise
# Purposely set phi.regions to None for this exercise, since we don't care about existing regions and likely want to overwrite them.
phi.regions = None
def optimize_region_residual(wl, residuals, sigma, mu):
'''
Determine the optimal parameters for the line kernels by fitting a Gaussian directly to the residuals.
'''
    # Using sigma0, truncate the wavelength vector and residuals to include
# only those portions that fall in the range [mu - sigma, mu + sigma]
ind = (wl > mu - args.sigma0) & (wl < mu + args.sigma0)
wl = wl[ind]
R = residuals[ind]
sigma = sigma[ind]
sigma_mat = phi.sigAmp * sigma**2 * np.eye(len(wl))
max_r = 6.0 * phi.l # [km/s]
k_func = make_k_func(phi)
# Use the full covariance matrix when doing the likelihood eval
CC = get_dense_C(wl, k_func=k_func, max_r=max_r) + sigma_mat
factor, flag = cho_factor(CC)
logdet = np.sum(2 * np.log((np.diag(factor))))
rr = C.c_kms/mu * np.abs(mu - wl) # Km/s
par = Starfish.config["region_params"]
p0 = np.array([10**par["logAmp"], par["sigma"]])
f = lambda x: -fprob(x)
try:
p = fmin(f, p0, maxiter=10000, maxfun=10000, disp=False)
# print(p)
return p
except np.linalg.linalg.LinAlgError:
return p0
def optimize_region_covariance(wl, residuals, sigma, mu):
'''
Determine the optimal parameters for the line kernels by actually using a chunk of the covariance matrix.
Note this actually uses the assumed global parameters.
'''
    # Using sigma0, truncate the wavelength vector and residuals to include
# only those portions that fall in the range [mu - sigma, mu + sigma]
ind = (wl > mu - args.sigma0) & (wl < mu + args.sigma0)
wl = wl[ind]
R = residuals[ind]
sigma = sigma[ind]
sigma_mat = phi.sigAmp * sigma**2 * np.eye(len(wl))
max_rl = 6.0 * phi.l # [km/s]
# Define a probability function for the residuals
par = Starfish.config["region_params"]
p0 = np.array([par["logAmp"], par["sigma"]])
f = lambda x: -fprob(x)
try:
p = fmin(f, p0, maxiter=10000, maxfun=10000)
print(p)
return p
except np.linalg.linalg.LinAlgError:
return p0
# Regions will be a 2D array with shape (nregions, 3)
regions = []
for mu in mus:
# amp, sig = optimize_region_residual(wl, resid, sigma, mu)
# regions.append([np.log10(np.abs(amp)), mu, sig])
logAmp, sig = optimize_region_covariance(wl, resid, sigma, mu)
regions.append([logAmp, mu, sig])
# Add these values back to the phi parameter file and save
phi.regions = np.array(regions)
phi.save()
| 30.929412 | 135 | 0.659947 |
3e15b565f2c5c8e4188c7106981c4468935c3719 | 2,261 | py | Python | Bases/download_bases.py | lucas26xd/Estudo-Dados-COVID19-BR | cba0278e1cbd2464b4b4c7faa866d05d9968247d | ["MIT"] | null | null | null | Bases/download_bases.py | lucas26xd/Estudo-Dados-COVID19-BR | cba0278e1cbd2464b4b4c7faa866d05d9968247d | ["MIT"] | null | null | null | Bases/download_bases.py | lucas26xd/Estudo-Dados-COVID19-BR | cba0278e1cbd2464b4b4c7faa866d05d9968247d | ["MIT"] | null | null | null |
import requests
from urllib.request import urlopen
from bs4 import BeautifulSoup
print('Pegando informações para download das bases...')
urls_bases, last_updates = get_urls_and_last_updates()
if len(urls_bases) > 0:
print('Iniciando Downloads...')
progress = 0
for url in urls_bases:
print(f'Baixando {url[url.rfind("/") + 1:]} - {last_updates[progress]} - ({progress + 1:0>2}/{len(urls_bases)})')
download(url)
progress += 1
else:
print('Problema ao resgatar as URLs das bases!')
| 39.666667 | 132 | 0.587793 |
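The download script above calls `get_urls_and_last_updates()` and `download(url)`, whose definitions were cut off. A hedged sketch of what they could look like using only the imports already present; the page URL and the CSV-link heuristic are placeholders, not the repository's actual scraping logic:

PAGE_URL = 'https://example.org/covid-bases'  # placeholder URL, not the real source page

def get_urls_and_last_updates():
    # Hypothetical: collect downloadable file links and their last-update labels from the page.
    soup = BeautifulSoup(urlopen(PAGE_URL).read(), 'html.parser')
    links = [a for a in soup.find_all('a', href=True) if a['href'].endswith('.csv')]
    return [a['href'] for a in links], [a.get_text(strip=True) for a in links]

def download(url):
    # Hypothetical: save each file under its original name, as the calling loop expects.
    response = requests.get(url)
    with open(url[url.rfind('/') + 1:], 'wb') as f:
        f.write(response.content)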
3e16ddbf593ddf87a424ef3546058ed337f938d3 | 10,699 | py | Python | rax/_src/utils_test.py | google/rax | d6370d574246db9fb0566317f7cac8cd331526d7 | ["Apache-2.0"] | 19 | 2022-01-25T12:37:51.000Z | 2022-03-30T17:12:45.000Z | rax/_src/utils_test.py | google/rax | d6370d574246db9fb0566317f7cac8cd331526d7 | ["Apache-2.0"] | 1 | 2022-02-08T23:02:42.000Z | 2022-02-08T23:02:42.000Z | rax/_src/utils_test.py | google/rax | d6370d574246db9fb0566317f7cac8cd331526d7 | ["Apache-2.0"] | null | null | null |
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pytype: skip-file
"""Tests for rax._src.utils."""
import doctest
from absl.testing import absltest
import jax
import jax.numpy as jnp
import numpy as np
import rax
from rax._src import utils
def load_tests(loader, tests, ignore):
del loader, ignore # Unused.
tests.addTests(
doctest.DocTestSuite(
utils, extraglobs={
"jax": jax,
"jnp": jnp,
"rax": rax
}))
return tests
if __name__ == "__main__":
absltest.main()
| 34.291667 | 79 | 0.639873 |
3e182689577a11bad1e8f7437a3d622ced715f94 | 427 | py | Python | examples/decorators.py | FusionSid/FusionSidAPI.py | e1b50622bf4fcec8265f8fd4e9b3ac79b580d286 | ["MIT"] | 5 | 2022-03-05T23:29:33.000Z | 2022-03-20T07:44:20.000Z | examples/decorators.py | FusionSid/FusionSidAPI.py | e1b50622bf4fcec8265f8fd4e9b3ac79b580d286 | ["MIT"] | null | null | null | examples/decorators.py | FusionSid/FusionSidAPI.py | e1b50622bf4fcec8265f8fd4e9b3ac79b580d286 | ["MIT"] | null | null | null |
import asyncio
from fusionsid import Decorators
deco = Decorators
do_roast = deco.roast
loop = asyncio.new_event_loop()
loop.run_until_complete(main())
| 23.722222 | 97 | 0.744731 |
3e188c93ed7a3552c4548ac6fc5970107dcdbcdb | 2,303 | py | Python | configs/raubtierv2b/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu.py | esf-bt2020/mmdetection | abc5fe060e0fcb716f845c85441be3741b22d3cf | ["Apache-2.0"] | null | null | null | configs/raubtierv2b/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu.py | esf-bt2020/mmdetection | abc5fe060e0fcb716f845c85441be3741b22d3cf | ["Apache-2.0"] | null | null | null | configs/raubtierv2b/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu.py | esf-bt2020/mmdetection | abc5fe060e0fcb716f845c85441be3741b22d3cf | ["Apache-2.0"] | null | null | null |
# Base configuration file
_base_ = '../centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py'
model = dict(
type='CornerNet',
backbone=dict(
type='HourglassNet',
downsample_times=5,
num_stacks=2,
stage_channels=[256, 256, 384, 384, 384, 512],
stage_blocks=[2, 2, 2, 2, 2, 4],
norm_cfg=dict(type='BN', requires_grad=True)),
neck=None,
bbox_head=dict(
type='CentripetalHead',
num_classes=3,
in_channels=256,
num_feat_levels=2,
corner_emb_channels=0,
loss_heatmap=dict(
type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1),
loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1),
loss_guiding_shift=dict(
type='SmoothL1Loss', beta=1.0, loss_weight=0.05),
loss_centripetal_shift=dict(
type='SmoothL1Loss', beta=1.0, loss_weight=1))
)
dataset_type = 'COCODataset'
classes = ('luchs', 'rotfuchs', 'wolf')
data = dict(
samples_per_gpu=3, #default 6
workers_per_gpu=1, #default 3
train=dict(
img_prefix='customData/train/',
classes=classes,
ann_file='customData/train/_annotations.coco.json'),
val=dict(
img_prefix='customData/valid/',
classes=classes,
ann_file='customData/valid/_annotations.coco.json'),
test=dict(
img_prefix='customData/test/',
classes=classes,
ann_file='customData/test/_annotations.coco.json'))
#optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) #8 GPUs => 8*6=48
optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001) #2 GPUs => 2*3=6 => 6/48= 1/8 cheetah
#optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001) #(1x6=6)
evaluation = dict(classwise=True, interval=4, metric='bbox')
load_from = 'checkpoints/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth'
work_dir = '/media/storage1/projects/WilLiCam/checkpoint_workdir/centripetalnet_hourglass104_mstest_16x6_210e_coco_raubtierv2b_2gpu'
#http://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth
| 37.145161 | 191 | 0.691706 |
3e1c92be5d3fa432577c6a625de6487e656413d6 | 3,175 | py | Python | firecares/firestation/tests/test_feedback.py | FireCARES/firecares | aa708d441790263206dd3a0a480eb6ca9031439d | ["MIT"] | 12 | 2016-01-30T02:28:35.000Z | 2019-05-29T15:49:56.000Z | firecares/firestation/tests/test_feedback.py | FireCARES/firecares | aa708d441790263206dd3a0a480eb6ca9031439d | ["MIT"] | 455 | 2015-07-27T20:21:56.000Z | 2022-03-11T23:26:20.000Z | firecares/firestation/tests/test_feedback.py | FireCARES/firecares | aa708d441790263206dd3a0a480eb6ca9031439d | ["MIT"] | 14 | 2015-07-29T09:45:53.000Z | 2020-10-21T20:03:17.000Z |
import json
import mock
import os
from django.contrib.auth import get_user_model
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import Client
from firecares.firestation.models import FireDepartment, FireStation, DataFeedback
from firecares.firecares_core.tests.base import BaseFirecaresTestcase
User = get_user_model()
| 42.333333 | 110 | 0.627402 |
3e1d0a3af98bd0674017ebccc343c54e9d715f7e | 325 | py | Python | reactivex/scheduler/mainloop/__init__.py | christiansandberg/RxPY | 036027d2858ea6c9d45839c863bd791e5bb50c36 | ["MIT"] | null | null | null | reactivex/scheduler/mainloop/__init__.py | christiansandberg/RxPY | 036027d2858ea6c9d45839c863bd791e5bb50c36 | ["MIT"] | null | null | null | reactivex/scheduler/mainloop/__init__.py | christiansandberg/RxPY | 036027d2858ea6c9d45839c863bd791e5bb50c36 | ["MIT"] | null | null | null |
from .gtkscheduler import GtkScheduler
from .pygamescheduler import PyGameScheduler
from .qtscheduler import QtScheduler
from .tkinterscheduler import TkinterScheduler
from .wxscheduler import WxScheduler
__all__ = [
"GtkScheduler",
"PyGameScheduler",
"QtScheduler",
"TkinterScheduler",
"WxScheduler",
]
| 23.214286 | 46 | 0.775385 |
3e1d352c7922c834041d16e3fdaa531ed2d9b63c | 75 | py | Python | map2annotation/__main__.py | aas-integration/integration-test2 | dc9a9b4593cd59841f0d8348056cbff80a9c2a21 | ["MIT"] | 3 | 2016-10-10T20:18:51.000Z | 2018-05-01T19:42:10.000Z | map2annotation/__main__.py | aas-integration/integration-test2 | dc9a9b4593cd59841f0d8348056cbff80a9c2a21 | ["MIT"] | 38 | 2016-08-22T03:20:25.000Z | 2018-06-11T19:13:05.000Z | map2annotation/__main__.py | aas-integration/integration-test2 | dc9a9b4593cd59841f0d8348056cbff80a9c2a21 | ["MIT"] | 7 | 2016-08-29T17:37:42.000Z | 2022-01-28T00:30:10.000Z |
import map2annotation
if __name__ == '__main__':
map2annotation.main()
| 18.75 | 26 | 0.746667 |
3e1f67673a3fc56ac1f18b117525630eebc9c6b7 | 207 | py | Python | convenient_ai/nlp/spacy/types/RulePattern.py | leftshiftone/convenient-nlp | 8f24ff070ffaa2117af02ae16aaf00bc4d7cf32f | ["MIT"] | null | null | null | convenient_ai/nlp/spacy/types/RulePattern.py | leftshiftone/convenient-nlp | 8f24ff070ffaa2117af02ae16aaf00bc4d7cf32f | ["MIT"] | null | null | null | convenient_ai/nlp/spacy/types/RulePattern.py | leftshiftone/convenient-nlp | 8f24ff070ffaa2117af02ae16aaf00bc4d7cf32f | ["MIT"] | null | null | null |
import dataclasses
from dataclasses import dataclass
| 14.785714 | 39 | 0.695652 |
3e1ff4f3721e11c343d5fecb39e8c79bd6aafa15 | 1,014 | py | Python | cocos2d/tools/cocos2d-console/plugins/plugin_run.py | meiry/Cocos2d-x-EarthWarrior3D-win-desktop-version | 6611cf5bfac64e22f0b053681ef9bf563f93f06c | ["MIT"] | 7 | 2015-05-19T07:54:48.000Z | 2021-12-17T06:01:24.000Z | cocos2d/tools/cocos2d-console/plugins/plugin_run.py | meiry/Cocos2d-x-EarthWarrior3D-win-desktop-version | 6611cf5bfac64e22f0b053681ef9bf563f93f06c | ["MIT"] | null | null | null | cocos2d/tools/cocos2d-console/plugins/plugin_run.py | meiry/Cocos2d-x-EarthWarrior3D-win-desktop-version | 6611cf5bfac64e22f0b053681ef9bf563f93f06c | ["MIT"] | 4 | 2015-11-24T06:27:16.000Z | 2021-05-29T10:50:44.000Z |
#!/usr/bin/python
# ----------------------------------------------------------------------------
# cocos2d "run" plugin
#
# Author: Luis Parravicini
#
# License: MIT
# ----------------------------------------------------------------------------
'''
"run" plugin for cocos2d command line tool
'''
__docformat__ = 'restructuredtext'
import sys
import os
import cocos2d
| 24.142857 | 78 | 0.554241 |
3e22427e89b56fa4293c96f943f7ce0b77c3a1a7 | 2,759 | py | Python | source/configuration.py | yux1991/PyRHEED | b39ad03651c92e3649069919ae48b1e5158cd3dd | ["MIT"] | 14 | 2019-01-08T14:32:31.000Z | 2021-11-17T21:07:10.000Z | source/configuration.py | yux1991/PyRHEED | b39ad03651c92e3649069919ae48b1e5158cd3dd | ["MIT"] | 2 | 2019-05-14T08:56:36.000Z | 2020-12-22T16:44:30.000Z | source/configuration.py | yux1991/PyRHEED | b39ad03651c92e3649069919ae48b1e5158cd3dd | ["MIT"] | 4 | 2019-03-12T20:03:54.000Z | 2022-03-08T14:24:46.000Z |
import configparser
| 51.092593 | 66 | 0.303733 |
3e24e04ad5a6a1e6faafb25c71a578a2c2c42a6c | 4,772 | py | Python | api/api/endpoints/sensor_info.py | andschneider/ss_api | 4ddf5cd60d5e0e87e7641e97c9fbe78965c4b522 | ["MIT"] | null | null | null | api/api/endpoints/sensor_info.py | andschneider/ss_api | 4ddf5cd60d5e0e87e7641e97c9fbe78965c4b522 | ["MIT"] | 2 | 2019-12-26T17:31:56.000Z | 2020-01-06T19:45:05.000Z | api/api/endpoints/sensor_info.py | andschneider/soil_sense | 4ddf5cd60d5e0e87e7641e97c9fbe78965c4b522 | ["MIT"] | null | null | null |
import datetime
import json
from flask import Response, request, Blueprint
from flask_jwt_extended import jwt_required
from flask_restplus import Api, Namespace, Resource, reqparse
from sqlalchemy.exc import IntegrityError
from api.core.db_execptions import bad_db_response
from api.core.models import SensorInfoModel, SensorDataModel
from api import db
api = Namespace(
"sensor_info",
description="Sensor information: sensor id, plant name, and moisture alert level.",
)
post_args = reqparse.RequestParser()
post_args.add_argument("plant", type=str, required=True, help="Plant name.")
post_args.add_argument(
"alert_level", type=int, required=True, help="Alert level for moisture."
)
| 33.843972 | 111 | 0.573135 |
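The sensor_info module above declares the namespace and request parser, but its resource classes are truncated. A minimal sketch of the kind of resource that could sit behind it; the route, the model fields, and the response shape are assumptions for illustration only, not the project's actual endpoints:

@api.route("/<int:sensor_id>")
class SensorInfo(Resource):
    # Hypothetical endpoint: return the stored info for one sensor.
    @jwt_required
    def get(self, sensor_id):
        info = SensorInfoModel.query.filter_by(sensor_id=sensor_id).first()
        if info is None:
            return {"message": "sensor not found"}, 404
        return {"sensor_id": sensor_id, "plant": info.plant, "alert_level": info.alert_level}, 200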
3e24efcd76f37df58a59baf6375172236bdbc8f9 | 116 | py | Python | notifications/utils.py | Natgeoed/django-notifications | a73c01f167bdf796b609d44a2be5323d08180b7f | ["BSD-3-Clause"] | 1 | 2019-05-28T14:55:58.000Z | 2019-05-28T14:55:58.000Z | notifications/utils.py | Natgeoed/django-notifications | a73c01f167bdf796b609d44a2be5323d08180b7f | ["BSD-3-Clause"] | null | null | null | notifications/utils.py | Natgeoed/django-notifications | a73c01f167bdf796b609d44a2be5323d08180b7f | ["BSD-3-Clause"] | 1 | 2020-03-29T10:13:28.000Z | 2020-03-29T10:13:28.000Z |
# -*- coding: utf-8 -*-
| 14.5 | 30 | 0.603448 |
3e263e2d36efcfc4b3135f0a65636317114a2c8d | 995 | py | Python | hash calculator.py | Andrea1141/hash-calculator | 182d2f9bcfa0227ad70f7fdb03dde4599717cafa | ["MIT"] | 1 | 2021-10-02T12:48:25.000Z | 2021-10-02T12:48:25.000Z | hash calculator.py | Andrea1141/hash-calculator | 182d2f9bcfa0227ad70f7fdb03dde4599717cafa | ["MIT"] | null | null | null | hash calculator.py | Andrea1141/hash-calculator | 182d2f9bcfa0227ad70f7fdb03dde4599717cafa | ["MIT"] | 1 | 2021-10-18T12:34:26.000Z | 2021-10-18T12:34:26.000Z |
import tkinter, hashlib
root = tkinter.Tk()
root.title("Hash Calculator")
label = tkinter.Label(text="Write the string to hash")
label.pack()
option = tkinter.StringVar()
option.set("sha224")
string = tkinter.StringVar()
entry = tkinter.Entry(root, textvariable=string, width=150, justify="center")
entry.pack()
hexdigest = tkinter.StringVar()
label = tkinter.Entry(text="", textvariable=hexdigest, width=150, justify="center", state="readonly")
label.pack()
string.trace_add("write", callback)
option.trace_add("write", callback)
algorithms = ["sha224", "sha1", "blake2s", "sha3_384", "sha256", "blake2b", "sha384", "sha3_256", "sha3_512", "md5", "sha512", "sha3_224"]
menu = tkinter.OptionMenu(root, option, *algorithms)
menu.pack()
callback()
root.mainloop()
| 28.428571 | 139 | 0.684422 |
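The hash-calculator snippet wires both trace_add hooks (and one direct call) to a `callback` that the excerpt does not include. A plausible minimal reconstruction, offered as an assumption rather than the original author's code:

def callback(*args):
    # Recompute the digest whenever the entry text or the selected algorithm changes.
    hexdigest.set(hashlib.new(option.get(), string.get().encode()).hexdigest())

Defined above the `trace_add` calls, this keeps the read-only field in sync with the entry and the chosen algorithm.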
3e28aa85ecfaa56c22716b2abd9f954c4b0ab246 | 136 | py | Python | tensorflow/__init__.py | vsilyaev/tensorflow | f41959ccb2d9d4c722fe8fc3351401d53bcf4900 | ["Apache-2.0"] | 4 | 2015-11-10T14:11:39.000Z | 2021-11-17T11:11:25.000Z | tensorflow/__init__.py | TheRockStarDBA/tensorflow | db0b5da485e1d1f23003ee08ed2e191451ee0319 | ["Apache-2.0"] | null | null | null | tensorflow/__init__.py | TheRockStarDBA/tensorflow | db0b5da485e1d1f23003ee08ed2e191451ee0319 | ["Apache-2.0"] | 2 | 2015-11-13T21:11:49.000Z | 2015-11-29T04:13:49.000Z |
# Bring in all of the public TensorFlow interface into this
# module.
# pylint: disable=wildcard-import
from tensorflow.python import *
| 27.2 | 59 | 0.786765 |
3e28b00c8be476ae7052942943b00801e357b52e | 4,337 | py | Python | cpauto/objects/threat.py | krnnrt/cpauto | 6076ee0e3f55769aac5b2480453d82f99371a31f | ["Apache-2.0"] | 16 | 2016-12-07T02:45:31.000Z | 2022-01-20T11:46:24.000Z | cpauto/objects/threat.py | krnnrt/cpauto | 6076ee0e3f55769aac5b2480453d82f99371a31f | ["Apache-2.0"] | 2 | 2017-07-20T21:12:27.000Z | 2021-09-09T14:57:01.000Z | cpauto/objects/threat.py | krnnrt/cpauto | 6076ee0e3f55769aac5b2480453d82f99371a31f | ["Apache-2.0"] | 5 | 2017-07-28T14:06:25.000Z | 2021-09-06T12:01:18.000Z |
# -*- coding: utf-8 -*-
# Copyright 2016 Dana James Traversie and Check Point Software Technologies, Ltd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# cpauto.objects.threat
# ~~~~~~~~~~~~~~~~~~~~~
"""This module contains the classes needed to manage threat prevention objects."""
from ._common import _CommonClient
| 44.255102 | 113 | 0.679733 |
3e28e0f9797870a68b28678349b8f468bf2771ae | 387 | py | Python | src/tandlr/notifications/routing.py | shrmoud/schoolapp | 7349ce18f56658d67daedf5e1abb352b5c15a029 | ["Apache-2.0"] | null | null | null | src/tandlr/notifications/routing.py | shrmoud/schoolapp | 7349ce18f56658d67daedf5e1abb352b5c15a029 | ["Apache-2.0"] | null | null | null | src/tandlr/notifications/routing.py | shrmoud/schoolapp | 7349ce18f56658d67daedf5e1abb352b5c15a029 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
from channels.staticfiles import StaticFilesConsumer
from tandlr.notifications import consumers
channel_routing = {
'http.request': StaticFilesConsumer(),
# Wire up websocket channels to our consumers:
'websocket.connect': consumers.ws_connect,
'websocket.receive': consumers.ws_receive,
'websocket.disconnect': consumers.ws_disconnect,
}
| 25.8 | 52 | 0.74677 |
3e297317547f88cd2d57145599c9dcd9b0299b5a | 646 | py | Python | 2018/d03.py | m1el/advent-of-code | 0944579fd58c586ce5a72b4152c5105ec07846a1 | ["MIT"] | null | null | null | 2018/d03.py | m1el/advent-of-code | 0944579fd58c586ce5a72b4152c5105ec07846a1 | ["MIT"] | null | null | null | 2018/d03.py | m1el/advent-of-code | 0944579fd58c586ce5a72b4152c5105ec07846a1 | ["MIT"] | null | null | null |
from collections import defaultdict, Counter
from itertools import product
import re
with open('03.txt') as fd:
inp = []
for l in fd.readlines():
groups = re.findall(r'\d+', l)
inp.append(list(map(int, groups)))
claims = defaultdict(int)
for (id, l,t, w,h) in inp:
for y in range(t,t+h):
for x in range(l,l+w):
claims[(x,y)] += 1
c=0
for n in claims.values():
if n > 1: c+= 1
print(c)
for (id, l,t, w,h) in inp:
bad = False
for y in range(t,t+h):
for x in range(l,l+w):
if claims[(x,y)] > 1:
bad = True
break
if bad: break
if not bad:
print(id)
| 20.1875 | 45 | 0.547988 |
3e2a44b8d417cc833a2bb62cb532d7fa7ff0e6b8 | 2,591 | py | Python | files/lambda/tagger.py | mbasri/generic-spot-cluster | cccfbee4660ae26742e1442f495dc9f523d0a2fd | ["MIT"] | 1 | 2019-12-24T18:53:34.000Z | 2019-12-24T18:53:34.000Z | files/lambda/tagger.py | mbasri/generic-spot-cluster | cccfbee4660ae26742e1442f495dc9f523d0a2fd | ["MIT"] | null | null | null | files/lambda/tagger.py | mbasri/generic-spot-cluster | cccfbee4660ae26742e1442f495dc9f523d0a2fd | ["MIT"] | null | null | null |
import os
import sys
import logging
import boto3
| 22.530435 | 81 | 0.582015 |
3e2c2255a47604390ebe0475399a05f36907ad33 | 4,478 | py | Python | Python/biopsy/data/bergman_fly_motifs.py | JohnReid/biopsy | 1eeb714ba5b53f2ecf776d865d32e2078cbc0338 | ["MIT"] | null | null | null | Python/biopsy/data/bergman_fly_motifs.py | JohnReid/biopsy | 1eeb714ba5b53f2ecf776d865d32e2078cbc0338 | ["MIT"] | null | null | null | Python/biopsy/data/bergman_fly_motifs.py | JohnReid/biopsy | 1eeb714ba5b53f2ecf776d865d32e2078cbc0338 | ["MIT"] | null | null | null |
#
# Copyright John Reid 2010
#
"""
Code to deal with the Bergman curated set of fly motifs.
"""
import os, biopsy.data as D, numpy as N
import xml.etree.ElementTree as ET
def xms_filename():
"@return: The filename of the XMS file where the motifs are stored."
return os.path.join(D.data_dir(), "Bergman-Fly-Motifs", "SelexConsensus1.1.xms")
def write_as_custom_pssm(f, id_, name, matrix, comments=None, url=None, field_width=3, scale=1):
"""
Write the motif as a custom PSSM to the file, f.
#
# Drosophila Hunchback from JASPAR
#
ID DN-000001
NA D$Hunchback
WI 10
PO 01 02 03 04 05 06 07 08 09 10
CA 01 06 09 04 13 16 16 14 15 09
CC 05 08 03 03 01 00 00 00 01 02
CG 08 02 04 01 00 00 00 02 00 02
CT 02 00 00 08 02 00 00 00 00 03
IU G C A T A A A A A A
UR None
"""
if None != comments:
print >> f, '#'
for comment in comments:
print >> f, '# %s' % comment
print >> f, '#'
print >> f, 'ID %s' % id_
print >> f, 'NA %s' % name
print >> f, 'WI %s' % len(matrix)
print >> f, 'PO %s' % ' '.join('%*d' % (field_width, i+1) for i in xrange(len(matrix)))
for b, tag in enumerate(('CA', 'CC', 'CG', 'CT')):
print >> f, '%s %s' % (tag, ' '.join('%*d' % (field_width, int(v)) for v in matrix[:,b]*scale))
print >> f, 'UR %s' % (None != url and url or 'None')
def normalise_matrix(matrix):
"@return: A normalised version of the argument."
return (matrix.T / matrix.sum(axis=1)).T
def smooth_matrix_with_pseudo_count(matrix, pseudo_count):
"@return: A smoothed version the matrix using the given pseudo counts."
smoothed = matrix + pseudo_count
return normalise_matrix(smoothed)
def write_matrix_to_file(f, id_, name, alphabet, matrix, properties, threshold, scale=1):
"Write the matrix to the file in the custom PSSM format."
comments = [
'PSSM parsed from set of fly TFs curated by Bergman.'
]
comments.extend('%20s : %s' % (k, v) for k, v in properties.iteritems())
write_as_custom_pssm(f, id_, name, matrix, comments=comments, scale=scale)
if '__main__' == __name__:
import sys
output_dir = '/home/john/Data/custom-pssms'
pssm_set_tag = 'BG'
scale = 30
pssm_set_f = open(os.path.join(output_dir, 'bergman-fly.pssm_set'), 'w')
print >> pssm_set_f, '#'
print >> pssm_set_f, '# Set of fly TFs curated by Bergman.'
print >> pssm_set_f, '# PSSMs were scaled as if there were %d observations.' % scale
print >> pssm_set_f, '#'
for i, (name, alphabet, matrix, properties, threshold) in enumerate(parse_xms(open(xms_filename()))):
id_ = '%s-%06d' % (pssm_set_tag, i+1)
print id_, name
print >> pssm_set_f, id_
f = open(os.path.join(output_dir, '%s.pssm' % id_), 'w')
properties['Equivalent # observations'] = str(scale)
write_matrix_to_file(f, id_, name, alphabet, matrix, properties, threshold, scale=scale)
f.close()
pssm_set_f.close()
| 33.41791 | 105 | 0.585306 |
3e2c4ce8c6ded9f25bc03ff3e20ecd6211356ad1 | 7,950 | py | Python | addressbook/views.py | webskate101/django-polymer-addressbook | bf41b6a83e7b9228b383129958488f1c8075c728 | ["Apache-2.0"] | null | null | null | addressbook/views.py | webskate101/django-polymer-addressbook | bf41b6a83e7b9228b383129958488f1c8075c728 | ["Apache-2.0"] | null | null | null | addressbook/views.py | webskate101/django-polymer-addressbook | bf41b6a83e7b9228b383129958488f1c8075c728 | ["Apache-2.0"] | null | null | null |
"""Holds the HTTP handlers for the addressbook app."""
from django import db
from django import http
from django.views import generic
import json
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from addressbook import models
JSON_XSSI_PREFIX = ")]}'\n"
class PersonListRESTHandler(LoginRequiredRESTHandler):
"""REST handler for multiple person requests."""
class PersonRESTHandler(LoginRequiredRESTHandler):
"""REST handler for single person requests."""
def _update_person(self, person, update_dict):
person.first_name = update_dict['firstName']
person.last_name = update_dict['lastName']
_update_contact_details(person, update_dict)
person.save()
| 33.544304 | 80 | 0.684528 |
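Both handlers in the addressbook views above inherit from `LoginRequiredRESTHandler`, which the excerpt omits. A hedged sketch of what such a base class might look like using only the imports already shown (the method names and behavior are assumptions):

class LoginRequiredRESTHandler(generic.View):
    # Hypothetical base view: require an authenticated user and emit XSSI-protected JSON.

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        return super(LoginRequiredRESTHandler, self).dispatch(request, *args, **kwargs)

    def render_json(self, payload):
        return http.HttpResponse(JSON_XSSI_PREFIX + json.dumps(payload), content_type='application/json')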
3e2dc345629e84a8ce9faa979c3f69774ad29ef7 | 132 | py | Python | backend/actions.py | HiroshiFuu/django-rest-drf-yasg-boilerplate | 93221b2dbca0635eb42a18096e805b00f36ff9c1 | ["Apache-2.0"] | null | null | null | backend/actions.py | HiroshiFuu/django-rest-drf-yasg-boilerplate | 93221b2dbca0635eb42a18096e805b00f36ff9c1 | ["Apache-2.0"] | null | null | null | backend/actions.py | HiroshiFuu/django-rest-drf-yasg-boilerplate | 93221b2dbca0635eb42a18096e805b00f36ff9c1 | ["Apache-2.0"] | null | null | null |
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
| 33 | 54 | 0.863636 |
3e2de9f463b88672a9f0881711bb0f7f45018e12 | 1,124 | py | Python | Housing Price/HouseRegression.py | anupriyamranjit/machinelearning | 5e1deef38d356fddcedfe0a23094571500c1c82d | ["MIT"] | null | null | null | Housing Price/HouseRegression.py | anupriyamranjit/machinelearning | 5e1deef38d356fddcedfe0a23094571500c1c82d | ["MIT"] | null | null | null | Housing Price/HouseRegression.py | anupriyamranjit/machinelearning | 5e1deef38d356fddcedfe0a23094571500c1c82d | ["MIT"] | null | null | null |
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import tensorflow as tf
import keras
import os
print(os.listdir("../input"))
print("Success")
# Any results you write to the current directory are saved as output.
# importing models/layers
from keras.models import Sequential
from keras.layers import Dense
print("Success")
my_data = pd.read_csv('../input/kc_house_data.csv')
my_data.head()
#Splitting Data Up
predictors = my_data.drop(columns=["price","date"])
output = my_data['price']
print("Success")
model = Sequential()
n_cols = predictors.shape[1]
print("Success")
#Dense Layers
model.add(Dense(5,activation ="relu", input_shape=(n_cols,)))
model.add(Dense(5,activation ="relu"))
model.add(Dense(1))
print("Success")
#Optimizer
model.compile(optimizer="adam", loss ="mean_squared_error")
print("Success")
#fitting
from keras.callbacks import EarlyStopping
early_stopping_monitor = EarlyStopping(patience=3)
model.fit(predictors,output,validation_split=0.2, epochs=30, callbacks=[early_stopping_monitor])
# prediction (model.predict needs input data; the training predictors are used here since no test split is loaded)
prediction = model.predict(predictors)
| 22.039216 | 96 | 0.758897 |
3e2e001920079b806a3731784374226e2f26379a | 1,194 | py | Python | migrations/versions/29e48091912e_remove_unique_constraint_from_user_table.py | GitauHarrison/somasoma_V1 | 2d74ad3b58f7e4ea5334e240d5bd30938f615e24 | ["MIT"] | null | null | null | migrations/versions/29e48091912e_remove_unique_constraint_from_user_table.py | GitauHarrison/somasoma_V1 | 2d74ad3b58f7e4ea5334e240d5bd30938f615e24 | ["MIT"] | 2 | 2021-11-11T19:04:10.000Z | 2021-11-11T19:08:42.000Z | migrations/versions/29e48091912e_remove_unique_constraint_from_user_table.py | GitauHarrison/somasoma_V1 | 2d74ad3b58f7e4ea5334e240d5bd30938f615e24 | ["MIT"] | 1 | 2021-09-09T13:44:26.000Z | 2021-09-09T13:44:26.000Z |
"""remove unique constraint from user table
Revision ID: 29e48091912e
Revises: f73df8de1f1f
Create Date: 2021-12-22 22:26:20.918461
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '29e48091912e'
down_revision = 'f73df8de1f1f'
branch_labels = None
depends_on = None
| 30.615385 | 83 | 0.69598 |
3e2e6a8e43d315af581125fc3cb4dc17b915f7a7 | 6,065 | py | Python | VBx/models/resnet.py | Jamiroquai88/VBx | 35e7954ac0042ea445dcec657130e2c3c0b94ee0 | ["Apache-2.0"] | 145 | 2020-02-13T09:08:59.000Z | 2022-03-28T02:05:38.000Z | VBx/models/resnet.py | Jamiroquai88/VBx | 35e7954ac0042ea445dcec657130e2c3c0b94ee0 | ["Apache-2.0"] | 39 | 2021-01-12T02:49:37.000Z | 2022-02-17T18:49:54.000Z | VBx/models/resnet.py | Jamiroquai88/VBx | 35e7954ac0042ea445dcec657130e2c3c0b94ee0 | ["Apache-2.0"] | 44 | 2020-02-13T03:57:35.000Z | 2022-03-31T07:05:09.000Z |
'''ResNet in PyTorch.
For Pre-activation ResNet, see 'preact_resnet.py'.
Reference:
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Deep Residual Learning for Image Recognition. arXiv:1512.03385
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
def ResNet101(feat_dim, embed_dim, squeeze_excitation=False):
return ResNet(Bottleneck, [3, 4, 23, 3], feat_dim=feat_dim, embed_dim=embed_dim, squeeze_excitation=squeeze_excitation)
| 40.433333 | 123 | 0.622754 |
3e345a0575b803502ed9bfed61051d0d9fb3fa57 | 5,159 | py | Python | bc/recruitment/utils.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | ["BSD-3-Clause"] | 1 | 2021-02-27T07:27:17.000Z | 2021-02-27T07:27:17.000Z | bc/recruitment/utils.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | ["BSD-3-Clause"] | null | null | null | bc/recruitment/utils.py | Buckinghamshire-Digital-Service/buckinghamshire-council | bbbdb52b515bcdfc79a2bd9198dfa4828405370e | ["BSD-3-Clause"] | 1 | 2021-06-09T15:56:54.000Z | 2021-06-09T15:56:54.000Z |
import json
from django import forms
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector
from django.core.exceptions import ValidationError
from django.db.models import F
from django.db.models.functions import ACos, Cos, Radians, Sin
import requests
from bc.recruitment.constants import JOB_FILTERS
from bc.recruitment.models import JobCategory, RecruitmentHomePage, TalentLinkJob
def get_current_search(querydict):
"""
Returns search query and filters in request.GET as json string
"""
search = {}
if querydict.get("query", None):
search["query"] = querydict["query"]
if querydict.get("postcode", None):
search["postcode"] = querydict["postcode"]
# Loop through our filters so we don't just store any query params
for filter in JOB_FILTERS:
selected = querydict.getlist(filter["name"])
if selected:
selected = list(dict.fromkeys(selected)) # Remove duplicate options
search[filter["name"]] = sorted(selected) # Sort options alphabetically
return json.dumps(search)
| 34.393333 | 107 | 0.652064 |
3e37452fb8273aa4b7fb354676b63c94081558fd | 7,264 | py | Python | classification/ClassificationModelLargeViz.py | geigerf/STAG_slim | 391e7a8031a7e128509f276113b19fd7f13897ec | ["Apache-2.0"] | null | null | null | classification/ClassificationModelLargeViz.py | geigerf/STAG_slim | 391e7a8031a7e128509f276113b19fd7f13897ec | ["Apache-2.0"] | null | null | null | classification/ClassificationModelLargeViz.py | geigerf/STAG_slim | 391e7a8031a7e128509f276113b19fd7f13897ec | ["Apache-2.0"] | null | null | null |
import argparse
import os
import shutil
import time, math, datetime, re
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import numpy as np
from torch.autograd.variable import Variable
from shared.BaseModel import BaseModel
from shared.resnet_9x9 import resnet18
from shared import dataset_tools
'''
Visualization version of the final model.
'''
| 31.859649 | 135 | 0.56663 |
3e38f387d0ad96aa627dd060a7aa1188e154c4a3 | 10,017 | py | Python | graphingVisHullTwoD.py | cm-1/2D-External-Visual-Hulls | 579e7d18d048d403b636d326840e5cb2a4e3a3e8 | ["MIT"] | 1 | 2022-02-10T07:07:35.000Z | 2022-02-10T07:07:35.000Z | graphingVisHullTwoD.py | cm-1/2D-External-Visual-Hulls | 579e7d18d048d403b636d326840e5cb2a4e3a3e8 | ["MIT"] | null | null | null | graphingVisHullTwoD.py | cm-1/2D-External-Visual-Hulls | 579e7d18d048d403b636d326840e5cb2a4e3a3e8 | ["MIT"] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
from visHullTwoD import Scene, SegmentType
#%%
#%%
world0 = Scene()
world1 = Scene()
world2 = Scene()
world3 = Scene()
world4 = Scene()
world5 = Scene()
world6 = Scene()
world7 = Scene()
world8 = Scene()
world9 = Scene()
world10 = Scene()
world11 = Scene()
world12 = Scene()
# These are the tris from Petitjean's diagram
polygon1 = [(0, 0), (2.25, 0.5), (1.25, 2.3)] # [(0,3),(1,1),(3,0),(4,0),(3,4)]
polygon2 = [(1.15, 3.15), (4, 4), (0.9, 5.25)] # [(1,4),(2,5),(2,1),(1,3)]
polygon3 = [(3, 0.7), (4.85, 1.75), (4.85, 3.4)]
world0.addPolygon(polygon1)
world0.addPolygon(polygon2)
world0.addPolygon(polygon3)
#world0.addPolygon(polygon4)
polygon1 = [(0, 0), (5, 0), (5, 5), (4, 5), (4, 3), (1, 3), (1, 5), (0, 5)]
world1.addPolygon(polygon1)
polygon1 = [(0, 0), (5, 0), (5, 3), (4, 3), (4, 5), (1, 5), (1, 3), (0, 3)]
polygon2 = [(1, 7), (3, 7), (5, 9), (4, 11), (4, 9), (1, 8), (2, 10), (0, 10)]
world2.addPolygon(polygon1)
world2.addPolygon(polygon2)
polygon1 = [(0, 2), (1,1), (2,2), (1,0)]
polygon2 = [(3,3), (4,2), (5,3)]
# polygon2 = [(p[0] - 3, p[1]) for p in polygon2]
# Horizontal flip for testing purposes.
polygon1 = [(-p[0], p[1]) for p in polygon1]
polygon2 = [(-p[0], p[1]) for p in polygon2]
world3.addPolygon(polygon1)
world3.addPolygon(polygon2)
polygon1 = [(0, 7), (2.25, 5), (1.25, 4), (5, 5)] # [(0, 0), (2.25, 0.5), (1.25, 2.3)] # [(0,3),(1,1),(3,0),(4,0),(3,4)]
polygon2 = [(1.15, -3.15), (4, -4), (2, -7), (0.9, -5.25)] #[(1.15, 3.15), (4, 4), (0.9, 5.25)] # [(1,4),(2,5),(2,1),(1,3)]
polygon3 = [(3, 1), (3, 0.0), (4.85, 0.75), (4.85, 2.4), (5,4)] #[(3, 0.7), (4.85, 1.75), (4.85, 3.4)]
polygon4 = [(-0.5, -1), (-0.5, 1.0), (0.5, 1), (0.5, -1)] #[(3, 0.7), (4.85, 1.75), (4.85, 3.4)]
world4.addPolygon(polygon1)
world4.addPolygon(polygon2)
world4.addPolygon(polygon3)
world4.addPolygon(polygon4)
polygon1 = [(0, 0.6), (1.5, 0), (2.5, 1.25), (1.25, 0.75), (1.125, 1.8)]
polygon2 = [(1.3, 2.25), (2.8, 2.8), (1.65, 3.125)]
polygon3 = [(2.8, 1.25), (4.125, 0.25), (3.5, 2.0)]
world5.addPolygon(polygon1)
world5.addPolygon(polygon2)
world5.addPolygon(polygon3)
polygon1 = [(0,0), (2.5, 0), (0, 1.5)]
polygon2 = [(0, 3.25), (5, 4.25), (0, 4.25)]
polygon3 = [(3.5, 0), (5, 0), (5, 2.75), (3.5, 2.75)]
world6.addPolygon(polygon1)
world6.addPolygon(polygon2)
world6.addPolygon(polygon3)
polygon1 = [(-1, 1), (-2, 1), (-2, -1), (-1, -1), (0, 0), (1, -1), (2, -1), (2, 1), (1, 1), (0, 2)]
world7.addPolygon(polygon1)
polygon1 = [(-1, 1), (-2, 1), (-2, -1), (-1, -1)]
polygon2 = [(-1, -1), (0, 0), (1, -1), (1, 1), (0, 2), (-1, 1)]
polygon3 = [(1, -1), (2, -1), (2, 1), (1, 1)]
# polygon1 = [(p[0], 0.9*p[1]) for p in polygon1]
# polygon3 = [(p[0], 0.9*p[1]) for p in polygon3]
world8.addPolygon(polygon1)
world8.addPolygon(polygon2)
world8.addPolygon(polygon3)
# 0.9999995231628418
polygon1 = [(-1, -1), (1, -1), (1, 1), (-1, 1)]
polygon2 = [(1, 1), (2, -1), (3, 0), (2, 1)]
world9.addPolygon(polygon1)
world9.addPolygon(polygon2)
polygon1 = [(0.734870970249176, 0.26040399074554443), (-0.045375000685453415, 0.8651400208473206), (-0.8234530091285706, 0.4177840054035187), (-0.14182999730110168, 0.21450699865818024)]
polygon2 = [(-1.0, 1.0108875036239624), (1.0, 1.010890007019043), (1.0, 1.3735400438308716), (-1.0, 1.373543620109558)]
world10.addPolygon(polygon2)
world10.addPolygon(polygon1)
polygon0 = [(0.734870970249176, -1.1526894569396973), (-0.045375000685453415, 1.1651400327682495), (-0.8234530091285706, -0.9953095316886902), (-0.14182999730110168, -1.1985864639282227)]
polygon1 = [(2.1045942306518555, -2.0704498291015625), (2.1045916080474854, 1.9576737880706787), (1.7419415712356567, 1.9576740264892578), (1.7419381141662598, -2.0704498291015625)]
polygon2 = [(-1.7419382333755493, -2.0704498291015625), (-1.741940975189209, 1.9576740264892578), (-2.10459041595459, 1.9576740264892578), (-2.1045944690704346, -2.0704495906829834)]
world11.addPolygon(polygon0)
world11.addPolygon(polygon1)
world11.addPolygon(polygon2)
polygon0 = [(0.7000000476837158, -1.2000000476837158), (-0.10000000149011612, 1.2000000476837158), (-0.800000011920929, -1.0), (-0.10000000149011612, -1.25)]
polygon1 = [(2.0999999046325684, -2.0999999046325684), (2.0999999046325684, 1.899999976158142), (1.7000000476837158, 1.899999976158142), (1.7000000476837158, -2.0999999046325684)]
polygon2 = [(-1.7000000476837158, -2.0999999046325684), (-1.7000000476837158, 1.899999976158142), (-2.1000001430511475, 1.899999976158142), (-2.1000001430511475, -2.0999999046325684)]
world12.addPolygon(polygon0)
world12.addPolygon(polygon1)
world12.addPolygon(polygon2)
#world.addLine((0, 2.5), (3, 2.5))
worlds = [world0, world1, world2, world3, world4, world5, world6, world7, world8, world9, world10]
worldIndex = 0
for w in worlds:
print("\nWorld:", worldIndex)
worldIndex += 1
w.calcFreeLines()
drawScene(w)
faceList = w.partitionMesh.faces
for k in faceList:
doubleFaceTest(faceList[k])
checkEventEquality(world12, world11)
#%%
reminders = [
"Is there a better way, using cos(), to handle parallelism in isLineInsideEdgeAngle()?",
"Pruning of lines that intersect obj at CONTACT verts. (I sort of forget what this self-reminder meant...)",
"Pruning of segments outside convex hull.",
"Right now, swapDir() side effect in findIntersections(). Should this be changed?",
"Just generally take a second look at how floating-point precision problems are handled.\nEspecially for the y-intercept of MyLine, since a very small difference in coordinates can lead to a larger difference in y-intercepts.\nSo instead of comparing y-intercepts, something else should maybe be compared!"
]
for reminder in reminders:
sep = "==========="
print("\n" + sep + "\n" + reminder + "\n" + sep + "\n")
| 38.526923 | 311 | 0.605471 |
3e3c50b123745c81d1f91068db3b602d8d3f128d | 5,966 | py | Python | dynamo/preprocessing/dynast.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | ["BSD-3-Clause"] | 236 | 2019-07-09T22:06:21.000Z | 2022-03-31T17:56:07.000Z | dynamo/preprocessing/dynast.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | ["BSD-3-Clause"] | 115 | 2019-07-12T19:06:21.000Z | 2022-03-31T17:34:18.000Z | dynamo/preprocessing/dynast.py | xing-lab-pitt/dynamo-release | 76c1f2a270dd6722b88f4700aac1a1a725a0c261 | ["BSD-3-Clause"] | 34 | 2019-07-10T03:34:04.000Z | 2022-03-22T12:44:22.000Z |
import numpy as np
from scipy.sparse import issparse
from sklearn.utils import sparsefuncs
import anndata
from typing import Union
from ..dynamo_logger import LoggerManager, main_tqdm
from ..utils import copy_adata
def lambda_correction(
adata: anndata.AnnData,
lambda_key: str = "lambda",
inplace: bool = True,
copy: bool = False,
) -> Union[anndata.AnnData, None]:
"""Use lambda (cell-wise detection rate) to estimate the labelled RNA.
Parameters
----------
adata:
adata object generated from dynast.
lambda_key:
The key to the cell-wise detection rate.
inplace:
Whether to inplace update the layers. If False, new layers that append '_corrected" to the existing will be
used to store the updated data.
copy:
Whether to copy the adata object or update adata object inplace.
Returns
-------
adata: :class:`~anndata.AnnData`
        A new or updated anndata object, depending on the copy parameter, updated with Size_Factor, normalized
expression values, X and reduced dimensions, etc.
"""
logger = LoggerManager.gen_logger("dynamo-lambda_correction")
logger.log_time()
adata = copy_adata(adata) if copy else adata
logger.info("apply detection rate correction to adata...", indent_level=1)
if lambda_key not in adata.obs.keys():
raise ValueError(
f"the lambda_key {lambda_key} is not included in adata.obs! Please ensure you have calculated "
"per-cell detection rate!"
)
logger.info("retrieving the cell-wise detection rate..", indent_level=1)
detection_rate = adata.obs[lambda_key].values[:, None]
logger.info("identify the data type..", indent_level=1)
all_layers = adata.layers.keys()
    has_ul = np.any(["ul_" in i for i in all_layers])
    has_un = np.any(["un_" in i for i in all_layers])
    has_sl = np.any(["sl_" in i for i in all_layers])
    has_sn = np.any(["sn_" in i for i in all_layers])
    has_l = np.any(["_l_" in i for i in all_layers])
    has_n = np.any(["_n_" in i for i in all_layers])
    # str has no .contains(); membership tests are used instead, and the boolean
    # flags are combined directly rather than passing a scalar to sum()
    if has_ul and has_un and has_sl and has_sn:
        datatype = "splicing_labeling"
    elif has_l or has_n:
        datatype = "labeling"
logger.info(f"the data type identified is {datatype}", indent_level=2)
logger.info("retrieve relevant layers for detection rate correction", indent_level=1)
if datatype == "splicing_labeling":
layers, match_tot_layer = [], []
        for layer in all_layers:
            # list.append is needed here; "layers += layer" would splice the layer
            # name into the list character by character
            if "ul_" in layer:
                layers.append(layer)
                match_tot_layer.append("unspliced")
            elif "un_" in layer:
                layers.append(layer)
                match_tot_layer.append("unspliced")
            elif "sl_" in layer:
                layers.append(layer)
                match_tot_layer.append("spliced")
            elif "sn_" in layer:
                layers.append(layer)
                match_tot_layer.append("spliced")
            elif "spliced" in layer:
                layers.append(layer)
            elif "unspliced" in layer:
                layers.append(layer)
if len(layers) != 6:
raise ValueError(
"the adata object has to include ul, un, sl, sn, unspliced, spliced, "
"six relevant layers for splicing and labeling quantified datasets."
)
elif datatype == "labeling":
layers, match_tot_layer = [], []
        for layer in all_layers:
            if "_l_" in layer:
                layers.append(layer)
                match_tot_layer.append("total")
            elif "_n_" in layer:
                layers.append(layer)
                match_tot_layer.append("total")
            elif "total" in layer:
                layers.append(layer)
if len(layers) != 3:
raise ValueError(
"the adata object has to include labeled, unlabeled, three relevant layers for labeling quantified "
"datasets."
)
logger.info("detection rate correction starts", indent_level=1)
for i, layer in enumerate(main_tqdm(layers, desc="iterating all relevant layers")):
if i < len(match_tot_layer):
cur_layer = adata.layers[layer] if inplace else adata.layers[layer].copy()
cur_total = adata.layers[match_tot_layer[i]]
            # even indices in `layers` are labeled RNA layers, odd indices are unlabeled RNA layers
if i % 2 == 0:
# formula: min(L / lambda, (L + U)) from scNT-seq
if issparse(cur_layer):
sparsefuncs.inplace_row_scale(cur_layer, 1 / detection_rate)
else:
cur_layer /= detection_rate
if inplace:
adata.layers[layer] = sparse_mimmax(cur_layer, cur_total)
else:
adata.layers[layer + "_corrected"] = sparse_mimmax(cur_layer, cur_total)
else:
                # subtract the labeled layer handled in the previous iteration
                # (layers[i - 1]), not a single character of the current name
                if inplace:
                    adata.layers[layer] = cur_total - adata.layers[layers[i - 1]]
                else:
                    adata.layers[layer + "_corrected"] = cur_total - adata.layers[layers[i - 1]]
logger.finish_progress(progress_name="lambda_correction")
if copy:
return adata
return None
def sparse_mimmax(A, B, type="min"):
    """Return the element-wise minimum/maximum of sparse matrices `A` and `B`.
Parameters
----------
A:
The first sparse matrix
B:
The second sparse matrix
type:
        The type of calculation, either minimum or maximum.
Returns
-------
M:
        A sparse matrix that contains the element-wise maximum or minimum of the two sparse matrices.
"""
AgtB = (A < B).astype(int) if type == "min" else (A > B).astype(int)
M = AgtB.multiply(A - B) + B
return M
| 35.301775 | 120 | 0.578947 |
3e3cee6ba011350960f8e52993ae0b2666144798 | 4,095 | py | Python | tests/fullscale/poroelasticity/cryer/TestCryer.py | cehanagan/pylith | cf5c1c34040460a82f79b6eb54df894ed1b1ee93 | ["MIT"] | 93 | 2015-01-08T16:41:22.000Z | 2022-02-25T13:40:02.000Z | tests/fullscale/poroelasticity/cryer/TestCryer.py | sloppyjuicy/pylith | ac2c1587f87e45c948638b19560813d4d5b6a9e3 | ["MIT"] | 277 | 2015-02-20T16:27:35.000Z | 2022-03-30T21:13:09.000Z | tests/fullscale/poroelasticity/cryer/TestCryer.py | sloppyjuicy/pylith | ac2c1587f87e45c948638b19560813d4d5b6a9e3 | ["MIT"] | 71 | 2015-03-24T12:11:08.000Z | 2022-03-03T04:26:02.000Z |
#!/usr/bin/env nemesis
#
# ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University at Buffalo
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2021 University of California, Davis
#
# See LICENSE.md for license information.
#
# ----------------------------------------------------------------------
#
# @file tests/fullscale/poroelasticity/cryer/TestCryer.py
#
# @brief Test suite for testing pylith with Cryer's problem.
import unittest
from pylith.testing.FullTestApp import (FullTestCase, Check, check_data)
import meshes
import cryer_soln
# We do not include trace_strain in the test of the solution fields, because of the
# poor convergence of the series solution.
SOLUTION_FIELDS = ["displacement", "pressure"]
SOLUTION_TOLERANCE = 0.5
# -------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------------
def test_cases():
return [
TestHex,
TestTet,
]
# -------------------------------------------------------------------------------------------------
if __name__ == '__main__':
FullTestCase.parse_args()
suite = unittest.TestSuite()
for test in test_cases():
suite.addTest(unittest.makeSuite(test))
unittest.TextTestRunner(verbosity=2).run(suite)
# End of file
| 31.022727 | 99 | 0.477656 |
3e3e8c87814094936e4351a80831e5bb8fce82f9
| 3,551 |
py
|
Python
|
util/data.py
|
pinaryazgan/GDN
|
469e63fa8c2dce596c6f7e99f2620ac6eec7dadf
|
[
"MIT"
] | 156 |
2021-03-01T12:49:25.000Z
|
2022-03-28T08:27:33.000Z
|
util/data.py
|
pinaryazgan/GDN
|
469e63fa8c2dce596c6f7e99f2620ac6eec7dadf
|
[
"MIT"
] | 24 |
2021-04-19T10:08:35.000Z
|
2022-03-28T11:42:54.000Z
|
util/data.py
|
pinaryazgan/GDN
|
469e63fa8c2dce596c6f7e99f2620ac6eec7dadf
|
[
"MIT"
] | 54 |
2021-04-16T17:26:30.000Z
|
2022-03-28T06:08:43.000Z
|
# util functions about data
from scipy.stats import rankdata, iqr, trim_mean
from sklearn.metrics import f1_score, mean_squared_error
import numpy as np
from numpy import percentile
# calculate F1 scores
| 28.18254 | 101 | 0.664602 |
3e41a3d23f1cd5e224926d0f23ef2a864d4c94cb
| 5,654 |
py
|
Python
|
rrl-sysadmin/sysadmin.py
|
HyeokjuJang/sr-drl
|
01fa8264c7b36f34f721303f455f37545dbce1fe
|
[
"MIT"
] | 14 |
2020-10-02T17:14:04.000Z
|
2022-02-26T19:26:58.000Z
|
rrl-sysadmin/sysadmin.py
|
HyeokjuJang/sr-drl
|
01fa8264c7b36f34f721303f455f37545dbce1fe
|
[
"MIT"
] | 1 |
2022-02-26T08:23:13.000Z
|
2022-02-26T08:23:13.000Z
|
rrl-sysadmin/sysadmin.py
|
jaromiru/sr-drl
|
01fa8264c7b36f34f721303f455f37545dbce1fe
|
[
"MIT"
] | 6 |
2021-05-04T13:24:12.000Z
|
2021-12-06T12:51:30.000Z
|
import gym, random, copy, string, uuid
import numpy as np
rddl_template = string.Template('''
non-fluents nf_sysadmin_inst_$uid {
domain = sysadmin_mdp;
objects {
computer : {$objects};
};
non-fluents {
REBOOT-PROB = $reboot_prob;
$connections
};
}
instance sysadmin_inst_$uid {
domain = sysadmin_mdp;
non-fluents = nf_sysadmin_inst_$uid;
init-state {
$running
};
max-nondef-actions = $maxactions;
horizon = $horizon;
discount = $discount;
}
''')
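# --- illustrative substitution (added sketch; not part of the original file) -
# A hedged, minimal example of how the template above can be filled in. The
# object names, connection and parameter values are made-up placeholders, not
# the values actually produced by the environment.
def _example_rddl_instance():
    return rddl_template.substitute(
        uid=uuid.uuid4().hex,
        objects="c1, c2, c3",
        reboot_prob=0.1,
        connections="CONNECTED(c1, c2);",
        running="running(c1); running(c2); running(c3);",
        maxactions=1,
        horizon=10,
        discount=1.0,
    )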
# ----------------------------------------------------------
# ----------------------------------------------------------
import networkx as nx
import matplotlib.pyplot as plt
COLOR_RUNNING = "#cad5fa"
COLOR_DOWN = "#e33c30"
COLOR_SELECTED_R = "#1b3eb5"
COLOR_SELECTED_D = "#701812"
# ----------------------------------------------------------
if __name__ == '__main__':
NODES = 5
env = SysAdminEnv(env_num_obj=NODES, env_max_steps=10)
s = env.reset()
gvis = GraphVisualization(env)
a = -1
while(True):
# a = np.random.randint(env.num_obj)
a = np.random.choice(NODES, np.random.randint(0, NODES), replace=False)
probs = np.random.rand(NODES)
print(a)
print(probs)
gvis.update_state(env, a, probs)
gvis.plot().show()
s, r, d, i = env.step(a)
print(a, r)
if d:
gvis = GraphVisualization(env)
| 25.241071 | 194 | 0.662363 |
3e43d8b9a039af747051e4f38665ccd61353394f
| 3,974 |
py
|
Python
|
core/language_modelling.py
|
lkwate/e-greedy-lm
|
02e81fee93ee93faca0c1eb339b3c5ad55b4a639
|
[
"MIT"
] | 1 |
2021-11-09T19:18:00.000Z
|
2021-11-09T19:18:00.000Z
|
core/language_modelling.py
|
lkwate/e-greedy-lm
|
02e81fee93ee93faca0c1eb339b3c5ad55b4a639
|
[
"MIT"
] | null | null | null |
core/language_modelling.py
|
lkwate/e-greedy-lm
|
02e81fee93ee93faca0c1eb339b3c5ad55b4a639
|
[
"MIT"
] | null | null | null |
import torch
import torch.optim as optim
from transformers import AutoTokenizer
from .utils import epsilon_greedy_transform_label, uid_variance_fn, OPTIMIZER_DIC
import pytorch_lightning as pl
| 32.842975 | 87 | 0.638903 |
3e46f25a0298cc777cd9c283c93eaadaceb537e7
| 324 |
py
|
Python
|
tests/test_client.py
|
yakhinvadim/enterprise-search-python
|
a2010e8773a6250cb81ea48f760088bb23466bb1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client.py
|
yakhinvadim/enterprise-search-python
|
a2010e8773a6250cb81ea48f760088bb23466bb1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_client.py
|
yakhinvadim/enterprise-search-python
|
a2010e8773a6250cb81ea48f760088bb23466bb1
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase
from elastic_workplace_search.client import Client
| 23.142857 | 53 | 0.756173 |
3e49611f7036088bee4b0176d2681701d3c8a29d
| 4,437 |
py
|
Python
|
test/hash_url.py
|
neotext/neotext-django-server
|
7cfe98cd541ade9b26a1877f627e45a986b011e8
|
[
"MIT"
] | null | null | null |
test/hash_url.py
|
neotext/neotext-django-server
|
7cfe98cd541ade9b26a1877f627e45a986b011e8
|
[
"MIT"
] | 7 |
2015-11-30T02:59:23.000Z
|
2016-10-06T15:52:52.000Z
|
test/hash_url.py
|
neotext/neotext-django-server
|
7cfe98cd541ade9b26a1877f627e45a986b011e8
|
[
"MIT"
] | null | null | null |
from neotext.lib.neotext_quote_context.quote import Quote
t0 = Quote(
citing_quote="""<p>I am sick and tired of watching folks like Boris Johnson, Marine Le Pen, Donald Trump and others appeal to the worst racial instincts of our species, only to be shushed by folks telling me that it’s not <i>really</i> racism driving their popularity. It’s economic angst. It’s regular folks tired of being spurned by out-of-touch elites. It’s a natural anxiety over rapid cultural change.</p>
<p>Maybe it’s all those things. But at its core, it’s the last stand of old people who have been frightened to death by cynical right-wing media empires and the demagogues who enable themall of whom have based their appeals on racism as overt as anything we’ve seen in decades. It’s loathsome beyond belief, and not something I thought I’d ever see in my lifetime. But that’s where we are.</p>""",
citing_url='http://www.neotext.net/www.interfluidity.com/v2/6602.html',
cited_url='http://www.motherjones.com/kevin-drum/2016/06/brexit-wins'
)
t0.hashkey()
t0.hash()
from neotext.lib.neotext_quote_context.quote import Quote
t2 = Quote(
citing_quote="""
<p>I am sick and tired of watching folks like Boris Johnson, Marine Le Pen, Donald Trump and others appeal to the worst racial instincts of our species, only to be shushed by folks telling me that it’s not <i>really</i> racism driving their popularity. It’s economic angst. It’s regular folks tired of being spurned by out-of-touch elites. It’s a natural anxiety over rapid cultural change.</p>
<p>Maybe it’s all those things. But at its core, it’s the last stand of old people who have been frightened to death by cynical right-wing media empires and the demagogues who enable themall of whom have based their appeals on racism as overt as anything we’ve seen in decades. It’s loathsome beyond belief, and not something I thought I’d ever see in my lifetime. But that’s where we are.</p>
""",
citing_url='http://www.neotext.net/www.interfluidity.com/v2/6602.html',
cited_url='http://www.motherjones.com/kevin-drum/2016/06/brexit-wins'
)
t2.hashkey()
t2.hash()
t1 = Quote (
citing_quote="one does not live by bread alone, "
"but by every word that comes from the mouth of the Lord",
citing_url='http://www.neotext.net/demo/',
cited_url='https://www.biblegateway.com/passage/?search=Deuteronomy+8&version=NRSV'
)
# expected: t1.hash() == '32b19d9333fff69d16d5bf89bc1eb76f6b39eb58'
t1.data()['citing_context_before'] = 'ted texts on biblegateway.com; and the Al Gore example referenced an article on the washingtonpost.com. Using Neotext allows the reader to more easily make the intertextual connections between the two verses, without having to leave the current page. How the Neotext Quote-Context Service Works The example Ive given you is made possible through a WordPress Plugin that operates on the html <blockquote> tag: <blockquotecite=https://www.biblegateway.com/passage/?search=Deuteronomy+8&version=NRSV>'
t1.data()['citing_context_after'] = '</blockquote> As part of the wordpress saving process, the WordPress Plugin submits the url of the post to the Neotext Web Service, which looks up the surrounding context of each quote and creates json files for each citation. Each quotes citation file is uploaded to Amazon S3 for later retreival by the client. On the client side, the Neotext custom jQuery library: uses the url from each blockquote cite attribute and the quote hashes the url and quote text looks up the previously generated json from the hash: http://read.neotext.net/quote/sha1/0.02/32/32b19d9333fff69d16d5bf89bc1eb76f6b39eb58.json injects the content from the json fields into hidden divs, which are made visible when the user clicks the arrows or link: Code Example: The code for displaying the looked-up information is part of a free open source jQuery plugin called neotext-quote-context.js, available as a wordpress or jQuery plugin. Get simplified html source of this page Download neotext-sample.html (view online) Sav'
t2 = Quote(
citing_quote="I took the initiative in creating the Internet.",
citing_url="http://www.neotext.net/demo/"
cited_url="https://www.washingtonpost.com/news/fact-checker/wp/2013/11/04/a-cautionary-tale-for-politicians-al-gore-and-the-invention-of-the-internet/"
)
| 90.55102 | 1,038 | 0.771918 |
3e49ee4375c4fdbca12777a89f48b0e9f1e01d7a
| 3,590 |
py
|
Python
|
tests/imperative_vs_reactive/test_get_daily_average.py
|
BastiTee/bastis-python-toolbox
|
c313cf12607a973a1a8b8a9fbd73b2c8a47a82d8
|
[
"Apache-2.0"
] | 1 |
2016-04-06T14:09:43.000Z
|
2016-04-06T14:09:43.000Z
|
tests/imperative_vs_reactive/test_get_daily_average.py
|
BastiTee/bastis-python-toolbox
|
c313cf12607a973a1a8b8a9fbd73b2c8a47a82d8
|
[
"Apache-2.0"
] | null | null | null |
tests/imperative_vs_reactive/test_get_daily_average.py
|
BastiTee/bastis-python-toolbox
|
c313cf12607a973a1a8b8a9fbd73b2c8a47a82d8
|
[
"Apache-2.0"
] | 1 |
2022-03-19T04:21:40.000Z
|
2022-03-19T04:21:40.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Test suite for the daily average Toggl API process."""
from random import random
from tempfile import NamedTemporaryFile
from time import sleep, time
from unittest import TestCase
from recipes.imperative_vs_reactive.get_daily_average_imp import \
get_avg_daily_working_hours as imp
from recipes.imperative_vs_reactive.get_daily_average_rx import \
get_avg_daily_working_hours as rx
if __name__ == '__main__':
TestSuite().test_integration()
| 39.450549 | 79 | 0.620334 |
3e4a37d31db8b27c20ff44c3b6b28b18b2dd20b1
| 4,077 |
py
|
Python
|
pox/stats_monitor.py
|
nachtkatze/sdn-diagnosis
|
22b187d276bf302ef5811abc946b1af125dd17bc
|
[
"Apache-2.0"
] | null | null | null |
pox/stats_monitor.py
|
nachtkatze/sdn-diagnosis
|
22b187d276bf302ef5811abc946b1af125dd17bc
|
[
"Apache-2.0"
] | null | null | null |
pox/stats_monitor.py
|
nachtkatze/sdn-diagnosis
|
22b187d276bf302ef5811abc946b1af125dd17bc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Oscar Araque
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A skeleton POX component
You can customize this to do whatever you like. Don't forget to
adjust the Copyright above, and to delete the Apache license if you
don't want to release under Apache (but consider doing so!).
Rename this file to whatever you like, .e.g., mycomponent.py. You can
then invoke it with "./pox.py mycomponent" if you leave it in the
ext/ directory.
Implement a launch() function (as shown below) which accepts commandline
arguments and starts off your component (e.g., by listening to events).
Edit this docstring and your launch function's docstring. These will
show up when used with the help component ("./pox.py help --mycomponent").
"""
# Import some POX stuff
from pox.core import core # Main POX object
import pox.openflow.libopenflow_01 as of # OpenFlow 1.0 library
import pox.lib.packet as pkt # Packet parsing/construction
from pox.lib.addresses import EthAddr, IPAddr # Address types
import pox.lib.util as poxutil # Various util functions
import pox.lib.revent as revent # Event library
import pox.lib.recoco as recoco # Multitasking library
from pox.openflow.of_json import *
import multiprocessing
import json
# Create a logger for this component
log = core.getLogger("Monitor")
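# --- illustrative launch() (added sketch; not part of the original skeleton) -
# The docstring above asks for a launch() function; this is a hedged, minimal
# example of the usual POX pattern. The listener and the log message below are
# illustrative assumptions, not this component's real behaviour.
def launch():
    def _handle_ConnectionUp(event):
        log.info("Switch %s connected", poxutil.dpid_to_str(event.dpid))
    core.openflow.addListenerByName("ConnectionUp", _handle_ConnectionUp)
    log.info("stats_monitor component started")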
| 36.72973 | 81 | 0.709345 |
3e4a39484ed02c469223ab4065ec6d989a83a302
| 7,623 |
py
|
Python
|
tests/app_example.py
|
omarryhan/flask-stateless-auth
|
c6acefc55050d1a53235ead20cb7d5e9eb4bbf9a
|
[
"MIT"
] | 3 |
2018-09-13T19:55:47.000Z
|
2018-09-15T18:31:22.000Z
|
tests/app_example.py
|
omarryhan/flask-stateless-auth
|
c6acefc55050d1a53235ead20cb7d5e9eb4bbf9a
|
[
"MIT"
] | null | null | null |
tests/app_example.py
|
omarryhan/flask-stateless-auth
|
c6acefc55050d1a53235ead20cb7d5e9eb4bbf9a
|
[
"MIT"
] | null | null | null |
import os
import datetime
import secrets
import json
from flask import Flask, abort, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from werkzeug.security import safe_str_cmp
from flask_stateless_auth import (
StatelessAuthError,
StatelessAuthManager,
current_stateless_user,
UserMixin,
TokenMixin,
token_required,
)
db = SQLAlchemy()
stateless_auth_manager = StatelessAuthManager()
app = Flask(__name__.split(".")[0])
if __name__ == "__main__":
app.config.from_object(Config())
db.init_app(app)
with app.app_context():
db.create_all()
stateless_auth_manager.init_app(app)
app.run()
| 30.987805 | 88 | 0.674406 |
3e4e3e3f65d730e416b620ade003178d96c61532
| 920 |
py
|
Python
|
stereo/stereo.py
|
whaleygeek/microbit_python
|
1fa8e0f34cfa2a92d7c5c32fc5ee5287c5d5b105
|
[
"MIT"
] | 8 |
2016-11-15T23:04:25.000Z
|
2021-05-17T17:42:47.000Z
|
stereo/stereo.py
|
whaleygeek/microbit_python
|
1fa8e0f34cfa2a92d7c5c32fc5ee5287c5d5b105
|
[
"MIT"
] | null | null | null |
stereo/stereo.py
|
whaleygeek/microbit_python
|
1fa8e0f34cfa2a92d7c5c32fc5ee5287c5d5b105
|
[
"MIT"
] | null | null | null |
from microbit import *
import music
A = False
B = False
PITCH = 440
# PIN2 read_analog()
ACTION_VALUE = 50
VOLUMEUP_VALUE = 150
VOLUMEDOWN_VALUE = 350
#nothing: 944
prev_l = False
prev_r = False
l = False
r = False
while True:
v = pin2.read_analog()
if v < ACTION_VALUE:
l,r = True, True
elif v < VOLUMEUP_VALUE:
l,r = False, True
elif v < VOLUMEDOWN_VALUE:
l,r = True, False
else:
l,r = False, False
if l != prev_l:
prev_l = l
if l:
music.pitch(PITCH, pin=pin0)
display.set_pixel(0,2,9)
else:
display.set_pixel(0,2,0)
music.stop(pin0)
if r != prev_r:
prev_r = r
if r:
display.set_pixel(4,2,9)
music.pitch(PITCH, pin=pin1)
else:
display.set_pixel(4,2,0)
music.stop(pin1)
| 18.77551 | 40 | 0.519565 |
3e50073943f2d59f2a64f9e25a36110605822852
| 1,062 |
py
|
Python
|
comments/migrations/0004_auto_20170531_1011.py
|
salazarpardo/redinnovacion
|
3f7c13af0af1887112a0492aea7782871fba0129
|
[
"CC-BY-3.0"
] | null | null | null |
comments/migrations/0004_auto_20170531_1011.py
|
salazarpardo/redinnovacion
|
3f7c13af0af1887112a0492aea7782871fba0129
|
[
"CC-BY-3.0"
] | null | null | null |
comments/migrations/0004_auto_20170531_1011.py
|
salazarpardo/redinnovacion
|
3f7c13af0af1887112a0492aea7782871fba0129
|
[
"CC-BY-3.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
| 34.258065 | 114 | 0.615819 |
3e50929d0bf53c378eb006069dd354ec7a7241ac
| 1,096 |
py
|
Python
|
hospital/migrations/0011_auto_20210502_1057.py
|
Shreyashm16/Hospital-Appointment-and-Information-System
|
929b7eb22cc6a0399e6fff3c7012d1c65d7c47cb
|
[
"MIT"
] | 7 |
2021-07-15T08:59:58.000Z
|
2021-12-29T20:21:36.000Z
|
hospital/migrations/0011_auto_20210502_1057.py
|
siddharth25pandey/Hospital-Information-Appointment-System
|
1df5edd1f0dc2f0f385e7195db221027b4f64efb
|
[
"MIT"
] | null | null | null |
hospital/migrations/0011_auto_20210502_1057.py
|
siddharth25pandey/Hospital-Information-Appointment-System
|
1df5edd1f0dc2f0f385e7195db221027b4f64efb
|
[
"MIT"
] | 4 |
2021-05-11T08:36:02.000Z
|
2021-08-08T11:45:11.000Z
|
# Generated by Django 3.1.5 on 2021-05-02 05:27
from django.db import migrations, models
import django.db.models.deletion
| 34.25 | 148 | 0.583942 |
3e509827f57ba47184b958f8189726f8a1765c22
| 87 |
py
|
Python
|
division.py
|
ReverseScale/PyDemo
|
9cc6f3cbb8482f6e403bf65419537b0163798e61
|
[
"MIT"
] | null | null | null |
division.py
|
ReverseScale/PyDemo
|
9cc6f3cbb8482f6e403bf65419537b0163798e61
|
[
"MIT"
] | null | null | null |
division.py
|
ReverseScale/PyDemo
|
9cc6f3cbb8482f6e403bf65419537b0163798e61
|
[
"MIT"
] | null | null | null |
try:
    print(5/0)
except ZeroDivisionError:
    print("You can't divide by zero!")
else:
    print('Division succeeded.')
| 14.5 | 25 | 0.62069 |
3e522957a432795bf32198db1cc68b1e2615e3f9
| 1,924 |
py
|
Python
|
Script/calculate_RMSD.py
|
dhruvsangamwar/Protein-structure-prediction
|
99364bfd62f8293ddbe8e2c9a86ca7850b270d44
|
[
"MIT"
] | 1 |
2022-01-30T08:20:08.000Z
|
2022-01-30T08:20:08.000Z
|
Script/calculate_RMSD.py
|
dhruvsangamwar/ECS_129_Protein-structure-prediction
|
99364bfd62f8293ddbe8e2c9a86ca7850b270d44
|
[
"MIT"
] | null | null | null |
Script/calculate_RMSD.py
|
dhruvsangamwar/ECS_129_Protein-structure-prediction
|
99364bfd62f8293ddbe8e2c9a86ca7850b270d44
|
[
"MIT"
] | null | null | null |
import pdbCleanup as pc
import fxndefinitions as f
import numpy as np
from numpy.linalg import eig
pc.takeInput1()
DataFrame1 = []
pc.CsvToDataframe(DataFrame1)
pc.takeInput2()
DataFrame2 = []
pc.CsvToDataframe(DataFrame2)
xtil = [0, 0, 0]
ytil = [0, 0, 0]
x = np.array(DataFrame1)
y = np.array(DataFrame2)
# This finds the number of CA atoms in both of the proteins
N1 = np.size(x, 0)
N2 = np.size(y, 0)
# finding the average of the x coords in protein 1 and 2 (arr1 & 2)
# these two functions calculate the barycenter
# Here we will be finding Xtil && Ytil = X && Y - G
Gx = f.findG(x, N1)
Gy = f.findG(y, N2)
xtil = np.subtract(x, Gx)
ytil = np.subtract(y, Gy)
# we now have the ~x_k Coords and the ~y_k Coords respectively
# this function will calculate all the 9 R values
R11 = R12 = R13 = R21 = R22 = R23 = R31 = R32 = R33 = 0
for i in range(0, N1):
R11 += xtil[i][0] * ytil[i][0]
R12 += xtil[i][0] * ytil[i][1]
R13 += xtil[i][0] * ytil[i][2]
R21 += xtil[i][1] * ytil[i][0]
R22 += xtil[i][1] * ytil[i][1]
R23 += xtil[i][1] * ytil[i][2]
R31 += xtil[i][2] * ytil[i][0]
R32 += xtil[i][2] * ytil[i][1]
R33 += xtil[i][2] * ytil[i][2]
# matrix given by equation 10 from the paper
Matrix = np.array([[R11+R22+R33, R23-R32, R31-R13, R12-R21],
[R23-R32, R11-R22-R33, R12+R21, R13+R31],
[R31-R13, R12+R21, -R11+R22-R33, R23+R32],
[R12-R21, R13+R31, R23+R32, -R11-R22+R33]])
# Here we calculate the maxEigenvalue for the final calucaltion
w, v = eig(Matrix)
maxEig = np.amax(w)
# Now we will find the best fit RMSD using the steps below
temp = [0, 0, 0]
for i in range(0, N1):
temp += np.add((np.square(xtil[i])), np.square(ytil[i]))
n = temp[0] + temp[1] + temp[2]
var = np.subtract(n, 2*maxEig)
temp2 = np.true_divide(var, np.size(xtil, 0))
RMSD = np.sqrt(abs(temp2))
RMSD = round(RMSD, 2)
print(RMSD)
| 24.666667 | 67 | 0.613825 |
3e52b2bc37f4905d4ab47d9e200507510863fee4
| 626 |
py
|
Python
|
Python/Data Structure and Algorithm/Krish Naik/Amazon Que.py
|
omkarsutar1255/Python-Data
|
169d0c54b23d9dd5a7f1aea41ab385121c3b3c63
|
[
"CC-BY-3.0"
] | null | null | null |
Python/Data Structure and Algorithm/Krish Naik/Amazon Que.py
|
omkarsutar1255/Python-Data
|
169d0c54b23d9dd5a7f1aea41ab385121c3b3c63
|
[
"CC-BY-3.0"
] | null | null | null |
Python/Data Structure and Algorithm/Krish Naik/Amazon Que.py
|
omkarsutar1255/Python-Data
|
169d0c54b23d9dd5a7f1aea41ab385121c3b3c63
|
[
"CC-BY-3.0"
] | null | null | null |
# l1 = [1095, 1094, 1095]
# del l1[:]
# l1.extend([1005, 1094, 1095])
# print(l1)
l1 = [8676, 4444, 3333, 2222, 1111]
for i, n in enumerate(l1):
print(i, n)
if int(n / 1000) == 1:
l1[i] = n + 8000
elif int(n / 1000) == 2:
l1[i] = n + 6000
elif int(n / 1000) == 3:
l1[i] = n + 4000
elif int(n / 1000) == 4:
l1[i] = n + 2000
elif int(n / 1000) == 6:
l1[i] = n - 2000
elif int(n / 1000) == 7:
l1[i] = n + 4000
elif int(n / 1000) == 8:
l1[i] = n - 6000
elif int(n / 1000) == 9:
l1[i] = n - 8000
else:
pass
print(l1)
| 21.586207 | 35 | 0.4377 |
3e572d40ef88a1ec3058d9cc94eb6dce557f2d6d
| 4,728 |
py
|
Python
|
src/voicemaker/voicemaker.py
|
IAL32/voicemaker
|
66c9dd25749743d94bb9c3aac8ba2c858f327723
|
[
"MIT"
] | null | null | null |
src/voicemaker/voicemaker.py
|
IAL32/voicemaker
|
66c9dd25749743d94bb9c3aac8ba2c858f327723
|
[
"MIT"
] | 1 |
2022-03-04T14:52:16.000Z
|
2022-03-08T08:00:59.000Z
|
src/voicemaker/voicemaker.py
|
IAL32/voicemaker
|
66c9dd25749743d94bb9c3aac8ba2c858f327723
|
[
"MIT"
] | null | null | null |
import requests
LANGUAGES_LIST = [
'en-US', 'en-GB', 'en-AU', 'en-HK', 'en-NZ', 'en-SG', 'en-ZA', 'de-DE',
'ar-XA', 'ar-SA', 'bn-IN', 'bg-BG', 'ca-ES', 'cmn-CN', 'zh-HK', 'cmn-TW',
'cy-GB', 'cs-CZ', 'da-DK', 'de-CH', 'es-AR', 'es-CO', 'es-US', 'ga-IE',
'gu-IN', 'hr-HR', 'mr-IN', 'ms-MY', 'mt-MT', 'nl-NL', 'nl-BE', 'en-CA',
'en-IN', 'en-IE', 'et-EE', 'en-PH', 'fil-PH', 'fi-FI', 'fr-BE', 'fr-FR',
'fr-CA', 'fr-CH', 'el-GR', 'he-IL', 'hi-IN', 'hu-HU', 'id-ID', 'it-IT',
'ja-JP', 'lv-LV', 'lt-LT', 'ko-KR', 'nb-NO', 'pl-PL', 'pt-PT', 'pt-BR',
'ro-RO', 'ru-RU', 'sk-SK', 'sw-KE', 'es-ES', 'es-MX', 'es-LA', 'es-US',
'sl-SI', 'sv-SE', 'tr-TR', 'ta-IN', 'te-IN', 'th-TH', 'uk-UA', 'ur-PK',
'vi-VN'
]
| 39.4 | 164 | 0.597716 |
3e5810f45ee6abfb855c478735026a678b651dd9
| 1,365 |
py
|
Python
|
Lecture/Kapitel 9 - Seite 235 - Implementierung des Gradientenverfahrens.py
|
PhilippMatthes/tensorflow-playground
|
b5fee6e5f5044dc5cbcd54529d559388a3df7813
|
[
"MIT"
] | null | null | null |
Lecture/Kapitel 9 - Seite 235 - Implementierung des Gradientenverfahrens.py
|
PhilippMatthes/tensorflow-playground
|
b5fee6e5f5044dc5cbcd54529d559388a3df7813
|
[
"MIT"
] | null | null | null |
Lecture/Kapitel 9 - Seite 235 - Implementierung des Gradientenverfahrens.py
|
PhilippMatthes/tensorflow-playground
|
b5fee6e5f5044dc5cbcd54529d559388a3df7813
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import numpy as np
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
m, n = housing.data.shape
housing_data_plus_bias = np.c_[np.ones((m, 1)), housing.data]
X = tf.constant(housing_data_plus_bias, dtype=tf.float32, name="X")
y = tf.constant(housing.target.reshape(-1, 1), dtype=tf.float32, name="y")
n_epochs = 1000
learning_rate = 0.01
theta = tf.Variable(tf.random_uniform([n + 1, 1], -1.0, 1.0), name="theta")
y_pred = tf.matmul(X, theta, name="predictions")
error = y_pred - y
mse = tf.reduce_mean(tf.square(error), name="mse")
gradients = 2 / m * tf.matmul(tf.transpose(X), error)
training_op = tf.assign(theta, theta - learning_rate * gradients)
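# --- added helper (hedged sketch; run() is used below but never defined here) -
# A minimal TF1-style training loop: it initialises the variables and executes
# whichever `training_op`/`mse` happen to be bound at call time, so each of the
# run() calls below trains with the graph pieces defined just above it.
def run():
    init = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init)
        for epoch in range(n_epochs):
            if epoch % 100 == 0:
                print("Epoch", epoch, "MSE =", mse.eval())
            sess.run(training_op)
        print("best theta:", theta.eval())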
run()
gradients = tf.gradients(mse, [theta])[0]
run()
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
training_op = optimizer.minimize(mse)
run()
optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=0.9)
training_op = optimizer.minimize(mse)
run()
| 26.764706 | 81 | 0.69304 |
3e582f1280b1545b27d8bb65ef57684f484bd7bc
| 1,634 |
py
|
Python
|
python/Fluoroseq/obsolete/scripts/intrinsic_pr_bounds.py
|
erisyon/whatprot
|
176cd7e6ee99ea3f91794dcf1ec14f3578b7ee3c
|
[
"MIT"
] | null | null | null |
python/Fluoroseq/obsolete/scripts/intrinsic_pr_bounds.py
|
erisyon/whatprot
|
176cd7e6ee99ea3f91794dcf1ec14f3578b7ee3c
|
[
"MIT"
] | 1 |
2021-06-12T00:50:08.000Z
|
2021-06-15T17:59:12.000Z
|
python/Fluoroseq/obsolete/scripts/intrinsic_pr_bounds.py
|
erisyon/whatprot
|
176cd7e6ee99ea3f91794dcf1ec14f3578b7ee3c
|
[
"MIT"
] | 1 |
2021-06-11T19:34:43.000Z
|
2021-06-11T19:34:43.000Z
|
# -*- coding: utf-8 -*-
"""
@author: Matthew Beauregard Smith (UT Austin)
"""
from common.peptide import Peptide
from plotting.plot_pr_curve import plot_pr_curve
from numpy import load
from simulate.label_peptides import label_peptides
TRUE_Y_FILE = 'C:/Users/Matthew/ICES/MarcotteLab/data/classification/control_15_proteins/true_pep_i.npy'
NUM_PEPTIDES = 705
NUM_CHANNELS = 3
LABEL_SET = ['DE','Y','C']
PEPTIDE_FILE = 'C:/Users/Matthew/ICES/MarcotteLab/data/classification/control_15_proteins/peps.csv'
true_y = load(TRUE_Y_FILE)
ground_truth = [0] * len(true_y)
for i in range(0, len(true_y)):
ground_truth[i] = GroundTruth(true_y[i])
f = open(PEPTIDE_FILE, 'r')
f.readline() # header
f.readline() # Zack's null line
line = f.readline()
peptides = [0] * NUM_PEPTIDES
i = 0
while line != '\n' and line != '':
items = line.split(",")
pep_id = items[0]
pep_str = items[-1]
peptides[i] = Peptide(pep_str, pep_id=pep_id)
line = f.readline()
i += 1
f.close()
dye_seqs = label_peptides(peptides, LABEL_SET)
id_to_prediction = {}
for dye_seq in dye_seqs:
for peptide in dye_seq.src_peptides:
id_to_prediction[int(peptide.pep_id)] = (
int(dye_seq.src_peptides[0].pep_id),
1 / len(dye_seq.src_peptides))
predictions = [0] * len(ground_truth)
for i in range(len(ground_truth)):
predictions[i] = id_to_prediction[ground_truth[i].value]
plot_pr_curve(predictions, ground_truth)
| 30.259259 | 105 | 0.676255 |
3e5961792d37ca4a7091e59c1c74180b0cb0ef47
| 337,498 |
py
|
Python
|
fitparse/profile.py
|
vlcvboyer/python-fitparse
|
bef76231a1c8dddfafc23070b43684e7d0c6e916
|
[
"MIT"
] | null | null | null |
fitparse/profile.py
|
vlcvboyer/python-fitparse
|
bef76231a1c8dddfafc23070b43684e7d0c6e916
|
[
"MIT"
] | 1 |
2019-01-03T08:56:11.000Z
|
2019-06-05T09:24:13.000Z
|
fitparse/profile.py
|
vlcvboyer/python-fitparse
|
bef76231a1c8dddfafc23070b43684e7d0c6e916
|
[
"MIT"
] | 1 |
2018-08-30T15:06:12.000Z
|
2018-08-30T15:06:12.000Z
|
################# BEGIN AUTOMATICALLY GENERATED FIT PROFILE ##################
########################### DO NOT EDIT THIS FILE ############################
####### EXPORTED PROFILE FROM SDK VERSION 20.33 AT 2017-05-17 22:36:12 #######
########## PARSED 118 TYPES (1699 VALUES), 76 MESSAGES (950 FIELDS) ##########
from fitparse.records import (
ComponentField,
Field,
FieldType,
MessageType,
ReferenceField,
SubField,
BASE_TYPES,
)
FIELD_TYPES = {
'activity': FieldType(
name='activity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'auto_multi_sport',
},
),
'activity_class': FieldType(
name='activity_class',
base_type=BASE_TYPES[0x00], # enum
values={
100: 'level_max',
0x7F: 'level', # 0 to 100
0x80: 'athlete',
},
),
'activity_level': FieldType(
name='activity_level',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'low',
1: 'medium',
2: 'high',
},
),
'activity_subtype': FieldType(
name='activity_subtype',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'treadmill', # Run
2: 'street', # Run
3: 'trail', # Run
4: 'track', # Run
5: 'spin', # Cycling
6: 'indoor_cycling', # Cycling
7: 'road', # Cycling
8: 'mountain', # Cycling
9: 'downhill', # Cycling
10: 'recumbent', # Cycling
11: 'cyclocross', # Cycling
12: 'hand_cycling', # Cycling
13: 'track_cycling', # Cycling
14: 'indoor_rowing', # Fitness Equipment
15: 'elliptical', # Fitness Equipment
16: 'stair_climbing', # Fitness Equipment
17: 'lap_swimming', # Swimming
18: 'open_water', # Swimming
254: 'all',
},
),
'activity_type': FieldType(
name='activity_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'running',
2: 'cycling',
3: 'transition', # Mulitsport transition
4: 'fitness_equipment',
5: 'swimming',
6: 'walking',
8: 'sedentary',
254: 'all', # All is for goals only to include all sports.
},
),
'analog_watchface_layout': FieldType(
name='analog_watchface_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'minimal',
1: 'traditional',
2: 'modern',
},
),
'ant_network': FieldType(
name='ant_network',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'public',
1: 'antplus',
2: 'antfs',
3: 'private',
},
),
'antplus_device_type': FieldType(
name='antplus_device_type',
base_type=BASE_TYPES[0x02], # uint8
values={
1: 'antfs',
11: 'bike_power',
12: 'environment_sensor_legacy',
15: 'multi_sport_speed_distance',
16: 'control',
17: 'fitness_equipment',
18: 'blood_pressure',
19: 'geocache_node',
20: 'light_electric_vehicle',
25: 'env_sensor',
26: 'racquet',
27: 'control_hub',
31: 'muscle_oxygen',
35: 'bike_light_main',
36: 'bike_light_shared',
38: 'exd',
40: 'bike_radar',
119: 'weight_scale',
120: 'heart_rate',
121: 'bike_speed_cadence',
122: 'bike_cadence',
123: 'bike_speed',
124: 'stride_speed_distance',
},
),
'attitude_stage': FieldType(
name='attitude_stage',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'failed',
1: 'aligning',
2: 'degraded',
3: 'valid',
},
),
'attitude_validity': FieldType(
name='attitude_validity',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0001: 'track_angle_heading_valid',
0x0002: 'pitch_valid',
0x0004: 'roll_valid',
0x0008: 'lateral_body_accel_valid',
0x0010: 'normal_body_accel_valid',
0x0020: 'turn_rate_valid',
0x0040: 'hw_fail',
0x0080: 'mag_invalid',
0x0100: 'no_gps',
0x0200: 'gps_invalid',
0x0400: 'solution_coasting',
0x0800: 'true_track_angle',
0x1000: 'magnetic_heading',
},
),
'auto_activity_detect': FieldType(
name='auto_activity_detect',
base_type=BASE_TYPES[0x86], # uint32
values={
0x00000000: 'none',
0x00000001: 'running',
0x00000002: 'cycling',
0x00000004: 'swimming',
0x00000008: 'walking',
0x00000020: 'elliptical',
0x00000400: 'sedentary',
},
),
'auto_sync_frequency': FieldType(
name='auto_sync_frequency',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'never',
1: 'occasionally',
2: 'frequent',
3: 'once_a_day',
4: 'remote',
},
),
'autolap_trigger': FieldType(
name='autolap_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'position_start',
3: 'position_lap',
4: 'position_waypoint',
5: 'position_marked',
6: 'off',
},
),
'autoscroll': FieldType(
name='autoscroll',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'none',
1: 'slow',
2: 'medium',
3: 'fast',
},
),
'backlight_mode': FieldType(
name='backlight_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'manual',
2: 'key_and_messages',
3: 'auto_brightness',
4: 'smart_notifications',
5: 'key_and_messages_night',
6: 'key_and_messages_and_smart_notifications',
},
),
'battery_status': FieldType(
name='battery_status',
base_type=BASE_TYPES[0x02], # uint8
values={
1: 'new',
2: 'good',
3: 'ok',
4: 'low',
5: 'critical',
6: 'charging',
7: 'unknown',
},
),
'bike_light_beam_angle_mode': FieldType(
name='bike_light_beam_angle_mode',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'manual',
1: 'auto',
},
),
'bike_light_network_config_type': FieldType(
name='bike_light_network_config_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto',
4: 'individual',
5: 'high_visibility',
6: 'trail',
},
),
'body_location': FieldType(
name='body_location',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'left_leg',
1: 'left_calf',
2: 'left_shin',
3: 'left_hamstring',
4: 'left_quad',
5: 'left_glute',
6: 'right_leg',
7: 'right_calf',
8: 'right_shin',
9: 'right_hamstring',
10: 'right_quad',
11: 'right_glute',
12: 'torso_back',
13: 'left_lower_back',
14: 'left_upper_back',
15: 'right_lower_back',
16: 'right_upper_back',
17: 'torso_front',
18: 'left_abdomen',
19: 'left_chest',
20: 'right_abdomen',
21: 'right_chest',
22: 'left_arm',
23: 'left_shoulder',
24: 'left_bicep',
25: 'left_tricep',
26: 'left_brachioradialis', # Left anterior forearm
27: 'left_forearm_extensors', # Left posterior forearm
28: 'right_arm',
29: 'right_shoulder',
30: 'right_bicep',
31: 'right_tricep',
32: 'right_brachioradialis', # Right anterior forearm
33: 'right_forearm_extensors', # Right posterior forearm
34: 'neck',
35: 'throat',
36: 'waist_mid_back',
37: 'waist_front',
38: 'waist_left',
39: 'waist_right',
},
),
'bool': FieldType(
name='bool',
base_type=BASE_TYPES[0x00], # enum
),
'bp_status': FieldType(
name='bp_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_error',
1: 'error_incomplete_data',
2: 'error_no_measurement',
3: 'error_data_out_of_range',
4: 'error_irregular_heart_rate',
},
),
'camera_event_type': FieldType(
name='camera_event_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'video_start', # Start of video recording
1: 'video_split', # Mark of video file split (end of one file, beginning of the other)
2: 'video_end', # End of video recording
3: 'photo_taken', # Still photo taken
4: 'video_second_stream_start',
5: 'video_second_stream_split',
6: 'video_second_stream_end',
7: 'video_split_start', # Mark of video file split start
8: 'video_second_stream_split_start',
11: 'video_pause', # Mark when a video recording has been paused
12: 'video_second_stream_pause',
13: 'video_resume', # Mark when a video recording has been resumed
14: 'video_second_stream_resume',
},
),
'camera_orientation_type': FieldType(
name='camera_orientation_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'camera_orientation_0',
1: 'camera_orientation_90',
2: 'camera_orientation_180',
3: 'camera_orientation_270',
},
),
'checksum': FieldType(
name='checksum',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'clear', # Allows clear of checksum for flash memory where can only write 1 to 0 without erasing sector.
1: 'ok', # Set to mark checksum as valid if computes to invalid values 0 or 0xFF. Checksum can also be set to ok to save encoding computation time.
},
),
'comm_timeout_type': FieldType(
name='comm_timeout_type',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'wildcard_pairing_timeout', # Timeout pairing to any device
1: 'pairing_timeout', # Timeout pairing to previously paired device
2: 'connection_lost', # Temporary loss of communications
3: 'connection_timeout', # Connection closed due to extended bad communications
},
),
'connectivity_capabilities': FieldType(
name='connectivity_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'bluetooth',
0x00000002: 'bluetooth_le',
0x00000004: 'ant',
0x00000008: 'activity_upload',
0x00000010: 'course_download',
0x00000020: 'workout_download',
0x00000040: 'live_track',
0x00000080: 'weather_conditions',
0x00000100: 'weather_alerts',
0x00000200: 'gps_ephemeris_download',
0x00000400: 'explicit_archive',
0x00000800: 'setup_incomplete',
0x00001000: 'continue_sync_after_software_update',
0x00002000: 'connect_iq_app_download',
0x00004000: 'golf_course_download',
0x00008000: 'device_initiates_sync', # Indicates device is in control of initiating all syncs
0x00010000: 'connect_iq_watch_app_download',
0x00020000: 'connect_iq_widget_download',
0x00040000: 'connect_iq_watch_face_download',
0x00080000: 'connect_iq_data_field_download',
0x00100000: 'connect_iq_app_managment', # Device supports delete and reorder of apps via GCM
0x00200000: 'swing_sensor',
0x00400000: 'swing_sensor_remote',
0x00800000: 'incident_detection', # Device supports incident detection
0x01000000: 'audio_prompts',
0x02000000: 'wifi_verification', # Device supports reporting wifi verification via GCM
0x04000000: 'true_up', # Device supports True Up
0x08000000: 'find_my_watch', # Device supports Find My Watch
0x10000000: 'remote_manual_sync',
0x20000000: 'live_track_auto_start', # Device supports LiveTrack auto start
0x40000000: 'live_track_messaging', # Device supports LiveTrack Messaging
0x80000000: 'instant_input', # Device supports instant input feature
},
),
'course_capabilities': FieldType(
name='course_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'processed',
0x00000002: 'valid',
0x00000004: 'time',
0x00000008: 'distance',
0x00000010: 'position',
0x00000020: 'heart_rate',
0x00000040: 'power',
0x00000080: 'cadence',
0x00000100: 'training',
0x00000200: 'navigation',
0x00000400: 'bikeway',
},
),
'course_point': FieldType(
name='course_point',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'summit',
2: 'valley',
3: 'water',
4: 'food',
5: 'danger',
6: 'left',
7: 'right',
8: 'straight',
9: 'first_aid',
10: 'fourth_category',
11: 'third_category',
12: 'second_category',
13: 'first_category',
14: 'hors_category',
15: 'sprint',
16: 'left_fork',
17: 'right_fork',
18: 'middle_fork',
19: 'slight_left',
20: 'sharp_left',
21: 'slight_right',
22: 'sharp_right',
23: 'u_turn',
24: 'segment_start',
25: 'segment_end',
},
),
'date_mode': FieldType(
name='date_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'day_month',
1: 'month_day',
},
),
'date_time': FieldType( # seconds since UTC 00:00 Dec 31 1989
name='date_time',
base_type=BASE_TYPES[0x86], # uint32
),
'day_of_week': FieldType(
name='day_of_week',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'sunday',
1: 'monday',
2: 'tuesday',
3: 'wednesday',
4: 'thursday',
5: 'friday',
6: 'saturday',
},
),
'device_index': FieldType(
name='device_index',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'creator', # Creator of the file is always device index 0.
},
),
'digital_watchface_layout': FieldType(
name='digital_watchface_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'traditional',
1: 'modern',
2: 'bold',
},
),
'display_heart': FieldType(
name='display_heart',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'bpm',
1: 'max',
2: 'reserve',
},
),
'display_measure': FieldType(
name='display_measure',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'metric',
1: 'statute',
2: 'nautical',
},
),
'display_orientation': FieldType(
name='display_orientation',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto', # automatic if the device supports it
1: 'portrait',
2: 'landscape',
3: 'portrait_flipped', # portrait mode but rotated 180 degrees
4: 'landscape_flipped', # landscape mode but rotated 180 degrees
},
),
'display_position': FieldType(
name='display_position',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'degree', # dd.dddddd
1: 'degree_minute', # dddmm.mmm
2: 'degree_minute_second', # dddmmss
3: 'austrian_grid', # Austrian Grid (BMN)
4: 'british_grid', # British National Grid
5: 'dutch_grid', # Dutch grid system
6: 'hungarian_grid', # Hungarian grid system
7: 'finnish_grid', # Finnish grid system Zone3 KKJ27
8: 'german_grid', # Gausss Krueger (German)
9: 'icelandic_grid', # Icelandic Grid
10: 'indonesian_equatorial', # Indonesian Equatorial LCO
11: 'indonesian_irian', # Indonesian Irian LCO
12: 'indonesian_southern', # Indonesian Southern LCO
13: 'india_zone_0', # India zone 0
14: 'india_zone_IA', # India zone IA
15: 'india_zone_IB', # India zone IB
16: 'india_zone_IIA', # India zone IIA
17: 'india_zone_IIB', # India zone IIB
18: 'india_zone_IIIA', # India zone IIIA
19: 'india_zone_IIIB', # India zone IIIB
20: 'india_zone_IVA', # India zone IVA
21: 'india_zone_IVB', # India zone IVB
22: 'irish_transverse', # Irish Transverse Mercator
23: 'irish_grid', # Irish Grid
24: 'loran', # Loran TD
25: 'maidenhead_grid', # Maidenhead grid system
26: 'mgrs_grid', # MGRS grid system
27: 'new_zealand_grid', # New Zealand grid system
28: 'new_zealand_transverse', # New Zealand Transverse Mercator
29: 'qatar_grid', # Qatar National Grid
30: 'modified_swedish_grid', # Modified RT-90 (Sweden)
31: 'swedish_grid', # RT-90 (Sweden)
32: 'south_african_grid', # South African Grid
33: 'swiss_grid', # Swiss CH-1903 grid
34: 'taiwan_grid', # Taiwan Grid
35: 'united_states_grid', # United States National Grid
36: 'utm_ups_grid', # UTM/UPS grid system
37: 'west_malayan', # West Malayan RSO
38: 'borneo_rso', # Borneo RSO
39: 'estonian_grid', # Estonian grid system
40: 'latvian_grid', # Latvian Transverse Mercator
41: 'swedish_ref_99_grid', # Reference Grid 99 TM (Swedish)
},
),
'display_power': FieldType(
name='display_power',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'watts',
1: 'percent_ftp',
},
),
'event': FieldType(
name='event',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'timer', # Group 0. Start / stop_all
3: 'workout', # start / stop
4: 'workout_step', # Start at beginning of workout. Stop at end of each step.
5: 'power_down', # stop_all group 0
6: 'power_up', # stop_all group 0
7: 'off_course', # start / stop group 0
8: 'session', # Stop at end of each session.
9: 'lap', # Stop at end of each lap.
10: 'course_point', # marker
11: 'battery', # marker
12: 'virtual_partner_pace', # Group 1. Start at beginning of activity if VP enabled, when VP pace is changed during activity or VP enabled mid activity. stop_disable when VP disabled.
13: 'hr_high_alert', # Group 0. Start / stop when in alert condition.
14: 'hr_low_alert', # Group 0. Start / stop when in alert condition.
15: 'speed_high_alert', # Group 0. Start / stop when in alert condition.
16: 'speed_low_alert', # Group 0. Start / stop when in alert condition.
17: 'cad_high_alert', # Group 0. Start / stop when in alert condition.
18: 'cad_low_alert', # Group 0. Start / stop when in alert condition.
19: 'power_high_alert', # Group 0. Start / stop when in alert condition.
20: 'power_low_alert', # Group 0. Start / stop when in alert condition.
21: 'recovery_hr', # marker
22: 'battery_low', # marker
23: 'time_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
24: 'distance_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
25: 'calorie_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
26: 'activity', # Group 1.. Stop at end of activity.
27: 'fitness_equipment', # marker
28: 'length', # Stop at end of each length.
32: 'user_marker', # marker
33: 'sport_point', # marker
36: 'calibration', # start/stop/marker
42: 'front_gear_change', # marker
43: 'rear_gear_change', # marker
44: 'rider_position_change', # marker
45: 'elev_high_alert', # Group 0. Start / stop when in alert condition.
46: 'elev_low_alert', # Group 0. Start / stop when in alert condition.
47: 'comm_timeout', # marker
},
),
'event_type': FieldType(
name='event_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'start',
1: 'stop',
2: 'consecutive_depreciated',
3: 'marker',
4: 'stop_all',
5: 'begin_depreciated',
6: 'end_depreciated',
7: 'end_all_depreciated',
8: 'stop_disable',
9: 'stop_disable_all',
},
),
'exd_data_units': FieldType(
name='exd_data_units',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_units',
1: 'laps',
2: 'miles_per_hour',
3: 'kilometers_per_hour',
4: 'feet_per_hour',
5: 'meters_per_hour',
6: 'degrees_celsius',
7: 'degrees_farenheit',
8: 'zone',
9: 'gear',
10: 'rpm',
11: 'bpm',
12: 'degrees',
13: 'millimeters',
14: 'meters',
15: 'kilometers',
16: 'feet',
17: 'yards',
18: 'kilofeet',
19: 'miles',
20: 'time',
21: 'enum_turn_type',
22: 'percent',
23: 'watts',
24: 'watts_per_kilogram',
25: 'enum_battery_status',
26: 'enum_bike_light_beam_angle_mode',
27: 'enum_bike_light_battery_status',
28: 'enum_bike_light_network_config_type',
29: 'lights',
30: 'seconds',
31: 'minutes',
32: 'hours',
33: 'calories',
34: 'kilojoules',
35: 'milliseconds',
36: 'second_per_mile',
37: 'second_per_kilometer',
38: 'centimeter',
39: 'enum_course_point',
40: 'bradians',
41: 'enum_sport',
42: 'inches_hg',
43: 'mm_hg',
44: 'mbars',
45: 'hecto_pascals',
46: 'feet_per_min',
47: 'meters_per_min',
48: 'meters_per_sec',
49: 'eight_cardinal',
},
),
'exd_descriptors': FieldType(
name='exd_descriptors',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'bike_light_battery_status',
1: 'beam_angle_status',
2: 'batery_level',
3: 'light_network_mode',
4: 'number_lights_connected',
5: 'cadence',
6: 'distance',
7: 'estimated_time_of_arrival',
8: 'heading',
9: 'time',
10: 'battery_level',
11: 'trainer_resistance',
12: 'trainer_target_power',
13: 'time_seated',
14: 'time_standing',
15: 'elevation',
16: 'grade',
17: 'ascent',
18: 'descent',
19: 'vertical_speed',
20: 'di2_battery_level',
21: 'front_gear',
22: 'rear_gear',
23: 'gear_ratio',
24: 'heart_rate',
25: 'heart_rate_zone',
26: 'time_in_heart_rate_zone',
27: 'heart_rate_reserve',
28: 'calories',
29: 'gps_accuracy',
30: 'gps_signal_strength',
31: 'temperature',
32: 'time_of_day',
33: 'balance',
34: 'pedal_smoothness',
35: 'power',
36: 'functional_threshold_power',
37: 'intensity_factor',
38: 'work',
39: 'power_ratio',
40: 'normalized_power',
41: 'training_stress_Score',
42: 'time_on_zone',
43: 'speed',
44: 'laps',
45: 'reps',
46: 'workout_step',
47: 'course_distance',
48: 'navigation_distance',
49: 'course_estimated_time_of_arrival',
50: 'navigation_estimated_time_of_arrival',
51: 'course_time',
52: 'navigation_time',
53: 'course_heading',
54: 'navigation_heading',
55: 'power_zone',
56: 'torque_effectiveness',
57: 'timer_time',
58: 'power_weight_ratio',
59: 'left_platform_center_offset',
60: 'right_platform_center_offset',
61: 'left_power_phase_start_angle',
62: 'right_power_phase_start_angle',
63: 'left_power_phase_finish_angle',
64: 'right_power_phase_finish_angle',
65: 'gears', # Combined gear information
66: 'pace',
67: 'training_effect',
68: 'vertical_oscillation',
69: 'vertical_ratio',
70: 'ground_contact_time',
71: 'left_ground_contact_time_balance',
72: 'right_ground_contact_time_balance',
73: 'stride_length',
74: 'running_cadence',
75: 'performance_condition',
76: 'course_type',
77: 'time_in_power_zone',
78: 'navigation_turn',
79: 'course_location',
80: 'navigation_location',
81: 'compass',
82: 'gear_combo',
83: 'muscle_oxygen',
84: 'icon',
85: 'compass_heading',
86: 'gps_heading',
87: 'gps_elevation',
88: 'anaerobic_training_effect',
89: 'course',
90: 'off_course',
91: 'glide_ratio',
92: 'vertical_distance',
93: 'vmg',
94: 'ambient_pressure',
95: 'pressure',
},
),
'exd_display_type': FieldType(
name='exd_display_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'numerical',
1: 'simple',
2: 'graph',
3: 'bar',
4: 'circle_graph',
5: 'virtual_partner',
6: 'balance',
7: 'string_list',
8: 'string',
9: 'simple_dynamic_icon',
10: 'gauge',
},
),
'exd_layout': FieldType(
name='exd_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'full_screen',
1: 'half_vertical',
2: 'half_horizontal',
3: 'half_vertical_right_split',
4: 'half_horizontal_bottom_split',
5: 'full_quarter_split',
6: 'half_vertical_left_split',
7: 'half_horizontal_top_split',
},
),
'exd_qualifiers': FieldType(
name='exd_qualifiers',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_qualifier',
1: 'instantaneous',
2: 'average',
3: 'lap',
4: 'maximum',
5: 'maximum_average',
6: 'maximum_lap',
7: 'last_lap',
8: 'average_lap',
9: 'to_destination',
10: 'to_go',
11: 'to_next',
12: 'next_course_point',
13: 'total',
14: 'three_second_average',
15: 'ten_second_average',
16: 'thirty_second_average',
17: 'percent_maximum',
18: 'percent_maximum_average',
19: 'lap_percent_maximum',
20: 'elapsed',
21: 'sunrise',
22: 'sunset',
23: 'compared_to_virtual_partner',
24: 'maximum_24h',
25: 'minimum_24h',
26: 'minimum',
27: 'first',
28: 'second',
29: 'third',
30: 'shifter',
31: 'last_sport',
32: 'moving',
33: 'stopped',
242: 'zone_9',
243: 'zone_8',
244: 'zone_7',
245: 'zone_6',
246: 'zone_5',
247: 'zone_4',
248: 'zone_3',
249: 'zone_2',
250: 'zone_1',
},
),
'file': FieldType(
name='file',
base_type=BASE_TYPES[0x00], # enum
values={
1: 'device', # Read only, single file. Must be in root directory.
2: 'settings', # Read/write, single file. Directory=Settings
3: 'sport', # Read/write, multiple files, file number = sport type. Directory=Sports
4: 'activity', # Read/erase, multiple files. Directory=Activities
5: 'workout', # Read/write/erase, multiple files. Directory=Workouts
6: 'course', # Read/write/erase, multiple files. Directory=Courses
7: 'schedules', # Read/write, single file. Directory=Schedules
9: 'weight', # Read only, single file. Circular buffer. All message definitions at start of file. Directory=Weight
10: 'totals', # Read only, single file. Directory=Totals
11: 'goals', # Read/write, single file. Directory=Goals
14: 'blood_pressure', # Read only. Directory=Blood Pressure
15: 'monitoring_a', # Read only. Directory=Monitoring. File number=sub type.
20: 'activity_summary', # Read/erase, multiple files. Directory=Activities
28: 'monitoring_daily',
32: 'monitoring_b', # Read only. Directory=Monitoring. File number=identifier
34: 'segment', # Read/write/erase. Multiple Files. Directory=Segments
35: 'segment_list', # Read/write/erase. Single File. Directory=Segments
40: 'exd_configuration', # Read/write/erase. Single File. Directory=Settings
0xF7: 'mfg_range_min', # 0xF7 - 0xFE reserved for manufacturer specific file types
0xFE: 'mfg_range_max', # 0xF7 - 0xFE reserved for manufacturer specific file types
},
),
'file_flags': FieldType(
name='file_flags',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x02: 'read',
0x04: 'write',
0x08: 'erase',
},
),
'fit_base_type': FieldType(
name='fit_base_type',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'enum',
1: 'sint8',
2: 'uint8',
7: 'string',
10: 'uint8z',
13: 'byte',
131: 'sint16',
132: 'uint16',
133: 'sint32',
134: 'uint32',
136: 'float32',
137: 'float64',
139: 'uint16z',
140: 'uint32z',
142: 'sint64',
143: 'uint64',
144: 'uint64z',
},
),
'fit_base_unit': FieldType(
name='fit_base_unit',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'other',
1: 'kilogram',
2: 'pound',
},
),
'fitness_equipment_state': FieldType( # fitness equipment event data
name='fitness_equipment_state',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'ready',
1: 'in_use',
2: 'paused',
3: 'unknown', # lost connection to fitness equipment
},
),
'garmin_product': FieldType(
name='garmin_product',
base_type=BASE_TYPES[0x84], # uint16
values={
1: 'hrm1',
2: 'axh01', # AXH01 HRM chipset
3: 'axb01',
4: 'axb02',
5: 'hrm2ss',
6: 'dsi_alf02',
7: 'hrm3ss',
8: 'hrm_run_single_byte_product_id', # hrm_run model for HRM ANT+ messaging
9: 'bsm', # BSM model for ANT+ messaging
10: 'bcm', # BCM model for ANT+ messaging
11: 'axs01', # AXS01 HRM Bike Chipset model for ANT+ messaging
12: 'hrm_tri_single_byte_product_id', # hrm_tri model for HRM ANT+ messaging
14: 'fr225_single_byte_product_id', # fr225 model for HRM ANT+ messaging
473: 'fr301_china',
474: 'fr301_japan',
475: 'fr301_korea',
494: 'fr301_taiwan',
717: 'fr405', # Forerunner 405
782: 'fr50', # Forerunner 50
987: 'fr405_japan',
988: 'fr60', # Forerunner 60
1011: 'dsi_alf01',
1018: 'fr310xt', # Forerunner 310
1036: 'edge500',
1124: 'fr110', # Forerunner 110
1169: 'edge800',
1199: 'edge500_taiwan',
1213: 'edge500_japan',
1253: 'chirp',
1274: 'fr110_japan',
1325: 'edge200',
1328: 'fr910xt',
1333: 'edge800_taiwan',
1334: 'edge800_japan',
1341: 'alf04',
1345: 'fr610',
1360: 'fr210_japan',
1380: 'vector_ss',
1381: 'vector_cp',
1386: 'edge800_china',
1387: 'edge500_china',
1410: 'fr610_japan',
1422: 'edge500_korea',
1436: 'fr70',
1446: 'fr310xt_4t',
1461: 'amx',
1482: 'fr10',
1497: 'edge800_korea',
1499: 'swim',
1537: 'fr910xt_china',
1551: 'fenix',
1555: 'edge200_taiwan',
1561: 'edge510',
1567: 'edge810',
1570: 'tempe',
1600: 'fr910xt_japan',
1623: 'fr620',
1632: 'fr220',
1664: 'fr910xt_korea',
1688: 'fr10_japan',
1721: 'edge810_japan',
1735: 'virb_elite',
1736: 'edge_touring', # Also Edge Touring Plus
1742: 'edge510_japan',
1743: 'hrm_tri',
1752: 'hrm_run',
1765: 'fr920xt',
1821: 'edge510_asia',
1822: 'edge810_china',
1823: 'edge810_taiwan',
1836: 'edge1000',
1837: 'vivo_fit',
1853: 'virb_remote',
1885: 'vivo_ki',
1903: 'fr15',
1907: 'vivo_active',
1918: 'edge510_korea',
1928: 'fr620_japan',
1929: 'fr620_china',
1930: 'fr220_japan',
1931: 'fr220_china',
1936: 'approach_s6',
1956: 'vivo_smart',
1967: 'fenix2',
1988: 'epix',
2050: 'fenix3',
2052: 'edge1000_taiwan',
2053: 'edge1000_japan',
2061: 'fr15_japan',
2067: 'edge520',
2070: 'edge1000_china',
2072: 'fr620_russia',
2073: 'fr220_russia',
2079: 'vector_s',
2100: 'edge1000_korea',
2130: 'fr920xt_taiwan',
2131: 'fr920xt_china',
2132: 'fr920xt_japan',
2134: 'virbx',
2135: 'vivo_smart_apac',
2140: 'etrex_touch',
2147: 'edge25',
2148: 'fr25',
2150: 'vivo_fit2',
2153: 'fr225',
2156: 'fr630',
2157: 'fr230',
2160: 'vivo_active_apac',
2161: 'vector_2',
2162: 'vector_2s',
2172: 'virbxe',
2173: 'fr620_taiwan',
2174: 'fr220_taiwan',
2175: 'truswing',
2188: 'fenix3_china',
2189: 'fenix3_twn',
2192: 'varia_headlight',
2193: 'varia_taillight_old',
2204: 'edge_explore_1000',
2219: 'fr225_asia',
2225: 'varia_radar_taillight',
2226: 'varia_radar_display',
2238: 'edge20',
2262: 'd2_bravo',
2266: 'approach_s20',
2276: 'varia_remote',
2327: 'hrm4_run',
2337: 'vivo_active_hr',
2347: 'vivo_smart_gps_hr',
2348: 'vivo_smart_hr',
2368: 'vivo_move',
2398: 'varia_vision',
2406: 'vivo_fit3',
2413: 'fenix3_hr',
2417: 'virb_ultra_30',
2429: 'index_smart_scale',
2431: 'fr235',
2432: 'fenix3_chronos',
2441: 'oregon7xx',
2444: 'rino7xx',
2496: 'nautix',
2530: 'edge_820',
2531: 'edge_explore_820',
2544: 'fenix5s',
2547: 'd2_bravo_titanium',
2593: 'running_dynamics_pod',
2604: 'fenix5x',
2606: 'vivo_fit_jr',
2691: 'fr935',
2697: 'fenix5',
10007: 'sdm4', # SDM4 footpod
10014: 'edge_remote',
20119: 'training_center',
65531: 'connectiq_simulator',
65532: 'android_antplus_plugin',
65534: 'connect', # Garmin Connect website
},
),
'gender': FieldType(
name='gender',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'female',
1: 'male',
},
),
'goal': FieldType(
name='goal',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'calories',
3: 'frequency',
4: 'steps',
5: 'ascent',
6: 'active_minutes',
},
),
'goal_recurrence': FieldType(
name='goal_recurrence',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'daily',
2: 'weekly',
3: 'monthly',
4: 'yearly',
5: 'custom',
},
),
'goal_source': FieldType(
name='goal_source',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto', # Device generated
1: 'community', # Social network sourced goal
2: 'user', # Manually generated
},
),
'hr_type': FieldType(
name='hr_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'normal',
1: 'irregular',
},
),
'hr_zone_calc': FieldType(
name='hr_zone_calc',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'custom',
1: 'percent_max_hr',
2: 'percent_hrr',
},
),
'intensity': FieldType(
name='intensity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'active',
1: 'rest',
2: 'warmup',
3: 'cooldown',
},
),
'language': FieldType(
name='language',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'english',
1: 'french',
2: 'italian',
3: 'german',
4: 'spanish',
5: 'croatian',
6: 'czech',
7: 'danish',
8: 'dutch',
9: 'finnish',
10: 'greek',
11: 'hungarian',
12: 'norwegian',
13: 'polish',
14: 'portuguese',
15: 'slovakian',
16: 'slovenian',
17: 'swedish',
18: 'russian',
19: 'turkish',
20: 'latvian',
21: 'ukrainian',
22: 'arabic',
23: 'farsi',
24: 'bulgarian',
25: 'romanian',
26: 'chinese',
27: 'japanese',
28: 'korean',
29: 'taiwanese',
30: 'thai',
31: 'hebrew',
32: 'brazilian_portuguese',
33: 'indonesian',
34: 'malaysian',
35: 'vietnamese',
36: 'burmese',
37: 'mongolian',
254: 'custom',
},
),
'language_bits_0': FieldType( # Bit field corresponding to language enum type (1 << language).
name='language_bits_0',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'english',
0x02: 'french',
0x04: 'italian',
0x08: 'german',
0x10: 'spanish',
0x20: 'croatian',
0x40: 'czech',
0x80: 'danish',
},
),
'language_bits_1': FieldType(
name='language_bits_1',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'dutch',
0x02: 'finnish',
0x04: 'greek',
0x08: 'hungarian',
0x10: 'norwegian',
0x20: 'polish',
0x40: 'portuguese',
0x80: 'slovakian',
},
),
'language_bits_2': FieldType(
name='language_bits_2',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'slovenian',
0x02: 'swedish',
0x04: 'russian',
0x08: 'turkish',
0x10: 'latvian',
0x20: 'ukrainian',
0x40: 'arabic',
0x80: 'farsi',
},
),
'language_bits_3': FieldType(
name='language_bits_3',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'bulgarian',
0x02: 'romanian',
0x04: 'chinese',
0x08: 'japanese',
0x10: 'korean',
0x20: 'taiwanese',
0x40: 'thai',
0x80: 'hebrew',
},
),
'language_bits_4': FieldType(
name='language_bits_4',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'brazilian_portuguese',
0x02: 'indonesian',
0x04: 'malaysian',
0x08: 'vietnamese',
0x10: 'burmese',
0x20: 'mongolian',
},
),
'lap_trigger': FieldType(
name='lap_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'time',
2: 'distance',
3: 'position_start',
4: 'position_lap',
5: 'position_waypoint',
6: 'position_marked',
7: 'session_end',
8: 'fitness_equipment',
},
),
'left_right_balance': FieldType(
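        # Illustrative decode of the mask/flag values below: raw 0xB7 has the
        # 'right' flag (0x80) set and 0xB7 & 0x7F = 55, i.e. a 55% contribution
        # attributed to the right side.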
name='left_right_balance',
base_type=BASE_TYPES[0x02], # uint8
values={
0x7F: 'mask', # % contribution
0x80: 'right', # data corresponds to right if set, otherwise unknown
},
),
'left_right_balance_100': FieldType(
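        # Illustrative decode: raw 0x9568 has the 'right' flag (0x8000) set and
        # 0x9568 & 0x3FFF = 5480, i.e. 54.80% once the value is divided by 100.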
name='left_right_balance_100',
base_type=BASE_TYPES[0x84], # uint16
values={
0x3FFF: 'mask', # % contribution scaled by 100
0x8000: 'right', # data corresponds to right if set, otherwise unknown
},
),
'length_type': FieldType(
name='length_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'idle', # Rest period. Length with no strokes
1: 'active', # Length with strokes.
},
),
'local_date_time': FieldType( # seconds since 00:00 Dec 31 1989 in local time zone
name='local_date_time',
base_type=BASE_TYPES[0x86], # uint32
values={
0x10000000: 'min', # if date_time is < 0x10000000 then it is system time (seconds from device power on)
},
),
'localtime_into_day': FieldType( # number of seconds into the day since local 00:00:00
name='localtime_into_day',
base_type=BASE_TYPES[0x86], # uint32
),
'manufacturer': FieldType(
name='manufacturer',
base_type=BASE_TYPES[0x84], # uint16
values={
1: 'garmin',
2: 'garmin_fr405_antfs', # Do not use. Used by FR405 for ANTFS man id.
3: 'zephyr',
4: 'dayton',
5: 'idt',
6: 'srm',
7: 'quarq',
8: 'ibike',
9: 'saris',
10: 'spark_hk',
11: 'tanita',
12: 'echowell',
13: 'dynastream_oem',
14: 'nautilus',
15: 'dynastream',
16: 'timex',
17: 'metrigear',
18: 'xelic',
19: 'beurer',
20: 'cardiosport',
21: 'a_and_d',
22: 'hmm',
23: 'suunto',
24: 'thita_elektronik',
25: 'gpulse',
26: 'clean_mobile',
27: 'pedal_brain',
28: 'peaksware',
29: 'saxonar',
30: 'lemond_fitness',
31: 'dexcom',
32: 'wahoo_fitness',
33: 'octane_fitness',
34: 'archinoetics',
35: 'the_hurt_box',
36: 'citizen_systems',
37: 'magellan',
38: 'osynce',
39: 'holux',
40: 'concept2',
42: 'one_giant_leap',
43: 'ace_sensor',
44: 'brim_brothers',
45: 'xplova',
46: 'perception_digital',
47: 'bf1systems',
48: 'pioneer',
49: 'spantec',
50: 'metalogics',
51: '4iiiis',
52: 'seiko_epson',
53: 'seiko_epson_oem',
54: 'ifor_powell',
55: 'maxwell_guider',
56: 'star_trac',
57: 'breakaway',
58: 'alatech_technology_ltd',
59: 'mio_technology_europe',
60: 'rotor',
61: 'geonaute',
62: 'id_bike',
63: 'specialized',
64: 'wtek',
65: 'physical_enterprises',
66: 'north_pole_engineering',
67: 'bkool',
68: 'cateye',
69: 'stages_cycling',
70: 'sigmasport',
71: 'tomtom',
72: 'peripedal',
73: 'wattbike',
76: 'moxy',
77: 'ciclosport',
78: 'powerbahn',
79: 'acorn_projects_aps',
80: 'lifebeam',
81: 'bontrager',
82: 'wellgo',
83: 'scosche',
84: 'magura',
85: 'woodway',
86: 'elite',
87: 'nielsen_kellerman',
88: 'dk_city',
89: 'tacx',
90: 'direction_technology',
91: 'magtonic',
92: '1partcarbon',
93: 'inside_ride_technologies',
94: 'sound_of_motion',
95: 'stryd',
96: 'icg', # Indoorcycling Group
            97: 'mi_pulse',
98: 'bsx_athletics',
99: 'look',
100: 'campagnolo_srl',
101: 'body_bike_smart',
102: 'praxisworks',
103: 'limits_technology', # Limits Technology Ltd.
104: 'topaction_technology', # TopAction Technology Inc.
105: 'cosinuss',
106: 'fitcare',
107: 'magene',
108: 'giant_manufacturing_co',
109: 'tigrasport', # Tigrasport
110: 'salutron',
111: 'technogym',
112: 'bryton_sensors',
255: 'development',
257: 'healthandlife',
258: 'lezyne',
259: 'scribe_labs',
260: 'zwift',
261: 'watteam',
262: 'recon',
263: 'favero_electronics',
264: 'dynovelo',
265: 'strava',
266: 'precor', # Amer Sports
267: 'bryton',
268: 'sram',
269: 'navman', # MiTAC Global Corporation (Mio Technology)
270: 'cobi', # COBI GmbH
271: 'spivi',
272: 'mio_magellan',
273: 'evesports',
274: 'sensitivus_gauge',
275: 'podoon',
276: 'life_time_fitness',
277: 'falco_e_motors', # Falco eMotors Inc.
5759: 'actigraphcorp',
},
),
'mesg_count': FieldType(
name='mesg_count',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'num_per_file',
1: 'max_per_file',
2: 'max_per_file_type',
},
),
'mesg_num': FieldType(
name='mesg_num',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'file_id',
1: 'capabilities',
2: 'device_settings',
3: 'user_profile',
4: 'hrm_profile',
5: 'sdm_profile',
6: 'bike_profile',
7: 'zones_target',
8: 'hr_zone',
9: 'power_zone',
10: 'met_zone',
12: 'sport',
15: 'goal',
18: 'session',
19: 'lap',
20: 'record',
21: 'event',
23: 'device_info',
26: 'workout',
27: 'workout_step',
28: 'schedule',
30: 'weight_scale',
31: 'course',
32: 'course_point',
33: 'totals',
34: 'activity',
35: 'software',
37: 'file_capabilities',
38: 'mesg_capabilities',
39: 'field_capabilities',
49: 'file_creator',
51: 'blood_pressure',
53: 'speed_zone',
55: 'monitoring',
72: 'training_file',
78: 'hrv',
80: 'ant_rx',
81: 'ant_tx',
82: 'ant_channel_id',
101: 'length',
103: 'monitoring_info',
105: 'pad',
106: 'slave_device',
127: 'connectivity',
128: 'weather_conditions',
129: 'weather_alert',
131: 'cadence_zone',
132: 'hr',
142: 'segment_lap',
145: 'memo_glob',
148: 'segment_id',
149: 'segment_leaderboard_entry',
150: 'segment_point',
151: 'segment_file',
158: 'workout_session',
159: 'watchface_settings',
160: 'gps_metadata',
161: 'camera_event',
162: 'timestamp_correlation',
164: 'gyroscope_data',
165: 'accelerometer_data',
167: 'three_d_sensor_calibration',
169: 'video_frame',
174: 'obdii_data',
177: 'nmea_sentence',
178: 'aviation_attitude',
184: 'video',
185: 'video_title',
186: 'video_description',
187: 'video_clip',
188: 'ohr_settings',
200: 'exd_screen_configuration',
201: 'exd_data_field_configuration',
202: 'exd_data_concept_configuration',
206: 'field_description',
207: 'developer_data_id',
208: 'magnetometer_data',
},
),
'message_index': FieldType(
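        # Illustrative decode of the bit layout below: raw 0x8005 has the
        # 'selected' bit set and an index of 0x8005 & 0x0FFF = 5.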
name='message_index',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0FFF: 'mask', # index
0x7000: 'reserved', # reserved (default 0)
0x8000: 'selected', # message is selected if set
},
),
'power_phase_type': FieldType(
name='power_phase_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'power_phase_start_angle',
1: 'power_phase_end_angle',
2: 'power_phase_arc_length',
3: 'power_phase_center',
},
),
'pwr_zone_calc': FieldType(
name='pwr_zone_calc',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'custom',
1: 'percent_ftp',
},
),
'rider_position_type': FieldType(
name='rider_position_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'seated',
1: 'standing',
2: 'transition_to_seated',
3: 'transition_to_standing',
},
),
'schedule': FieldType(
name='schedule',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'workout',
1: 'course',
},
),
'segment_delete_status': FieldType(
name='segment_delete_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'do_not_delete',
1: 'delete_one',
2: 'delete_all',
},
),
'segment_lap_status': FieldType(
name='segment_lap_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'end',
1: 'fail',
},
),
'segment_leaderboard_type': FieldType(
name='segment_leaderboard_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'overall',
1: 'personal_best',
2: 'connections',
3: 'group',
4: 'challenger',
5: 'kom',
6: 'qom',
7: 'pr',
8: 'goal',
9: 'rival',
10: 'club_leader',
},
),
'segment_selection_type': FieldType(
name='segment_selection_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'starred',
1: 'suggested',
},
),
'sensor_type': FieldType(
name='sensor_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'accelerometer',
1: 'gyroscope',
2: 'compass', # Magnetometer
},
),
'session_trigger': FieldType(
name='session_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'activity_end',
1: 'manual', # User changed sport.
2: 'auto_multi_sport', # Auto multi-sport feature is enabled and user pressed lap button to advance session.
3: 'fitness_equipment', # Auto sport change caused by user linking to fitness equipment.
},
),
'side': FieldType(
name='side',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'right',
1: 'left',
},
),
'source_type': FieldType(
name='source_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'ant', # External device connected with ANT
1: 'antplus', # External device connected with ANT+
2: 'bluetooth', # External device connected with BT
3: 'bluetooth_low_energy', # External device connected with BLE
4: 'wifi', # External device connected with Wifi
5: 'local', # Onboard device
},
),
'sport': FieldType(
name='sport',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'running',
2: 'cycling',
            3: 'transition', # Multisport transition
4: 'fitness_equipment',
5: 'swimming',
6: 'basketball',
7: 'soccer',
8: 'tennis',
9: 'american_football',
10: 'training',
11: 'walking',
12: 'cross_country_skiing',
13: 'alpine_skiing',
14: 'snowboarding',
15: 'rowing',
16: 'mountaineering',
17: 'hiking',
18: 'multisport',
19: 'paddling',
20: 'flying',
21: 'e_biking',
22: 'motorcycling',
23: 'boating',
24: 'driving',
25: 'golf',
26: 'hang_gliding',
27: 'horseback_riding',
28: 'hunting',
29: 'fishing',
30: 'inline_skating',
31: 'rock_climbing',
32: 'sailing',
33: 'ice_skating',
34: 'sky_diving',
35: 'snowshoeing',
36: 'snowmobiling',
37: 'stand_up_paddleboarding',
38: 'surfing',
39: 'wakeboarding',
40: 'water_skiing',
41: 'kayaking',
42: 'rafting',
43: 'windsurfing',
44: 'kitesurfing',
45: 'tactical',
46: 'jumpmaster',
47: 'boxing',
48: 'floor_climbing',
254: 'all', # All is for goals only to include all sports.
},
),
'sport_bits_0': FieldType( # Bit field corresponding to sport enum type (1 << sport).
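        # For example, a raw value of 0x06 (0x02 | 0x04) flags running and cycling.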
name='sport_bits_0',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'generic',
0x02: 'running',
0x04: 'cycling',
            0x08: 'transition', # Multisport transition
0x10: 'fitness_equipment',
0x20: 'swimming',
0x40: 'basketball',
0x80: 'soccer',
},
),
'sport_bits_1': FieldType( # Bit field corresponding to sport enum type (1 << (sport-8)).
name='sport_bits_1',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'tennis',
0x02: 'american_football',
0x04: 'training',
0x08: 'walking',
0x10: 'cross_country_skiing',
0x20: 'alpine_skiing',
0x40: 'snowboarding',
0x80: 'rowing',
},
),
'sport_bits_2': FieldType( # Bit field corresponding to sport enum type (1 << (sport-16)).
name='sport_bits_2',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'mountaineering',
0x02: 'hiking',
0x04: 'multisport',
0x08: 'paddling',
0x10: 'flying',
0x20: 'e_biking',
0x40: 'motorcycling',
0x80: 'boating',
},
),
'sport_bits_3': FieldType( # Bit field corresponding to sport enum type (1 << (sport-24)).
name='sport_bits_3',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'driving',
0x02: 'golf',
0x04: 'hang_gliding',
0x08: 'horseback_riding',
0x10: 'hunting',
0x20: 'fishing',
0x40: 'inline_skating',
0x80: 'rock_climbing',
},
),
'sport_bits_4': FieldType( # Bit field corresponding to sport enum type (1 << (sport-32)).
name='sport_bits_4',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'sailing',
0x02: 'ice_skating',
0x04: 'sky_diving',
0x08: 'snowshoeing',
0x10: 'snowmobiling',
0x20: 'stand_up_paddleboarding',
0x40: 'surfing',
0x80: 'wakeboarding',
},
),
'sport_bits_5': FieldType( # Bit field corresponding to sport enum type (1 << (sport-40)).
name='sport_bits_5',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'water_skiing',
0x02: 'kayaking',
0x04: 'rafting',
0x08: 'windsurfing',
0x10: 'kitesurfing',
0x20: 'tactical',
0x40: 'jumpmaster',
0x80: 'boxing',
},
),
'sport_bits_6': FieldType( # Bit field corresponding to sport enum type (1 << (sport-48)).
name='sport_bits_6',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'floor_climbing',
},
),
'sport_event': FieldType(
name='sport_event',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'uncategorized',
1: 'geocaching',
2: 'fitness',
3: 'recreation',
4: 'race',
5: 'special_event',
6: 'training',
7: 'transportation',
8: 'touring',
},
),
'stroke_type': FieldType(
name='stroke_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_event',
1: 'other', # stroke was detected but cannot be identified
2: 'serve',
3: 'forehand',
4: 'backhand',
5: 'smash',
},
),
'sub_sport': FieldType(
name='sub_sport',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'treadmill', # Run/Fitness Equipment
2: 'street', # Run
3: 'trail', # Run
4: 'track', # Run
5: 'spin', # Cycling
6: 'indoor_cycling', # Cycling/Fitness Equipment
7: 'road', # Cycling
8: 'mountain', # Cycling
9: 'downhill', # Cycling
10: 'recumbent', # Cycling
11: 'cyclocross', # Cycling
12: 'hand_cycling', # Cycling
13: 'track_cycling', # Cycling
14: 'indoor_rowing', # Fitness Equipment
15: 'elliptical', # Fitness Equipment
16: 'stair_climbing', # Fitness Equipment
17: 'lap_swimming', # Swimming
18: 'open_water', # Swimming
19: 'flexibility_training', # Training
20: 'strength_training', # Training
21: 'warm_up', # Tennis
22: 'match', # Tennis
23: 'exercise', # Tennis
24: 'challenge', # Tennis
25: 'indoor_skiing', # Fitness Equipment
26: 'cardio_training', # Training
27: 'indoor_walking', # Walking/Fitness Equipment
28: 'e_bike_fitness', # E-Biking
29: 'bmx', # Cycling
30: 'casual_walking', # Walking
31: 'speed_walking', # Walking
32: 'bike_to_run_transition', # Transition
33: 'run_to_bike_transition', # Transition
34: 'swim_to_bike_transition', # Transition
35: 'atv', # Motorcycling
36: 'motocross', # Motorcycling
37: 'backcountry', # Alpine Skiing/Snowboarding
38: 'resort', # Alpine Skiing/Snowboarding
39: 'rc_drone', # Flying
40: 'wingsuit', # Flying
41: 'whitewater', # Kayaking/Rafting
42: 'skate_skiing', # Cross Country Skiing
43: 'yoga', # Training
44: 'pilates', # Training
45: 'indoor_running', # Run
46: 'gravel_cycling', # Cycling
47: 'e_bike_mountain', # Cycling
48: 'commuting', # Cycling
49: 'mixed_surface', # Cycling
50: 'navigate',
51: 'track_me',
52: 'map',
254: 'all',
},
),
'supported_exd_screen_layouts': FieldType(
name='supported_exd_screen_layouts',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'full_screen',
0x00000002: 'half_vertical',
0x00000004: 'half_horizontal',
0x00000008: 'half_vertical_right_split',
0x00000010: 'half_horizontal_bottom_split',
0x00000020: 'full_quarter_split',
0x00000040: 'half_vertical_left_split',
0x00000080: 'half_horizontal_top_split',
},
),
'swim_stroke': FieldType(
name='swim_stroke',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'freestyle',
1: 'backstroke',
2: 'breaststroke',
3: 'butterfly',
4: 'drill',
5: 'mixed',
6: 'im', # IM is a mixed interval containing the same number of lengths for each of: Butterfly, Backstroke, Breaststroke, Freestyle, swam in that order.
},
),
'switch': FieldType(
name='switch',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'on',
2: 'auto',
},
),
'time_into_day': FieldType( # number of seconds into the day since 00:00:00 UTC
name='time_into_day',
base_type=BASE_TYPES[0x86], # uint32
),
'time_mode': FieldType(
name='time_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'hour12',
1: 'hour24', # Does not use a leading zero and has a colon
2: 'military', # Uses a leading zero and does not have a colon
3: 'hour_12_with_seconds',
4: 'hour_24_with_seconds',
5: 'utc',
},
),
'time_zone': FieldType(
name='time_zone',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'almaty',
1: 'bangkok',
2: 'bombay',
3: 'brasilia',
4: 'cairo',
5: 'cape_verde_is',
6: 'darwin',
7: 'eniwetok',
8: 'fiji',
9: 'hong_kong',
10: 'islamabad',
11: 'kabul',
12: 'magadan',
13: 'mid_atlantic',
14: 'moscow',
15: 'muscat',
16: 'newfoundland',
17: 'samoa',
18: 'sydney',
19: 'tehran',
20: 'tokyo',
21: 'us_alaska',
22: 'us_atlantic',
23: 'us_central',
24: 'us_eastern',
25: 'us_hawaii',
26: 'us_mountain',
27: 'us_pacific',
28: 'other',
29: 'auckland',
30: 'kathmandu',
31: 'europe_western_wet',
32: 'europe_central_cet',
33: 'europe_eastern_eet',
34: 'jakarta',
35: 'perth',
36: 'adelaide',
37: 'brisbane',
38: 'tasmania',
39: 'iceland',
40: 'amsterdam',
41: 'athens',
42: 'barcelona',
43: 'berlin',
44: 'brussels',
45: 'budapest',
46: 'copenhagen',
47: 'dublin',
48: 'helsinki',
49: 'lisbon',
50: 'london',
51: 'madrid',
52: 'munich',
53: 'oslo',
54: 'paris',
55: 'prague',
56: 'reykjavik',
57: 'rome',
58: 'stockholm',
59: 'vienna',
60: 'warsaw',
61: 'zurich',
62: 'quebec',
63: 'ontario',
64: 'manitoba',
65: 'saskatchewan',
66: 'alberta',
67: 'british_columbia',
68: 'boise',
69: 'boston',
70: 'chicago',
71: 'dallas',
72: 'denver',
73: 'kansas_city',
74: 'las_vegas',
75: 'los_angeles',
76: 'miami',
77: 'minneapolis',
78: 'new_york',
79: 'new_orleans',
80: 'phoenix',
81: 'santa_fe',
82: 'seattle',
83: 'washington_dc',
84: 'us_arizona',
85: 'chita',
86: 'ekaterinburg',
87: 'irkutsk',
88: 'kaliningrad',
89: 'krasnoyarsk',
90: 'novosibirsk',
91: 'petropavlovsk_kamchatskiy',
92: 'samara',
93: 'vladivostok',
94: 'mexico_central',
95: 'mexico_mountain',
96: 'mexico_pacific',
97: 'cape_town',
98: 'winkhoek',
99: 'lagos',
100: 'riyahd',
101: 'venezuela',
102: 'australia_lh',
103: 'santiago',
253: 'manual',
254: 'automatic',
},
),
'timer_trigger': FieldType( # timer event data
name='timer_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'auto',
2: 'fitness_equipment',
},
),
'turn_type': FieldType(
name='turn_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'arriving_idx',
1: 'arriving_left_idx',
2: 'arriving_right_idx',
3: 'arriving_via_idx',
4: 'arriving_via_left_idx',
5: 'arriving_via_right_idx',
6: 'bear_keep_left_idx',
7: 'bear_keep_right_idx',
8: 'continue_idx',
9: 'exit_left_idx',
10: 'exit_right_idx',
11: 'ferry_idx',
12: 'roundabout_45_idx',
13: 'roundabout_90_idx',
14: 'roundabout_135_idx',
15: 'roundabout_180_idx',
16: 'roundabout_225_idx',
17: 'roundabout_270_idx',
18: 'roundabout_315_idx',
19: 'roundabout_360_idx',
20: 'roundabout_neg_45_idx',
21: 'roundabout_neg_90_idx',
22: 'roundabout_neg_135_idx',
23: 'roundabout_neg_180_idx',
24: 'roundabout_neg_225_idx',
25: 'roundabout_neg_270_idx',
26: 'roundabout_neg_315_idx',
27: 'roundabout_neg_360_idx',
28: 'roundabout_generic_idx',
29: 'roundabout_neg_generic_idx',
30: 'sharp_turn_left_idx',
31: 'sharp_turn_right_idx',
32: 'turn_left_idx',
33: 'turn_right_idx',
34: 'uturn_left_idx',
35: 'uturn_right_idx',
36: 'icon_inv_idx',
37: 'icon_idx_cnt',
},
),
'user_local_id': FieldType(
name='user_local_id',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0000: 'local_min',
0x000F: 'local_max',
0x0010: 'stationary_min',
0x00FF: 'stationary_max',
0x0100: 'portable_min',
0xFFFE: 'portable_max',
},
),
'watchface_mode': FieldType(
name='watchface_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'digital',
1: 'analog',
2: 'connect_iq',
3: 'disabled',
},
),
'weather_report': FieldType(
name='weather_report',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'current',
            1: 'hourly_forecast', # 'forecast' is the deprecated name for this same value
2: 'daily_forecast',
},
),
'weather_severe_type': FieldType(
name='weather_severe_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'unspecified',
1: 'tornado',
2: 'tsunami',
3: 'hurricane',
4: 'extreme_wind',
5: 'typhoon',
6: 'inland_hurricane',
7: 'hurricane_force_wind',
8: 'waterspout',
9: 'severe_thunderstorm',
10: 'wreckhouse_winds',
11: 'les_suetes_wind',
12: 'avalanche',
13: 'flash_flood',
14: 'tropical_storm',
15: 'inland_tropical_storm',
16: 'blizzard',
17: 'ice_storm',
18: 'freezing_rain',
19: 'debris_flow',
20: 'flash_freeze',
21: 'dust_storm',
22: 'high_wind',
23: 'winter_storm',
24: 'heavy_freezing_spray',
25: 'extreme_cold',
26: 'wind_chill',
27: 'cold_wave',
28: 'heavy_snow_alert',
29: 'lake_effect_blowing_snow',
30: 'snow_squall',
31: 'lake_effect_snow',
32: 'winter_weather',
33: 'sleet',
34: 'snowfall',
35: 'snow_and_blowing_snow',
36: 'blowing_snow',
37: 'snow_alert',
38: 'arctic_outflow',
39: 'freezing_drizzle',
40: 'storm',
41: 'storm_surge',
42: 'rainfall',
43: 'areal_flood',
44: 'coastal_flood',
45: 'lakeshore_flood',
46: 'excessive_heat',
47: 'heat',
48: 'weather',
49: 'high_heat_and_humidity',
50: 'humidex_and_health',
51: 'humidex',
52: 'gale',
53: 'freezing_spray',
54: 'special_marine',
55: 'squall',
56: 'strong_wind',
57: 'lake_wind',
58: 'marine_weather',
59: 'wind',
60: 'small_craft_hazardous_seas',
61: 'hazardous_seas',
62: 'small_craft',
63: 'small_craft_winds',
64: 'small_craft_rough_bar',
65: 'high_water_level',
66: 'ashfall',
67: 'freezing_fog',
68: 'dense_fog',
69: 'dense_smoke',
70: 'blowing_dust',
71: 'hard_freeze',
72: 'freeze',
73: 'frost',
74: 'fire_weather',
75: 'flood',
76: 'rip_tide',
77: 'high_surf',
78: 'smog',
79: 'air_quality',
80: 'brisk_wind',
81: 'air_stagnation',
82: 'low_water',
83: 'hydrological',
84: 'special_weather',
},
),
'weather_severity': FieldType(
name='weather_severity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'unknown',
1: 'warning',
2: 'watch',
3: 'advisory',
4: 'statement',
},
),
'weather_status': FieldType(
name='weather_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'clear',
1: 'partly_cloudy',
2: 'mostly_cloudy',
3: 'rain',
4: 'snow',
5: 'windy',
6: 'thunderstorms',
7: 'wintry_mix',
8: 'fog',
11: 'hazy',
12: 'hail',
13: 'scattered_showers',
14: 'scattered_thunderstorms',
15: 'unknown_precipitation',
16: 'light_rain',
17: 'heavy_rain',
18: 'light_snow',
19: 'heavy_snow',
20: 'light_rain_snow',
21: 'heavy_rain_snow',
22: 'cloudy',
},
),
'weight': FieldType(
name='weight',
base_type=BASE_TYPES[0x84], # uint16
values={
0xFFFE: 'calculating',
},
),
'wkt_step_duration': FieldType(
name='wkt_step_duration',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'hr_less_than',
3: 'hr_greater_than',
4: 'calories',
5: 'open',
6: 'repeat_until_steps_cmplt',
7: 'repeat_until_time',
8: 'repeat_until_distance',
9: 'repeat_until_calories',
10: 'repeat_until_hr_less_than',
11: 'repeat_until_hr_greater_than',
12: 'repeat_until_power_less_than',
13: 'repeat_until_power_greater_than',
14: 'power_less_than',
15: 'power_greater_than',
16: 'training_peaks_tss',
17: 'repeat_until_power_last_lap_less_than',
18: 'repeat_until_max_power_last_lap_less_than',
19: 'power_3s_less_than',
20: 'power_10s_less_than',
21: 'power_30s_less_than',
22: 'power_3s_greater_than',
23: 'power_10s_greater_than',
24: 'power_30s_greater_than',
25: 'power_lap_less_than',
26: 'power_lap_greater_than',
27: 'repeat_until_training_peaks_tss',
28: 'repetition_time',
},
),
'wkt_step_target': FieldType(
name='wkt_step_target',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'speed',
1: 'heart_rate',
2: 'open',
3: 'cadence',
4: 'power',
5: 'grade',
6: 'resistance',
7: 'power_3s',
8: 'power_10s',
9: 'power_30s',
10: 'power_lap',
11: 'swim_stroke',
12: 'speed_lap',
13: 'heart_rate_lap',
},
),
'workout_capabilities': FieldType(
name='workout_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'interval',
0x00000002: 'custom',
0x00000004: 'fitness_equipment',
0x00000008: 'firstbeat',
0x00000010: 'new_leaf',
0x00000020: 'tcx', # For backwards compatibility. Watch should add missing id fields then clear flag.
0x00000080: 'speed', # Speed source required for workout step.
0x00000100: 'heart_rate', # Heart rate source required for workout step.
0x00000200: 'distance', # Distance source required for workout step.
0x00000400: 'cadence', # Cadence source required for workout step.
0x00000800: 'power', # Power source required for workout step.
0x00001000: 'grade', # Grade source required for workout step.
0x00002000: 'resistance', # Resistance source required for workout step.
0x00004000: 'protected',
},
),
'workout_equipment': FieldType(
name='workout_equipment',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'none',
1: 'swim_fins',
2: 'swim_kickboard',
3: 'swim_paddles',
4: 'swim_pull_buoy',
5: 'swim_snorkel',
},
),
    'workout_hr': FieldType( # 0 - 100 indicates % of max hr; >100 indicates bpm (255 max) plus 100
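        # For example, a raw value of 65 means 65% of max HR, while a raw value
        # of 165 means 165 - 100 = 65 bpm.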
name='workout_hr',
base_type=BASE_TYPES[0x86], # uint32
values={
100: 'bpm_offset',
},
),
'workout_power': FieldType( # 0 - 1000 indicates % of functional threshold power; >1000 indicates watts plus 1000.
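        # For example, a raw value of 1250 means 1250 - 1000 = 250 watts; values of
        # 1000 or below are read directly as a percentage of functional threshold power.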
name='workout_power',
base_type=BASE_TYPES[0x86], # uint32
values={
1000: 'watts_offset',
},
),
}
FIELD_TYPE_TIMESTAMP = Field(name='timestamp', type=FIELD_TYPES['date_time'], def_num=253, units='s')
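# FIELD_TYPE_TIMESTAMP is the shared timestamp field (def_num 253) reused by many of
# the message types defined below (e.g. field 253 of the session message).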
MESSAGE_TYPES = {
############################ Common Messages #############################
0: MessageType( # Must be first message in file.
name='file_id',
mesg_num=0,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=1,
),
2: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=2,
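                # When the manufacturer field (def_num 1) in the same message decodes
                # to garmin, dynastream or dynastream_oem, this raw uint16 product id
                # is resolved through the 'garmin_product' subfield below.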
subfields=(
SubField(
name='garmin_product',
def_num=2,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=1,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
            4: Field( # Only set for files that can be created/erased.
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=4,
),
5: Field( # Only set for files that are not created/erased.
name='number',
type=BASE_TYPES[0x84], # uint16
def_num=5,
),
            8: Field( # Optional free form string to indicate the device's name or model
name='product_name',
type=BASE_TYPES[0x07], # string
def_num=8,
),
},
),
#################################### ####################################
1: MessageType(
name='capabilities',
mesg_num=1,
fields={
0: Field( # Use language_bits_x types where x is index of array.
name='languages',
type=BASE_TYPES[0x0A], # uint8z
def_num=0,
),
1: Field( # Use sport_bits_x types where x is index of array.
name='sports',
type=FIELD_TYPES['sport_bits_0'],
def_num=1,
),
21: Field(
name='workouts_supported',
type=FIELD_TYPES['workout_capabilities'],
def_num=21,
),
23: Field(
name='connectivity_supported',
type=FIELD_TYPES['connectivity_capabilities'],
def_num=23,
),
},
),
3: MessageType(
name='user_profile',
mesg_num=3,
fields={
0: Field(
name='friendly_name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='gender',
type=FIELD_TYPES['gender'],
def_num=1,
),
2: Field(
name='age',
type=BASE_TYPES[0x02], # uint8
def_num=2,
units='years',
),
3: Field(
name='height',
type=BASE_TYPES[0x02], # uint8
def_num=3,
scale=100,
units='m',
),
4: Field(
name='weight',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=10,
units='kg',
),
5: Field(
name='language',
type=FIELD_TYPES['language'],
def_num=5,
),
6: Field(
name='elev_setting',
type=FIELD_TYPES['display_measure'],
def_num=6,
),
7: Field(
name='weight_setting',
type=FIELD_TYPES['display_measure'],
def_num=7,
),
8: Field(
name='resting_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=8,
units='bpm',
),
9: Field(
name='default_max_running_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=9,
units='bpm',
),
10: Field(
name='default_max_biking_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=10,
units='bpm',
),
11: Field(
name='default_max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=11,
units='bpm',
),
12: Field(
name='hr_setting',
type=FIELD_TYPES['display_heart'],
def_num=12,
),
13: Field(
name='speed_setting',
type=FIELD_TYPES['display_measure'],
def_num=13,
),
14: Field(
name='dist_setting',
type=FIELD_TYPES['display_measure'],
def_num=14,
),
16: Field(
name='power_setting',
type=FIELD_TYPES['display_power'],
def_num=16,
),
17: Field(
name='activity_class',
type=FIELD_TYPES['activity_class'],
def_num=17,
),
18: Field(
name='position_setting',
type=FIELD_TYPES['display_position'],
def_num=18,
),
21: Field(
name='temperature_setting',
type=FIELD_TYPES['display_measure'],
def_num=21,
),
22: Field(
name='local_id',
type=FIELD_TYPES['user_local_id'],
def_num=22,
),
23: Field(
name='global_id',
type=BASE_TYPES[0x0D], # byte
def_num=23,
),
28: Field( # Typical wake time
name='wake_time',
type=FIELD_TYPES['localtime_into_day'],
def_num=28,
),
29: Field( # Typical bed time
name='sleep_time',
type=FIELD_TYPES['localtime_into_day'],
def_num=29,
),
30: Field(
name='height_setting',
type=FIELD_TYPES['display_measure'],
def_num=30,
),
            31: Field( # User defined running step length; set to 0 for auto length
name='user_running_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=31,
scale=1000,
units='m',
),
            32: Field( # User defined walking step length; set to 0 for auto length
name='user_walking_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=32,
scale=1000,
units='m',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
4: MessageType(
name='hrm_profile',
mesg_num=4,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field(
name='hrm_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=1,
),
2: Field(
name='log_hrv',
type=FIELD_TYPES['bool'],
def_num=2,
),
3: Field(
name='hrm_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=3,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
5: MessageType(
name='sdm_profile',
mesg_num=5,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field(
name='sdm_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=1,
),
2: Field(
name='sdm_cal_factor',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=10,
units='%',
),
3: Field(
name='odometer',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field( # Use footpod for speed source instead of GPS
name='speed_source',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='sdm_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=5,
),
7: Field( # Rollover counter that can be used to extend the odometer
name='odometer_rollover',
type=BASE_TYPES[0x02], # uint8
def_num=7,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
6: MessageType(
name='bike_profile',
mesg_num=6,
fields={
0: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=1,
),
2: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=2,
),
3: Field(
name='odometer',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field(
name='bike_spd_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=4,
),
5: Field(
name='bike_cad_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=5,
),
6: Field(
name='bike_spdcad_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=6,
),
7: Field(
name='bike_power_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=7,
),
8: Field(
name='custom_wheelsize',
type=BASE_TYPES[0x84], # uint16
def_num=8,
scale=1000,
units='m',
),
9: Field(
name='auto_wheelsize',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=1000,
units='m',
),
10: Field(
name='bike_weight',
type=BASE_TYPES[0x84], # uint16
def_num=10,
scale=10,
units='kg',
),
11: Field(
name='power_cal_factor',
type=BASE_TYPES[0x84], # uint16
def_num=11,
scale=10,
units='%',
),
12: Field(
name='auto_wheel_cal',
type=FIELD_TYPES['bool'],
def_num=12,
),
13: Field(
name='auto_power_zero',
type=FIELD_TYPES['bool'],
def_num=13,
),
14: Field(
name='id',
type=BASE_TYPES[0x02], # uint8
def_num=14,
),
15: Field(
name='spd_enabled',
type=FIELD_TYPES['bool'],
def_num=15,
),
16: Field(
name='cad_enabled',
type=FIELD_TYPES['bool'],
def_num=16,
),
17: Field(
name='spdcad_enabled',
type=FIELD_TYPES['bool'],
def_num=17,
),
18: Field(
name='power_enabled',
type=FIELD_TYPES['bool'],
def_num=18,
),
19: Field(
name='crank_length',
type=BASE_TYPES[0x02], # uint8
def_num=19,
scale=2,
offset=-110,
units='mm',
),
20: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=20,
),
21: Field(
name='bike_spd_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=21,
),
22: Field(
name='bike_cad_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=22,
),
23: Field(
name='bike_spdcad_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=23,
),
24: Field(
name='bike_power_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=24,
),
37: Field( # Rollover counter that can be used to extend the odometer
name='odometer_rollover',
type=BASE_TYPES[0x02], # uint8
def_num=37,
),
38: Field( # Number of front gears
name='front_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=38,
),
            39: Field( # Number of teeth on each gear; 0 is innermost
name='front_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=39,
),
40: Field( # Number of rear gears
name='rear_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=40,
),
            41: Field( # Number of teeth on each gear; 0 is innermost
name='rear_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=41,
),
44: Field(
name='shimano_di2_enabled',
type=FIELD_TYPES['bool'],
def_num=44,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
8: MessageType(
name='hr_zone',
mesg_num=8,
fields={
1: Field(
name='high_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=1,
units='bpm',
),
2: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=2,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
9: MessageType(
name='power_zone',
mesg_num=9,
fields={
1: Field(
name='high_value',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='watts',
),
2: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=2,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
10: MessageType(
name='met_zone',
mesg_num=10,
fields={
1: Field(
name='high_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=10,
units='kcal/min',
),
3: Field(
name='fat_calories',
type=BASE_TYPES[0x02], # uint8
def_num=3,
scale=10,
units='kcal/min',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
12: MessageType(
name='sport',
mesg_num=12,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
3: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
},
),
18: MessageType(
name='session',
mesg_num=18,
fields={
0: Field( # session
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field( # stop
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
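                # Semicircles convert to degrees as value * (180 / 2**31), per the
                # standard FIT position encoding.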
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=5,
),
6: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=6,
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strides',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strides',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
ReferenceField(
name='sport',
def_num=5,
value='walking',
raw_value=11,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
13: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=13,
units='kcal',
),
14: Field( # total_distance / total_timer_time
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
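                # The same value is also expanded into the 32-bit enhanced_avg_speed
                # field (def_num 124) via the component declared below.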
components=(
ComponentField(
name='enhanced_avg_speed',
def_num=124,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
15: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=15,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_max_speed',
def_num=125,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
16: Field( # average heart rate (excludes pause time)
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='bpm',
),
18: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
subfields=(
SubField(
name='avg_running_cadence',
def_num=18,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
),
),
),
),
19: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=19,
units='rpm',
subfields=(
SubField(
name='max_running_cadence',
def_num=19,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
),
),
),
),
20: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='watts',
),
22: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=23,
units='m',
),
24: Field(
name='total_training_effect',
type=BASE_TYPES[0x02], # uint8
def_num=24,
scale=10,
),
25: Field(
name='first_lap_index',
type=BASE_TYPES[0x84], # uint16
def_num=25,
),
26: Field(
name='num_laps',
type=BASE_TYPES[0x84], # uint16
def_num=26,
),
27: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=27,
),
28: Field(
name='trigger',
type=FIELD_TYPES['session_trigger'],
def_num=28,
),
29: Field(
name='nec_lat',
type=BASE_TYPES[0x85], # sint32
def_num=29,
units='semicircles',
),
30: Field(
name='nec_long',
type=BASE_TYPES[0x85], # sint32
def_num=30,
units='semicircles',
),
31: Field(
name='swc_lat',
type=BASE_TYPES[0x85], # sint32
def_num=31,
units='semicircles',
),
32: Field(
name='swc_long',
type=BASE_TYPES[0x85], # sint32
def_num=32,
units='semicircles',
),
34: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=34,
units='watts',
),
35: Field(
name='training_stress_score',
type=BASE_TYPES[0x84], # uint16
def_num=35,
scale=10,
units='tss',
),
36: Field(
name='intensity_factor',
type=BASE_TYPES[0x84], # uint16
def_num=36,
scale=1000,
units='if',
),
37: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=37,
),
41: Field(
name='avg_stroke_count',
type=BASE_TYPES[0x86], # uint32
def_num=41,
scale=10,
units='strokes/lap',
),
42: Field(
name='avg_stroke_distance',
type=BASE_TYPES[0x84], # uint16
def_num=42,
scale=100,
units='m',
),
43: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=43,
units='swim_stroke',
),
44: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=44,
scale=100,
units='m',
),
45: Field(
name='threshold_power',
type=BASE_TYPES[0x84], # uint16
def_num=45,
units='watts',
),
46: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=46,
),
47: Field( # # of active lengths of swim pool
name='num_active_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=47,
units='lengths',
),
48: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=48,
units='J',
),
49: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=49,
scale=5,
offset=500,
units='m',
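                # With scale=5 and offset=500 the decoded altitude is raw / 5 - 500 m
                # (e.g. raw 5250 -> 550 m), following the usual FIT scale/offset rule.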
components=(
ComponentField(
name='enhanced_avg_altitude',
def_num=126,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
50: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=50,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_max_altitude',
def_num=128,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
51: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=51,
units='m',
),
52: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=52,
scale=100,
units='%',
),
53: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=53,
scale=100,
units='%',
),
54: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=54,
scale=100,
units='%',
),
55: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=55,
scale=100,
units='%',
),
56: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=56,
scale=100,
units='%',
),
57: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=57,
units='C',
),
58: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=58,
units='C',
),
59: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=59,
scale=1000,
units='s',
),
60: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=60,
scale=1000,
units='m/s',
),
61: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=61,
scale=1000,
units='m/s',
),
62: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=62,
scale=1000,
units='m/s',
),
63: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=63,
scale=1000,
units='m/s',
),
64: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=64,
units='bpm',
),
65: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=65,
scale=1000,
units='s',
),
66: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=66,
scale=1000,
units='s',
),
67: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=67,
scale=1000,
units='s',
),
68: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=68,
scale=1000,
units='s',
),
69: Field(
name='avg_lap_time',
type=BASE_TYPES[0x86], # uint32
def_num=69,
scale=1000,
units='s',
),
70: Field(
name='best_lap_index',
type=BASE_TYPES[0x84], # uint16
def_num=70,
),
71: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=71,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_min_altitude',
def_num=127,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
82: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=82,
),
83: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=83,
),
84: Field(
name='opponent_name',
type=BASE_TYPES[0x07], # string
def_num=84,
),
85: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=85,
units='counts',
),
86: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=86,
units='counts',
),
87: Field(
name='max_ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=87,
scale=100,
units='m/s',
),
88: Field(
name='avg_ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=88,
scale=100,
units='m/s',
),
89: Field(
name='avg_vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=89,
scale=10,
units='mm',
),
90: Field(
name='avg_stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=90,
scale=100,
units='percent',
),
91: Field(
name='avg_stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=91,
scale=10,
units='ms',
),
92: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=92,
scale=128,
units='rpm',
),
93: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=93,
scale=128,
units='rpm',
),
94: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=94,
scale=128,
units='cycles',
),
95: Field( # Avg saturated and unsaturated hemoglobin
name='avg_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=95,
scale=100,
units='g/dL',
),
96: Field( # Min saturated and unsaturated hemoglobin
name='min_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=96,
scale=100,
units='g/dL',
),
97: Field( # Max saturated and unsaturated hemoglobin
name='max_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=97,
scale=100,
units='g/dL',
),
98: Field( # Avg percentage of hemoglobin saturated with oxygen
name='avg_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=98,
scale=10,
units='%',
),
99: Field( # Min percentage of hemoglobin saturated with oxygen
name='min_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=99,
scale=10,
units='%',
),
100: Field( # Max percentage of hemoglobin saturated with oxygen
name='max_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=100,
scale=10,
units='%',
),
101: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=101,
scale=2,
units='percent',
),
102: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=102,
scale=2,
units='percent',
),
103: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=103,
scale=2,
units='percent',
),
104: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=104,
scale=2,
units='percent',
),
105: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=105,
scale=2,
units='percent',
),
111: Field(
name='sport_index',
type=BASE_TYPES[0x02], # uint8
def_num=111,
),
            112: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=112,
scale=1000,
units='s',
),
113: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=113,
),
114: Field( # Average platform center offset Left
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=114,
units='mm',
),
115: Field( # Average platform center offset Right
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=115,
units='mm',
),
116: Field( # Average left power phase angles. Indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=116,
scale=0.7111111,
units='degrees',
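                # scale 0.7111111 is approximately 256/360, so the raw 0-255 value
                # maps onto 0-360 degrees.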
),
117: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=117,
scale=0.7111111,
units='degrees',
),
118: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=118,
scale=0.7111111,
units='degrees',
),
            119: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=119,
scale=0.7111111,
units='degrees',
),
120: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=120,
units='watts',
),
121: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=121,
units='watts',
),
122: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=122,
units='rpm',
),
123: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=123,
units='rpm',
),
124: Field( # total_distance / total_timer_time
name='enhanced_avg_speed',
type=BASE_TYPES[0x86], # uint32
def_num=124,
scale=1000,
units='m/s',
),
125: Field(
name='enhanced_max_speed',
type=BASE_TYPES[0x86], # uint32
def_num=125,
scale=1000,
units='m/s',
),
126: Field(
name='enhanced_avg_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=126,
scale=5,
offset=500,
units='m',
),
127: Field(
name='enhanced_min_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=127,
scale=5,
offset=500,
units='m',
),
128: Field(
name='enhanced_max_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=128,
scale=5,
offset=500,
units='m',
),
129: Field( # lev average motor power during session
name='avg_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=129,
units='watts',
),
130: Field( # lev maximum motor power during session
name='max_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=130,
units='watts',
),
131: Field( # lev battery consumption during session
name='lev_battery_consumption',
type=BASE_TYPES[0x02], # uint8
def_num=131,
scale=2,
units='percent',
),
132: Field(
name='avg_vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=132,
scale=100,
units='percent',
),
133: Field(
name='avg_stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=133,
scale=100,
units='percent',
),
134: Field(
name='avg_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=134,
scale=10,
units='mm',
),
137: Field(
name='total_anaerobic_training_effect',
type=BASE_TYPES[0x02], # uint8
def_num=137,
scale=10,
),
139: Field(
name='avg_vam',
type=BASE_TYPES[0x84], # uint16
def_num=139,
scale=1000,
units='m/s',
),
            253: FIELD_TYPE_TIMESTAMP, # Session end time.
254: Field( # Selected bit is set for the current session.
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
19: MessageType(
name='lap',
mesg_num=19,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='end_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=5,
units='semicircles',
),
6: Field(
name='end_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=6,
units='semicircles',
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strides',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strides',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
ReferenceField(
name='sport',
def_num=25,
value='walking',
raw_value=11,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=12,
units='kcal',
),
13: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=13,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_avg_speed',
def_num=110,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
14: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_max_speed',
def_num=111,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
15: Field(
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=15,
units='bpm',
),
16: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='rpm',
subfields=(
SubField(
name='avg_running_cadence',
def_num=17,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
),
),
),
),
18: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
subfields=(
SubField(
name='max_running_cadence',
def_num=18,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
),
),
),
),
19: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='watts',
),
20: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='m',
),
22: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='intensity',
type=FIELD_TYPES['intensity'],
def_num=23,
),
24: Field(
name='lap_trigger',
type=FIELD_TYPES['lap_trigger'],
def_num=24,
),
25: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=25,
),
26: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=26,
),
32: Field( # # of lengths of swim pool
name='num_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=32,
units='lengths',
),
33: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='watts',
),
34: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=34,
),
35: Field(
name='first_length_index',
type=BASE_TYPES[0x84], # uint16
def_num=35,
),
37: Field(
name='avg_stroke_distance',
type=BASE_TYPES[0x84], # uint16
def_num=37,
scale=100,
units='m',
),
38: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=38,
),
39: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=39,
),
40: Field( # # of active lengths of swim pool
name='num_active_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=40,
units='lengths',
),
41: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=41,
units='J',
),
42: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=42,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_avg_altitude',
def_num=112,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
43: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=43,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_max_altitude',
def_num=114,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
44: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=44,
units='m',
),
45: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=45,
scale=100,
units='%',
),
46: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=46,
scale=100,
units='%',
),
47: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=47,
scale=100,
units='%',
),
48: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=48,
scale=100,
units='%',
),
49: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=49,
scale=100,
units='%',
),
50: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=50,
units='C',
),
51: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=51,
units='C',
),
52: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=52,
scale=1000,
units='s',
),
53: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=53,
scale=1000,
units='m/s',
),
54: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=54,
scale=1000,
units='m/s',
),
55: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=55,
scale=1000,
units='m/s',
),
56: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=56,
scale=1000,
units='m/s',
),
57: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=57,
scale=1000,
units='s',
),
58: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=58,
scale=1000,
units='s',
),
59: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=59,
scale=1000,
units='s',
),
60: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=60,
scale=1000,
units='s',
),
61: Field(
name='repetition_num',
type=BASE_TYPES[0x84], # uint16
def_num=61,
),
62: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=62,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_min_altitude',
def_num=113,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
63: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=63,
units='bpm',
),
71: Field(
name='wkt_step_index',
type=FIELD_TYPES['message_index'],
def_num=71,
),
74: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=74,
),
75: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=75,
units='counts',
),
76: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=76,
units='counts',
),
77: Field(
name='avg_vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=77,
scale=10,
units='mm',
),
78: Field(
name='avg_stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=78,
scale=100,
units='percent',
),
79: Field(
name='avg_stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=79,
scale=10,
units='ms',
),
80: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=80,
scale=128,
units='rpm',
),
81: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=81,
scale=128,
units='rpm',
),
82: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=82,
scale=128,
units='cycles',
),
83: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=83,
),
84: Field( # Avg saturated and unsaturated hemoglobin
name='avg_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=84,
scale=100,
units='g/dL',
),
85: Field( # Min saturated and unsaturated hemoglobin
name='min_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=85,
scale=100,
units='g/dL',
),
86: Field( # Max saturated and unsaturated hemoglobin
name='max_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=86,
scale=100,
units='g/dL',
),
87: Field( # Avg percentage of hemoglobin saturated with oxygen
name='avg_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=87,
scale=10,
units='%',
),
88: Field( # Min percentage of hemoglobin saturated with oxygen
name='min_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=88,
scale=10,
units='%',
),
89: Field( # Max percentage of hemoglobin saturated with oxygen
name='max_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=89,
scale=10,
units='%',
),
91: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=91,
scale=2,
units='percent',
),
92: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=92,
scale=2,
units='percent',
),
93: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=93,
scale=2,
units='percent',
),
94: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=94,
scale=2,
units='percent',
),
95: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=95,
scale=2,
units='percent',
),
98: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=98,
scale=1000,
units='s',
),
99: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=99,
),
100: Field( # Average left platform center offset
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=100,
units='mm',
),
101: Field( # Average right platform center offset
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=101,
units='mm',
),
102: Field( # Average left power phase angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=102,
scale=0.7111111,
units='degrees',
),
103: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=103,
scale=0.7111111,
units='degrees',
),
104: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=104,
scale=0.7111111,
units='degrees',
),
105: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=105,
scale=0.7111111,
units='degrees',
),
106: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=106,
units='watts',
),
107: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=107,
units='watts',
),
108: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=108,
units='rpm',
),
109: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=109,
units='rpm',
),
110: Field(
name='enhanced_avg_speed',
type=BASE_TYPES[0x86], # uint32
def_num=110,
scale=1000,
units='m/s',
),
111: Field(
name='enhanced_max_speed',
type=BASE_TYPES[0x86], # uint32
def_num=111,
scale=1000,
units='m/s',
),
112: Field(
name='enhanced_avg_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=112,
scale=5,
offset=500,
units='m',
),
113: Field(
name='enhanced_min_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=113,
scale=5,
offset=500,
units='m',
),
114: Field(
name='enhanced_max_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=114,
scale=5,
offset=500,
units='m',
),
115: Field( # lev average motor power during lap
name='avg_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=115,
units='watts',
),
116: Field( # lev maximum motor power during lap
name='max_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=116,
units='watts',
),
117: Field( # lev battery consumption during lap
name='lev_battery_consumption',
type=BASE_TYPES[0x02], # uint8
def_num=117,
scale=2,
units='percent',
),
118: Field(
name='avg_vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=118,
scale=100,
units='percent',
),
119: Field(
name='avg_stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=119,
scale=100,
units='percent',
),
120: Field(
name='avg_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=120,
scale=10,
units='mm',
),
121: Field(
name='avg_vam',
type=BASE_TYPES[0x84], # uint16
def_num=121,
scale=1000,
units='m/s',
),
253: FIELD_TYPE_TIMESTAMP, # Lap end time.
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
20: MessageType(
name='record',
mesg_num=20,
fields={
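            # Note: position_lat/position_long below are stored in semicircles;
            # degrees = semicircles * (180 / 2**31) under the standard FIT convention.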
0: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=0,
units='semicircles',
),
1: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
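            # The altitude field below decodes as meters = raw / 5 - 500
            # (the general rule for scaled fields here is value = raw / scale - offset).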
2: Field(
name='altitude',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_altitude',
def_num=78,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
3: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=3,
units='bpm',
),
4: Field(
name='cadence',
type=BASE_TYPES[0x02], # uint8
def_num=4,
units='rpm',
),
5: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=5,
scale=100,
units='m',
),
6: Field(
name='speed',
type=BASE_TYPES[0x84], # uint16
def_num=6,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_speed',
def_num=73,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
7: Field(
name='power',
type=BASE_TYPES[0x84], # uint16
def_num=7,
units='watts',
),
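            # compressed_speed_distance packs two 12-bit components into 3 bytes:
            # instantaneous speed (scale 100) and an accumulating distance counter
            # (scale 16); accumulate=True tells the decoder to track rollovers.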
8: Field(
name='compressed_speed_distance',
type=BASE_TYPES[0x0D], # byte
def_num=8,
components=(
ComponentField(
name='speed',
def_num=6,
scale=100,
units='m/s',
accumulate=False,
bits=12,
bit_offset=0,
),
ComponentField(
name='distance',
def_num=5,
scale=16,
units='m',
accumulate=True,
bits=12,
bit_offset=12,
),
),
),
9: Field(
name='grade',
type=BASE_TYPES[0x83], # sint16
def_num=9,
scale=100,
units='%',
),
            10: Field( # Relative. 0 is none, 254 is Max.
name='resistance',
type=BASE_TYPES[0x02], # uint8
def_num=10,
),
11: Field(
name='time_from_course',
type=BASE_TYPES[0x85], # sint32
def_num=11,
scale=1000,
units='s',
),
12: Field(
name='cycle_length',
type=BASE_TYPES[0x02], # uint8
def_num=12,
scale=100,
units='m',
),
13: Field(
name='temperature',
type=BASE_TYPES[0x01], # sint8
def_num=13,
units='C',
),
17: Field( # Speed at 1s intervals. Timestamp field indicates time of last array element.
name='speed_1s',
type=BASE_TYPES[0x02], # uint8
def_num=17,
scale=16,
units='m/s',
),
18: Field(
name='cycles',
type=BASE_TYPES[0x02], # uint8
def_num=18,
components=(
ComponentField(
name='total_cycles',
def_num=19,
units='cycles',
accumulate=True,
bits=8,
bit_offset=0,
),
),
),
19: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=19,
units='cycles',
),
28: Field(
name='compressed_accumulated_power',
type=BASE_TYPES[0x84], # uint16
def_num=28,
components=(
ComponentField(
name='accumulated_power',
def_num=29,
units='watts',
accumulate=True,
bits=16,
bit_offset=0,
),
),
),
29: Field(
name='accumulated_power',
type=BASE_TYPES[0x86], # uint32
def_num=29,
units='watts',
),
30: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance'],
def_num=30,
),
31: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=31,
units='m',
),
32: Field(
name='vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=32,
scale=1000,
units='m/s',
),
33: Field(
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='kcal',
),
39: Field(
name='vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=39,
scale=10,
units='mm',
),
40: Field(
name='stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=40,
scale=100,
units='percent',
),
41: Field(
name='stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=41,
scale=10,
units='ms',
),
42: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=42,
),
43: Field(
name='left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=43,
scale=2,
units='percent',
),
44: Field(
name='right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=44,
scale=2,
units='percent',
),
45: Field(
name='left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=45,
scale=2,
units='percent',
),
46: Field(
name='right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=46,
scale=2,
units='percent',
),
47: Field(
name='combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=47,
scale=2,
units='percent',
),
48: Field(
name='time128',
type=BASE_TYPES[0x02], # uint8
def_num=48,
scale=128,
units='s',
),
49: Field(
name='stroke_type',
type=FIELD_TYPES['stroke_type'],
def_num=49,
),
50: Field(
name='zone',
type=BASE_TYPES[0x02], # uint8
def_num=50,
),
51: Field(
name='ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=51,
scale=100,
units='m/s',
),
            52: Field( # Log cadence and fractional cadence for backwards compatibility
name='cadence256',
type=BASE_TYPES[0x84], # uint16
def_num=52,
scale=256,
units='rpm',
),
53: Field(
name='fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=53,
scale=128,
units='rpm',
),
54: Field( # Total saturated and unsaturated hemoglobin
name='total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=54,
scale=100,
units='g/dL',
),
55: Field( # Min saturated and unsaturated hemoglobin
name='total_hemoglobin_conc_min',
type=BASE_TYPES[0x84], # uint16
def_num=55,
scale=100,
units='g/dL',
),
56: Field( # Max saturated and unsaturated hemoglobin
name='total_hemoglobin_conc_max',
type=BASE_TYPES[0x84], # uint16
def_num=56,
scale=100,
units='g/dL',
),
57: Field( # Percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=57,
scale=10,
units='%',
),
58: Field( # Min percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent_min',
type=BASE_TYPES[0x84], # uint16
def_num=58,
scale=10,
units='%',
),
59: Field( # Max percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent_max',
type=BASE_TYPES[0x84], # uint16
def_num=59,
scale=10,
units='%',
),
62: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=62,
),
67: Field( # Left platform center offset
name='left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=67,
units='mm',
),
68: Field( # Right platform center offset
name='right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=68,
units='mm',
),
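            # The power phase fields below use scale 0.7111111 (approx. 256/360), so
            # degrees = raw * 360 / 256, mapping the uint8 range onto roughly 0-360 degrees.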
69: Field( # Left power phase angles. Data value indexes defined by power_phase_type.
name='left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=69,
scale=0.7111111,
units='degrees',
),
70: Field( # Left power phase peak angles. Data value indexes defined by power_phase_type.
name='left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=70,
scale=0.7111111,
units='degrees',
),
71: Field( # Right power phase angles. Data value indexes defined by power_phase_type.
name='right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=71,
scale=0.7111111,
units='degrees',
),
72: Field( # Right power phase peak angles. Data value indexes defined by power_phase_type.
name='right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=72,
scale=0.7111111,
units='degrees',
),
73: Field(
name='enhanced_speed',
type=BASE_TYPES[0x86], # uint32
def_num=73,
scale=1000,
units='m/s',
),
78: Field(
name='enhanced_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=78,
scale=5,
offset=500,
units='m',
),
81: Field( # lev battery state of charge
name='battery_soc',
type=BASE_TYPES[0x02], # uint8
def_num=81,
scale=2,
units='percent',
),
82: Field( # lev motor power
name='motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=82,
units='watts',
),
83: Field(
name='vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=83,
scale=100,
units='percent',
),
84: Field(
name='stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=84,
scale=100,
units='percent',
),
85: Field(
name='step_length',
type=BASE_TYPES[0x84], # uint16
def_num=85,
scale=10,
units='mm',
),
253: FIELD_TYPE_TIMESTAMP,
},
),
21: MessageType(
name='event',
mesg_num=21,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='data16',
type=BASE_TYPES[0x84], # uint16
def_num=2,
components=(
ComponentField(
name='data',
def_num=3,
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
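            # 'data' is interpreted via SubFields: the decoder applies the SubField whose
            # ReferenceField matches the current 'event' value (def_num=0), e.g.
            # event raw_value 11 ('battery') reads data as battery_level in volts.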
3: Field(
name='data',
type=BASE_TYPES[0x86], # uint32
def_num=3,
subfields=(
SubField(
name='battery_level',
def_num=3,
type=BASE_TYPES[0x84], # uint16
scale=1000,
units='V',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='battery',
raw_value=11,
),
),
),
SubField(
name='cad_high_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='rpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='cad_high_alert',
raw_value=17,
),
),
),
SubField(
name='cad_low_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='rpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='cad_low_alert',
raw_value=18,
),
),
),
SubField(
name='calorie_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='calorie_duration_alert',
raw_value=25,
),
),
),
SubField(
name='comm_timeout',
def_num=3,
type=FIELD_TYPES['comm_timeout_type'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='comm_timeout',
raw_value=47,
),
),
),
SubField(
name='course_point_index',
def_num=3,
type=FIELD_TYPES['message_index'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='course_point',
raw_value=10,
),
),
),
SubField(
name='distance_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='distance_duration_alert',
raw_value=24,
),
),
),
SubField(
name='fitness_equipment_state',
def_num=3,
type=FIELD_TYPES['fitness_equipment_state'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='fitness_equipment',
raw_value=27,
),
),
),
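                        # gear_change_data packs four 8-bit values into one uint32:
                        # rear gear number, rear teeth, front gear number, front teeth,
                        # split out by the ComponentFields defined below.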
SubField(
name='gear_change_data',
def_num=3,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='front_gear_change',
raw_value=42,
),
ReferenceField(
name='event',
def_num=0,
value='rear_gear_change',
raw_value=43,
),
),
components=(
ComponentField(
name='rear_gear_num',
def_num=11,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='rear_gear',
def_num=12,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='front_gear_num',
def_num=9,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='front_gear',
def_num=10,
accumulate=False,
bits=8,
bit_offset=24,
),
),
),
SubField(
name='hr_high_alert',
def_num=3,
type=BASE_TYPES[0x02], # uint8
units='bpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='hr_high_alert',
raw_value=13,
),
),
),
SubField(
name='hr_low_alert',
def_num=3,
type=BASE_TYPES[0x02], # uint8
units='bpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='hr_low_alert',
raw_value=14,
),
),
),
SubField(
name='power_high_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='watts',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='power_high_alert',
raw_value=19,
),
),
),
SubField(
name='power_low_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='watts',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='power_low_alert',
raw_value=20,
),
),
),
SubField( # Indicates the rider position value.
name='rider_position',
def_num=3,
type=FIELD_TYPES['rider_position_type'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='rider_position_change',
raw_value=44,
),
),
),
SubField(
name='speed_high_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='speed_high_alert',
raw_value=15,
),
),
),
SubField(
name='speed_low_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='speed_low_alert',
raw_value=16,
),
),
),
SubField(
name='sport_point',
def_num=3,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='sport_point',
raw_value=33,
),
),
components=(
ComponentField(
name='score',
def_num=7,
accumulate=False,
bits=16,
bit_offset=0,
),
ComponentField(
name='opponent_score',
def_num=8,
accumulate=False,
bits=16,
bit_offset=16,
),
),
),
SubField(
name='time_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='time_duration_alert',
raw_value=23,
),
),
),
SubField(
name='timer_trigger',
def_num=3,
type=FIELD_TYPES['timer_trigger'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='timer',
raw_value=0,
),
),
),
SubField(
name='virtual_partner_speed',
def_num=3,
type=BASE_TYPES[0x84], # uint16
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='virtual_partner_pace',
raw_value=12,
),
),
),
),
),
4: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
7: Field( # Do not populate directly. Autogenerated by decoder for sport_point subfield components
name='score',
type=BASE_TYPES[0x84], # uint16
def_num=7,
),
8: Field( # Do not populate directly. Autogenerated by decoder for sport_point subfield components
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=8,
),
9: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Front gear number. 1 is innermost.
name='front_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=9,
),
10: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Number of front teeth.
name='front_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=10,
),
11: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Rear gear number. 1 is innermost.
name='rear_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=11,
),
12: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Number of rear teeth.
name='rear_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=12,
),
13: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=13,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
23: MessageType(
name='device_info',
mesg_num=23,
fields={
0: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=0,
),
1: Field(
name='device_type',
type=FIELD_TYPES['antplus_device_type'], # uint8
def_num=1,
subfields=(
SubField(
name='ant_device_type',
def_num=1,
type=BASE_TYPES[0x02], # uint8
ref_fields=(
ReferenceField(
name='source_type',
def_num=25,
value='ant',
raw_value=0,
),
),
),
SubField(
name='antplus_device_type',
def_num=1,
type=FIELD_TYPES['antplus_device_type'],
ref_fields=(
ReferenceField(
name='source_type',
def_num=25,
value='antplus',
raw_value=1,
),
),
),
),
),
2: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=2,
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
4: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=4,
subfields=(
SubField(
name='garmin_product',
def_num=4,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=2,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=2,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=2,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
5: Field(
name='software_version',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
),
6: Field(
name='hardware_version',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field( # Reset by new battery or charge.
name='cum_operating_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
units='s',
),
10: Field(
name='battery_voltage',
type=BASE_TYPES[0x84], # uint16
def_num=10,
scale=256,
units='V',
),
11: Field(
name='battery_status',
type=FIELD_TYPES['battery_status'],
def_num=11,
),
18: Field( # Indicates the location of the sensor
name='sensor_position',
type=FIELD_TYPES['body_location'],
def_num=18,
),
19: Field( # Used to describe the sensor or location
name='descriptor',
type=BASE_TYPES[0x07], # string
def_num=19,
),
20: Field(
name='ant_transmission_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=20,
),
21: Field(
name='ant_device_number',
type=BASE_TYPES[0x8B], # uint16z
def_num=21,
),
22: Field(
name='ant_network',
type=FIELD_TYPES['ant_network'],
def_num=22,
),
25: Field(
name='source_type',
type=FIELD_TYPES['source_type'],
def_num=25,
),
27: Field( # Optional free form string to indicate the devices name or model
name='product_name',
type=BASE_TYPES[0x07], # string
def_num=27,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
27: MessageType(
name='workout_step',
mesg_num=27,
fields={
0: Field(
name='wkt_step_name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='duration_type',
type=FIELD_TYPES['wkt_step_duration'],
def_num=1,
),
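            # duration_value is polymorphic: the SubField whose ReferenceField matches
            # the current duration_type is applied, e.g. duration_type raw 0 ('time')
            # decodes the value with scale 1000 as seconds.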
2: Field(
name='duration_value',
type=BASE_TYPES[0x86], # uint32
def_num=2,
subfields=(
SubField(
name='duration_calories',
def_num=2,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='calories',
raw_value=4,
),
),
),
SubField(
name='duration_distance',
def_num=2,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='distance',
raw_value=1,
),
),
),
SubField(
name='duration_hr',
def_num=2,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='hr_less_than',
raw_value=2,
),
ReferenceField(
name='duration_type',
def_num=1,
value='hr_greater_than',
raw_value=3,
),
),
),
SubField(
name='duration_power',
def_num=2,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='power_less_than',
raw_value=14,
),
ReferenceField(
name='duration_type',
def_num=1,
value='power_greater_than',
raw_value=15,
),
),
),
                    SubField( # message_index of step to loop back to. Steps are assumed to be in order by message_index. custom_name and intensity members are undefined for this duration type.
name='duration_step',
def_num=2,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_steps_cmplt',
raw_value=6,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_time',
raw_value=7,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_distance',
raw_value=8,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_calories',
raw_value=9,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_less_than',
raw_value=10,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_greater_than',
raw_value=11,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_less_than',
raw_value=12,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_greater_than',
raw_value=13,
),
),
),
SubField(
name='duration_time',
def_num=2,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='time',
raw_value=0,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repetition_time',
raw_value=28,
),
),
),
),
),
3: Field(
name='target_type',
type=FIELD_TYPES['wkt_step_target'],
def_num=3,
),
4: Field(
name='target_value',
type=BASE_TYPES[0x86], # uint32
def_num=4,
subfields=(
SubField(
name='repeat_calories',
def_num=4,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_calories',
raw_value=9,
),
),
),
SubField(
name='repeat_distance',
def_num=4,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_distance',
raw_value=8,
),
),
),
SubField(
name='repeat_hr',
def_num=4,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_less_than',
raw_value=10,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_greater_than',
raw_value=11,
),
),
),
SubField(
name='repeat_power',
def_num=4,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_less_than',
raw_value=12,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_greater_than',
raw_value=13,
),
),
),
SubField( # # of repetitions
name='repeat_steps',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_steps_cmplt',
raw_value=6,
),
),
),
SubField(
name='repeat_time',
def_num=4,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_time',
raw_value=7,
),
),
),
SubField( # Zone (1-?); Custom = 0;
name='target_cadence_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
                    SubField( # hr zone (1-5); Custom = 0;
name='target_hr_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
                    SubField( # Power Zone (1-7); Custom = 0;
name='target_power_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
                    SubField( # speed zone (1-10); Custom = 0;
name='target_speed_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
SubField(
name='target_stroke_type',
def_num=4,
type=FIELD_TYPES['swim_stroke'],
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='swim_stroke',
raw_value=11,
),
),
),
),
),
5: Field(
name='custom_target_value_low',
type=BASE_TYPES[0x86], # uint32
def_num=5,
subfields=(
SubField(
name='custom_target_cadence_low',
def_num=5,
type=BASE_TYPES[0x86], # uint32
units='rpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField(
name='custom_target_heart_rate_low',
def_num=5,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField(
name='custom_target_power_low',
def_num=5,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField(
name='custom_target_speed_low',
def_num=5,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
),
),
6: Field(
name='custom_target_value_high',
type=BASE_TYPES[0x86], # uint32
def_num=6,
subfields=(
SubField(
name='custom_target_cadence_high',
def_num=6,
type=BASE_TYPES[0x86], # uint32
units='rpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField(
name='custom_target_heart_rate_high',
def_num=6,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField(
name='custom_target_power_high',
def_num=6,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField(
name='custom_target_speed_high',
def_num=6,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
),
),
7: Field(
name='intensity',
type=FIELD_TYPES['intensity'],
def_num=7,
),
8: Field(
name='notes',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='equipment',
type=FIELD_TYPES['workout_equipment'],
def_num=9,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
32: MessageType(
name='course_point',
mesg_num=32,
fields={
1: Field(
name='timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=100,
units='m',
),
5: Field(
name='type',
type=FIELD_TYPES['course_point'],
def_num=5,
),
6: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=6,
),
8: Field(
name='favorite',
type=FIELD_TYPES['bool'],
def_num=8,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
37: MessageType(
name='file_capabilities',
mesg_num=37,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='flags',
type=FIELD_TYPES['file_flags'],
def_num=1,
),
2: Field(
name='directory',
type=BASE_TYPES[0x07], # string
def_num=2,
),
3: Field(
name='max_count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
4: Field(
name='max_size',
type=BASE_TYPES[0x86], # uint32
def_num=4,
units='bytes',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
38: MessageType(
name='mesg_capabilities',
mesg_num=38,
fields={
0: Field(
name='file',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=1,
),
2: Field(
name='count_type',
type=FIELD_TYPES['mesg_count'],
def_num=2,
),
3: Field(
name='count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
subfields=(
SubField(
name='max_per_file',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='max_per_file',
raw_value=1,
),
),
),
SubField(
name='max_per_file_type',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='max_per_file_type',
raw_value=2,
),
),
),
SubField(
name='num_per_file',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='num_per_file',
raw_value=0,
),
),
),
),
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
39: MessageType(
name='field_capabilities',
mesg_num=39,
fields={
0: Field(
name='file',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=1,
),
2: Field(
name='field_num',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
49: MessageType(
name='file_creator',
mesg_num=49,
fields={
0: Field(
name='software_version',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='hardware_version',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
},
),
53: MessageType(
name='speed_zone',
mesg_num=53,
fields={
0: Field(
name='high_value',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=1000,
units='m/s',
),
1: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
55: MessageType(
name='monitoring',
mesg_num=55,
fields={
            0: Field( # Associates this data with a device_info message. Not required for a file with a single device (sensor).
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=0,
),
1: Field( # Accumulated total calories. Maintained by MonitoringReader for each activity_type. See SDK documentation
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='kcal',
),
2: Field( # Accumulated distance. Maintained by MonitoringReader for each activity_type. See SDK documentation.
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=2,
scale=100,
units='m',
),
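            # 'cycles' is re-interpreted by activity_type: walking/running use the
            # 'steps' SubField (no scale), while cycling/swimming use 'strokes'
            # (scale 2), as declared below.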
3: Field( # Accumulated cycles. Maintained by MonitoringReader for each activity_type. See SDK documentation.
name='cycles',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=2,
units='cycles',
subfields=(
SubField(
name='steps',
def_num=3,
type=BASE_TYPES[0x86], # uint32
units='steps',
ref_fields=(
ReferenceField(
name='activity_type',
def_num=5,
value='walking',
raw_value=6,
),
ReferenceField(
name='activity_type',
def_num=5,
value='running',
raw_value=1,
),
),
),
SubField(
name='strokes',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=2,
units='strokes',
ref_fields=(
ReferenceField(
name='activity_type',
def_num=5,
value='cycling',
raw_value=2,
),
ReferenceField(
name='activity_type',
def_num=5,
value='swimming',
raw_value=5,
),
),
),
),
),
4: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=5,
),
6: Field(
name='activity_subtype',
type=FIELD_TYPES['activity_subtype'],
def_num=6,
),
7: Field(
name='activity_level',
type=FIELD_TYPES['activity_level'],
def_num=7,
),
8: Field(
name='distance_16',
type=BASE_TYPES[0x84], # uint16
def_num=8,
units='100*m',
),
9: Field(
name='cycles_16',
type=BASE_TYPES[0x84], # uint16
def_num=9,
units='2*cycles or steps',
),
10: Field(
name='active_time_16',
type=BASE_TYPES[0x84], # uint16
def_num=10,
units='s',
),
11: Field( # Must align to logging interval, for example, time must be 00:00:00 for daily log.
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=11,
),
12: Field( # Avg temperature during the logging interval ended at timestamp
name='temperature',
type=BASE_TYPES[0x83], # sint16
def_num=12,
scale=100,
units='C',
),
14: Field( # Min temperature during the logging interval ended at timestamp
name='temperature_min',
type=BASE_TYPES[0x83], # sint16
def_num=14,
scale=100,
units='C',
),
15: Field( # Max temperature during the logging interval ended at timestamp
name='temperature_max',
type=BASE_TYPES[0x83], # sint16
def_num=15,
scale=100,
units='C',
),
16: Field( # Indexed using minute_activity_level enum
name='activity_time',
type=BASE_TYPES[0x84], # uint16
def_num=16,
units='minutes',
),
19: Field(
name='active_calories',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='kcal',
),
24: Field( # Indicates single type / intensity for duration since last monitoring message.
name='current_activity_type_intensity',
type=BASE_TYPES[0x0D], # byte
def_num=24,
components=(
ComponentField(
name='activity_type',
def_num=5,
accumulate=False,
bits=5,
bit_offset=0,
),
ComponentField(
name='intensity',
def_num=28,
accumulate=False,
bits=3,
bit_offset=5,
),
),
),
25: Field(
name='timestamp_min_8',
type=BASE_TYPES[0x02], # uint8
def_num=25,
units='min',
),
26: Field(
name='timestamp_16',
type=BASE_TYPES[0x84], # uint16
def_num=26,
units='s',
),
27: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=27,
units='bpm',
),
28: Field(
name='intensity',
type=BASE_TYPES[0x02], # uint8
def_num=28,
scale=10,
),
29: Field(
name='duration_min',
type=BASE_TYPES[0x84], # uint16
def_num=29,
units='min',
),
30: Field(
name='duration',
type=BASE_TYPES[0x86], # uint32
def_num=30,
units='s',
),
31: Field(
name='ascent',
type=BASE_TYPES[0x86], # uint32
def_num=31,
scale=1000,
units='m',
),
32: Field(
name='descent',
type=BASE_TYPES[0x86], # uint32
def_num=32,
scale=1000,
units='m',
),
33: Field(
name='moderate_activity_minutes',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='minutes',
),
34: Field(
name='vigorous_activity_minutes',
type=BASE_TYPES[0x84], # uint16
def_num=34,
units='minutes',
),
253: FIELD_TYPE_TIMESTAMP, # Must align to logging interval, for example, time must be 00:00:00 for daily log.
},
),
72: MessageType( # Corresponds to file_id of workout or course.
name='training_file',
mesg_num=72,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=1,
),
2: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=2,
subfields=(
SubField(
name='garmin_product',
def_num=2,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=1,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
4: Field(
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
78: MessageType( # Heart rate variability
name='hrv',
mesg_num=78,
fields={
0: Field( # Time between beats
name='time',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=1000,
units='s',
),
},
),
80: MessageType(
name='ant_rx',
mesg_num=80,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='mesg_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
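            # mesg_data holds a raw ANT packet: byte 0 is the channel number and the
            # remaining 8 bytes are copied into the repeated 'data' field (def_num=4).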
2: Field(
name='mesg_data',
type=BASE_TYPES[0x0D], # byte
def_num=2,
components=(
ComponentField(
name='channel_number',
def_num=3,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=24,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=32,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=40,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=48,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=56,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=64,
),
),
),
3: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data',
type=BASE_TYPES[0x0D], # byte
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
81: MessageType(
name='ant_tx',
mesg_num=81,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='mesg_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='mesg_data',
type=BASE_TYPES[0x0D], # byte
def_num=2,
components=(
ComponentField(
name='channel_number',
def_num=3,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=24,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=32,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=40,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=48,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=56,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=64,
),
),
),
3: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data',
type=BASE_TYPES[0x0D], # byte
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
82: MessageType(
name='ant_channel_id',
mesg_num=82,
fields={
0: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='device_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=1,
),
2: Field(
name='device_number',
type=BASE_TYPES[0x8B], # uint16z
def_num=2,
),
3: Field(
name='transmission_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=3,
),
4: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=4,
),
},
),
101: MessageType(
name='length',
mesg_num=101,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=1000,
units='s',
),
4: Field(
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field(
name='total_strokes',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='strokes',
),
6: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=6,
scale=1000,
units='m/s',
),
7: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=7,
units='swim_stroke',
),
9: Field(
name='avg_swimming_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=9,
units='strokes/min',
),
10: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=10,
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field(
name='length_type',
type=FIELD_TYPES['length_type'],
def_num=12,
),
18: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=18,
),
19: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=19,
),
20: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='counts',
),
21: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='counts',
),
253: FIELD_TYPE_TIMESTAMP,
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
106: MessageType(
name='slave_device',
mesg_num=106,
fields={
0: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=0,
),
1: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=1,
subfields=(
SubField(
name='garmin_product',
def_num=1,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=0,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
},
),
127: MessageType(
name='connectivity',
mesg_num=127,
fields={
0: Field( # Use Bluetooth for connectivity features
name='bluetooth_enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field( # Use Bluetooth Low Energy for connectivity features
name='bluetooth_le_enabled',
type=FIELD_TYPES['bool'],
def_num=1,
),
2: Field( # Use ANT for connectivity features
name='ant_enabled',
type=FIELD_TYPES['bool'],
def_num=2,
),
3: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
4: Field(
name='live_tracking_enabled',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='weather_conditions_enabled',
type=FIELD_TYPES['bool'],
def_num=5,
),
6: Field(
name='weather_alerts_enabled',
type=FIELD_TYPES['bool'],
def_num=6,
),
7: Field(
name='auto_activity_upload_enabled',
type=FIELD_TYPES['bool'],
def_num=7,
),
8: Field(
name='course_download_enabled',
type=FIELD_TYPES['bool'],
def_num=8,
),
9: Field(
name='workout_download_enabled',
type=FIELD_TYPES['bool'],
def_num=9,
),
10: Field(
name='gps_ephemeris_download_enabled',
type=FIELD_TYPES['bool'],
def_num=10,
),
11: Field(
name='incident_detection_enabled',
type=FIELD_TYPES['bool'],
def_num=11,
),
12: Field(
name='grouptrack_enabled',
type=FIELD_TYPES['bool'],
def_num=12,
),
},
),
128: MessageType(
name='weather_conditions',
mesg_num=128,
fields={
0: Field( # Current or forecast
name='weather_report',
type=FIELD_TYPES['weather_report'],
def_num=0,
),
1: Field(
name='temperature',
type=BASE_TYPES[0x01], # sint8
def_num=1,
units='C',
),
2: Field( # Corresponds to GSC Response weatherIcon field
name='condition',
type=FIELD_TYPES['weather_status'],
def_num=2,
),
3: Field(
name='wind_direction',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='degrees',
),
4: Field(
name='wind_speed',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=1000,
units='m/s',
),
5: Field( # range 0-100
name='precipitation_probability',
type=BASE_TYPES[0x02], # uint8
def_num=5,
),
6: Field( # Heat Index if GCS heatIdx above or equal to 90F or wind chill if GCS windChill below or equal to 32F
name='temperature_feels_like',
type=BASE_TYPES[0x01], # sint8
def_num=6,
units='C',
),
7: Field(
name='relative_humidity',
type=BASE_TYPES[0x02], # uint8
def_num=7,
),
8: Field( # string corresponding to GCS response location string
name='location',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='observed_at_time',
type=FIELD_TYPES['date_time'],
def_num=9,
),
10: Field(
name='observed_location_lat',
type=BASE_TYPES[0x85], # sint32
def_num=10,
units='semicircles',
),
11: Field(
name='observed_location_long',
type=BASE_TYPES[0x85], # sint32
def_num=11,
units='semicircles',
),
12: Field(
name='day_of_week',
type=FIELD_TYPES['day_of_week'],
def_num=12,
),
13: Field(
name='high_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=13,
units='C',
),
14: Field(
name='low_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=14,
units='C',
),
253: FIELD_TYPE_TIMESTAMP, # time of update for current conditions, else forecast time
},
),
129: MessageType(
name='weather_alert',
mesg_num=129,
fields={
0: Field( # Unique identifier from GCS report ID string, length is 12
name='report_id',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # Time alert was issued
name='issue_time',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field( # Time alert expires
name='expire_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field( # Warning, Watch, Advisory, Statement
name='severity',
type=FIELD_TYPES['weather_severity'],
def_num=3,
),
4: Field( # Tornado, Severe Thunderstorm, etc.
name='type',
type=FIELD_TYPES['weather_severe_type'],
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
131: MessageType(
name='cadence_zone',
mesg_num=131,
fields={
0: Field(
name='high_value',
type=BASE_TYPES[0x02], # uint8
def_num=0,
units='rpm',
),
1: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
132: MessageType(
name='hr',
mesg_num=132,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='time256',
type=BASE_TYPES[0x02], # uint8
def_num=1,
components=(
ComponentField(
name='fractional_timestamp',
def_num=0,
scale=256,
units='s',
accumulate=False,
bits=8,
bit_offset=0,
),
),
),
6: Field(
name='filtered_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=6,
units='bpm',
),
9: Field(
name='event_timestamp',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=1024,
units='s',
),
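            # event_timestamp_12 packs ten 12-bit deltas (15 bytes) that accumulate into
            # event_timestamp at 1/1024 s resolution, so one message carries several
            # beat timestamps.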
10: Field(
name='event_timestamp_12',
type=BASE_TYPES[0x0D], # byte
def_num=10,
components=(
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=0,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=12,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=24,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=36,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=48,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=60,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=72,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=84,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=96,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=108,
),
),
),
253: FIELD_TYPE_TIMESTAMP,
},
),
142: MessageType(
name='segment_lap',
mesg_num=142,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='end_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=5,
units='semicircles',
),
6: Field(
name='end_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=6,
units='semicircles',
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strokes',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strokes',
ref_fields=(
ReferenceField(
name='sport',
def_num=23,
value='cycling',
raw_value=2,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=12,
units='kcal',
),
13: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=13,
scale=1000,
units='m/s',
),
14: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
),
15: Field(
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=15,
units='bpm',
),
16: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='rpm',
),
18: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
),
19: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='watts',
),
20: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='m',
),
22: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=23,
),
24: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=24,
),
25: Field( # North east corner latitude.
name='nec_lat',
type=BASE_TYPES[0x85], # sint32
def_num=25,
units='semicircles',
),
26: Field( # North east corner longitude.
name='nec_long',
type=BASE_TYPES[0x85], # sint32
def_num=26,
units='semicircles',
),
27: Field( # South west corner latitude.
name='swc_lat',
type=BASE_TYPES[0x85], # sint32
def_num=27,
units='semicircles',
),
            28: Field( # South west corner longitude.
name='swc_long',
type=BASE_TYPES[0x85], # sint32
def_num=28,
units='semicircles',
),
29: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=29,
),
30: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=30,
units='watts',
),
31: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=31,
),
32: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=32,
),
33: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=33,
units='J',
),
34: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=34,
scale=5,
offset=500,
units='m',
),
35: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=35,
scale=5,
offset=500,
units='m',
),
36: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=36,
units='m',
),
37: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=37,
scale=100,
units='%',
),
38: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=38,
scale=100,
units='%',
),
39: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=39,
scale=100,
units='%',
),
40: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=40,
scale=100,
units='%',
),
41: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=41,
scale=100,
units='%',
),
42: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=42,
units='C',
),
43: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=43,
units='C',
),
44: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=44,
scale=1000,
units='s',
),
45: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=45,
scale=1000,
units='m/s',
),
46: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=46,
scale=1000,
units='m/s',
),
47: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=47,
scale=1000,
units='m/s',
),
48: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=48,
scale=1000,
units='m/s',
),
49: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=49,
scale=1000,
units='s',
),
50: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=50,
scale=1000,
units='s',
),
51: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=51,
scale=1000,
units='s',
),
52: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=52,
scale=1000,
units='s',
),
53: Field(
name='repetition_num',
type=BASE_TYPES[0x84], # uint16
def_num=53,
),
54: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=54,
scale=5,
offset=500,
units='m',
),
55: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=55,
units='bpm',
),
56: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=56,
scale=1000,
units='s',
),
57: Field(
name='wkt_step_index',
type=FIELD_TYPES['message_index'],
def_num=57,
),
58: Field(
name='sport_event',
type=FIELD_TYPES['sport_event'],
def_num=58,
),
59: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=59,
scale=2,
units='percent',
),
60: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=60,
scale=2,
units='percent',
),
61: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=61,
scale=2,
units='percent',
),
62: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=62,
scale=2,
units='percent',
),
63: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=63,
scale=2,
units='percent',
),
64: Field(
name='status',
type=FIELD_TYPES['segment_lap_status'],
def_num=64,
),
65: Field(
name='uuid',
type=BASE_TYPES[0x07], # string
def_num=65,
),
66: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=66,
scale=128,
units='rpm',
),
67: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=67,
scale=128,
units='rpm',
),
68: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=68,
scale=128,
units='cycles',
),
69: Field(
name='front_gear_shift_count',
type=BASE_TYPES[0x84], # uint16
def_num=69,
),
70: Field(
name='rear_gear_shift_count',
type=BASE_TYPES[0x84], # uint16
def_num=70,
),
71: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=71,
scale=1000,
units='s',
),
72: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=72,
),
73: Field( # Average left platform center offset
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=73,
units='mm',
),
74: Field( # Average right platform center offset
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=74,
units='mm',
),
75: Field( # Average left power phase angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=75,
scale=0.7111111,
units='degrees',
),
76: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=76,
scale=0.7111111,
units='degrees',
),
77: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=77,
scale=0.7111111,
units='degrees',
),
78: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=78,
scale=0.7111111,
units='degrees',
),
79: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=79,
units='watts',
),
80: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=80,
units='watts',
),
81: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=81,
units='rpm',
),
82: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=82,
units='rpm',
),
83: Field( # Manufacturer that produced the segment
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=83,
),
253: FIELD_TYPE_TIMESTAMP, # Lap end time.
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
149: MessageType( # Unique Identification data for an individual segment leader within a segment file
name='segment_leaderboard_entry',
mesg_num=149,
fields={
0: Field( # Friendly name assigned to leader
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # Leader classification
name='type',
type=FIELD_TYPES['segment_leaderboard_type'],
def_num=1,
),
2: Field( # Primary user ID of this leader
name='group_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=2,
),
3: Field( # ID of the activity associated with this leader time
name='activity_id',
type=BASE_TYPES[0x86], # uint32
def_num=3,
),
4: Field( # Segment Time (includes pauses)
name='segment_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
            5: Field(  # String version of the activity_id. 21 characters long, expressed in decimal
name='activity_id_string',
type=BASE_TYPES[0x07], # string
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
    150: MessageType(  # Navigation and race evaluation point for a segment, describing a point along the segment path and the time it took each segment leader to reach that point
name='segment_point',
mesg_num=150,
fields={
1: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field( # Accumulated distance along the segment at the described point
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field( # Accumulated altitude along the segment at the described point
name='altitude',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=5,
offset=500,
units='m',
),
            5: Field(  # Accumulated time each leader board member required to reach the described point. This value is zero for all leader board members at the starting point of the segment.
name='leader_time',
type=BASE_TYPES[0x86], # uint32
def_num=5,
scale=1000,
units='s',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
158: MessageType(
name='workout_session',
mesg_num=158,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
2: Field(
name='num_valid_steps',
type=BASE_TYPES[0x84], # uint16
def_num=2,
),
3: Field(
name='first_step_index',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
4: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=100,
units='m',
),
5: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
159: MessageType(
name='watchface_settings',
mesg_num=159,
fields={
0: Field(
name='mode',
type=FIELD_TYPES['watchface_mode'],
def_num=0,
),
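            # Subfields give alternate interpretations of a single field; the parser selects one
            # by matching the value of a reference field (here 'mode') against its ref_fields.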
1: Field(
name='layout',
type=BASE_TYPES[0x0D], # byte
def_num=1,
subfields=(
SubField(
name='analog_layout',
def_num=1,
type=FIELD_TYPES['analog_watchface_layout'],
ref_fields=(
ReferenceField(
name='mode',
def_num=0,
value='analog',
raw_value=1,
),
),
),
SubField(
name='digital_layout',
def_num=1,
type=FIELD_TYPES['digital_watchface_layout'],
ref_fields=(
ReferenceField(
name='mode',
def_num=0,
value='digital',
raw_value=0,
),
),
),
),
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
160: MessageType(
name='gps_metadata',
mesg_num=160,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field(
name='enhanced_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=5,
offset=500,
units='m',
),
4: Field(
name='enhanced_speed',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='m/s',
),
5: Field(
name='heading',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
units='degrees',
),
6: Field( # Used to correlate UTC to system time if the timestamp of the message is in system time. This UTC time is derived from the GPS data.
name='utc_timestamp',
type=FIELD_TYPES['date_time'],
def_num=6,
units='s',
),
7: Field( # velocity[0] is lon velocity. Velocity[1] is lat velocity. Velocity[2] is altitude velocity.
name='velocity',
type=BASE_TYPES[0x83], # sint16
def_num=7,
scale=100,
units='m/s',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp.
},
),
161: MessageType(
name='camera_event',
mesg_num=161,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field(
name='camera_event_type',
type=FIELD_TYPES['camera_event_type'],
def_num=1,
),
2: Field(
name='camera_file_uuid',
type=BASE_TYPES[0x07], # string
def_num=2,
),
3: Field(
name='camera_orientation',
type=FIELD_TYPES['camera_orientation_type'],
def_num=3,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp.
},
),
162: MessageType(
name='timestamp_correlation',
mesg_num=162,
fields={
0: Field( # Fractional part of the UTC timestamp at the time the system timestamp was recorded.
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field( # Whole second part of the system timestamp
name='system_timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
units='s',
),
2: Field( # Fractional part of the system timestamp
name='fractional_system_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=32768,
units='s',
),
3: Field( # timestamp epoch expressed in local time used to convert timestamps to local time
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=3,
units='s',
),
4: Field( # Millisecond part of the UTC timestamp at the time the system timestamp was recorded.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='ms',
),
5: Field( # Millisecond part of the system timestamp
name='system_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='ms',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of UTC timestamp at the time the system timestamp was recorded.
},
),
164: MessageType(
name='gyroscope_data',
mesg_num=164,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
            1: Field(  # Each time in the array describes the time at which the gyro sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in gyro_x and gyro_y and gyro_z
name='sample_time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
            2: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='gyro_x',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='counts',
),
            3: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='gyro_y',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='counts',
),
            4: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='gyro_z',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='counts',
),
5: Field( # Calibrated gyro reading
name='calibrated_gyro_x',
type=BASE_TYPES[0x88], # float32
def_num=5,
units='deg/s',
),
6: Field( # Calibrated gyro reading
name='calibrated_gyro_y',
type=BASE_TYPES[0x88], # float32
def_num=6,
units='deg/s',
),
7: Field( # Calibrated gyro reading
name='calibrated_gyro_z',
type=BASE_TYPES[0x88], # float32
def_num=7,
units='deg/s',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
165: MessageType(
name='accelerometer_data',
mesg_num=165,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
            1: Field(  # Each time in the array describes the time at which the accelerometer sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in accel_x and accel_y and accel_z
name='sample_time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
            2: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='accel_x',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='counts',
),
            3: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='accel_y',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='counts',
),
            4: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='accel_z',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='counts',
),
5: Field( # Calibrated accel reading
name='calibrated_accel_x',
type=BASE_TYPES[0x88], # float32
def_num=5,
units='g',
),
6: Field( # Calibrated accel reading
name='calibrated_accel_y',
type=BASE_TYPES[0x88], # float32
def_num=6,
units='g',
),
7: Field( # Calibrated accel reading
name='calibrated_accel_z',
type=BASE_TYPES[0x88], # float32
def_num=7,
units='g',
),
8: Field( # Calibrated accel reading
name='compressed_calibrated_accel_x',
type=BASE_TYPES[0x83], # sint16
def_num=8,
units='mG',
),
9: Field( # Calibrated accel reading
name='compressed_calibrated_accel_y',
type=BASE_TYPES[0x83], # sint16
def_num=9,
units='mG',
),
10: Field( # Calibrated accel reading
name='compressed_calibrated_accel_z',
type=BASE_TYPES[0x83], # sint16
def_num=10,
units='mG',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
167: MessageType(
name='three_d_sensor_calibration',
mesg_num=167,
fields={
0: Field( # Indicates which sensor the calibration is for
name='sensor_type',
type=FIELD_TYPES['sensor_type'],
def_num=0,
),
1: Field( # Calibration factor used to convert from raw ADC value to degrees, g, etc.
name='calibration_factor',
type=BASE_TYPES[0x86], # uint32
def_num=1,
subfields=(
SubField( # Accelerometer calibration factor
name='accel_cal_factor',
def_num=1,
type=BASE_TYPES[0x86], # uint32
units='g',
ref_fields=(
ReferenceField(
name='sensor_type',
def_num=0,
value='accelerometer',
raw_value=0,
),
),
),
SubField( # Gyro calibration factor
name='gyro_cal_factor',
def_num=1,
type=BASE_TYPES[0x86], # uint32
units='deg/s',
ref_fields=(
ReferenceField(
name='sensor_type',
def_num=0,
value='gyroscope',
raw_value=1,
),
),
),
),
),
2: Field( # Calibration factor divisor
name='calibration_divisor',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='counts',
),
3: Field( # Level shift value used to shift the ADC value back into range
name='level_shift',
type=BASE_TYPES[0x86], # uint32
def_num=3,
),
4: Field( # Internal calibration factors, one for each: xy, yx, zx
name='offset_cal',
type=BASE_TYPES[0x85], # sint32
def_num=4,
),
5: Field( # 3 x 3 rotation matrix (row major)
name='orientation_matrix',
type=BASE_TYPES[0x85], # sint32
def_num=5,
scale=65535,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
169: MessageType(
name='video_frame',
mesg_num=169,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # Number of the frame that the timestamp and timestamp_ms correlate to
name='frame_number',
type=BASE_TYPES[0x86], # uint32
def_num=1,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
174: MessageType(
name='obdii_data',
mesg_num=174,
fields={
0: Field( # Fractional part of timestamp, added to timestamp
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
            1: Field(  # Offset of PID reading [i] from start_timestamp+start_timestamp_ms. Readings may span across seconds.
name='time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
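            # i.e. the absolute time of reading [i] is start_timestamp (plus start_timestamp_ms) plus time_offset[i].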
2: Field( # Parameter ID
name='pid',
type=BASE_TYPES[0x0D], # byte
def_num=2,
),
3: Field( # Raw parameter data
name='raw_data',
type=BASE_TYPES[0x0D], # byte
def_num=3,
),
4: Field( # Optional, data size of PID[i]. If not specified refer to SAE J1979.
name='pid_data_size',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field( # System time associated with sample expressed in ms, can be used instead of time_offset. There will be a system_time value for each raw_data element. For multibyte pids the system_time is repeated.
name='system_time',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field( # Timestamp of first sample recorded in the message. Used with time_offset to generate time of each sample
name='start_timestamp',
type=FIELD_TYPES['date_time'],
def_num=6,
),
7: Field( # Fractional part of start_timestamp
name='start_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=7,
units='ms',
),
253: FIELD_TYPE_TIMESTAMP, # Timestamp message was output
},
),
177: MessageType(
name='nmea_sentence',
mesg_num=177,
fields={
0: Field( # Fractional part of timestamp, added to timestamp
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # NMEA sentence
name='sentence',
type=BASE_TYPES[0x07], # string
def_num=1,
),
253: FIELD_TYPE_TIMESTAMP, # Timestamp message was output
},
),
178: MessageType(
name='aviation_attitude',
mesg_num=178,
fields={
0: Field( # Fractional part of timestamp, added to timestamp
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # System time associated with sample expressed in ms.
name='system_time',
type=BASE_TYPES[0x86], # uint32
def_num=1,
units='ms',
),
2: Field( # Range -PI/2 to +PI/2
name='pitch',
type=BASE_TYPES[0x83], # sint16
def_num=2,
scale=10430.38,
units='radians',
),
3: Field( # Range -PI to +PI
name='roll',
type=BASE_TYPES[0x83], # sint16
def_num=3,
scale=10430.38,
units='radians',
),
4: Field( # Range -78.4 to +78.4 (-8 Gs to 8 Gs)
name='accel_lateral',
type=BASE_TYPES[0x83], # sint16
def_num=4,
scale=100,
units='m/s^2',
),
5: Field( # Range -78.4 to +78.4 (-8 Gs to 8 Gs)
name='accel_normal',
type=BASE_TYPES[0x83], # sint16
def_num=5,
scale=100,
units='m/s^2',
),
6: Field( # Range -8.727 to +8.727 (-500 degs/sec to +500 degs/sec)
name='turn_rate',
type=BASE_TYPES[0x83], # sint16
def_num=6,
scale=1024,
units='radians/second',
),
7: Field(
name='stage',
type=FIELD_TYPES['attitude_stage'],
def_num=7,
),
            8: Field(  # The percent complete of the current attitude stage. Set to 0 for attitude stages 0, 1 and 2 and to 100 for attitude stage 3 by AHRS modules that do not support it. Range: 0 to 100.
name='attitude_stage_complete',
type=BASE_TYPES[0x02], # uint8
def_num=8,
units='%',
),
9: Field( # Track Angle/Heading Range 0 - 2pi
name='track',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=10430.38,
units='radians',
),
10: Field(
name='validity',
type=FIELD_TYPES['attitude_validity'],
def_num=10,
),
253: FIELD_TYPE_TIMESTAMP, # Timestamp message was output
},
),
184: MessageType(
name='video',
mesg_num=184,
fields={
0: Field(
name='url',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='hosting_provider',
type=BASE_TYPES[0x07], # string
def_num=1,
),
2: Field( # Playback time of video
name='duration',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='ms',
),
},
),
185: MessageType(
name='video_title',
mesg_num=185,
fields={
0: Field( # Total number of title parts
name='message_count',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='text',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field( # Long titles will be split into multiple parts
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
186: MessageType(
name='video_description',
mesg_num=186,
fields={
0: Field( # Total number of description parts
name='message_count',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='text',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field( # Long descriptions will be split into multiple parts
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
187: MessageType(
name='video_clip',
mesg_num=187,
fields={
0: Field(
name='clip_number',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='start_timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field(
name='start_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=2,
),
3: Field(
name='end_timestamp',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field(
name='end_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=4,
),
6: Field( # Start of clip in video time
name='clip_start',
type=BASE_TYPES[0x86], # uint32
def_num=6,
units='ms',
),
7: Field( # End of clip in video time
name='clip_end',
type=BASE_TYPES[0x86], # uint32
def_num=7,
units='ms',
),
},
),
188: MessageType(
name='ohr_settings',
mesg_num=188,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['switch'],
def_num=0,
),
},
),
200: MessageType(
name='exd_screen_configuration',
mesg_num=200,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field( # number of fields in screen
name='field_count',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='layout',
type=FIELD_TYPES['exd_layout'],
def_num=2,
),
3: Field(
name='screen_enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
},
),
201: MessageType(
name='exd_data_field_configuration',
mesg_num=201,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
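            # Component fields unpack sub-values from this byte by bit position:
            # bits 0-3 carry field_id and bits 4-7 carry concept_count.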
1: Field(
name='concept_field',
type=BASE_TYPES[0x0D], # byte
def_num=1,
components=(
ComponentField(
name='field_id',
def_num=2,
accumulate=False,
bits=4,
bit_offset=0,
),
ComponentField(
name='concept_count',
def_num=3,
accumulate=False,
bits=4,
bit_offset=4,
),
),
),
2: Field(
name='field_id',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='concept_count',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='display_type',
type=FIELD_TYPES['exd_display_type'],
def_num=4,
),
5: Field(
name='title',
type=BASE_TYPES[0x07], # string
def_num=5,
),
},
),
202: MessageType(
name='exd_data_concept_configuration',
mesg_num=202,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='concept_field',
type=BASE_TYPES[0x0D], # byte
def_num=1,
components=(
ComponentField(
name='field_id',
def_num=2,
accumulate=False,
bits=4,
bit_offset=0,
),
ComponentField(
name='concept_index',
def_num=3,
accumulate=False,
bits=4,
bit_offset=4,
),
),
),
2: Field(
name='field_id',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='concept_index',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data_page',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field(
name='concept_key',
type=BASE_TYPES[0x02], # uint8
def_num=5,
),
6: Field(
name='scaling',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
8: Field(
name='data_units',
type=FIELD_TYPES['exd_data_units'],
def_num=8,
),
9: Field(
name='qualifier',
type=FIELD_TYPES['exd_qualifiers'],
def_num=9,
),
10: Field(
name='descriptor',
type=FIELD_TYPES['exd_descriptors'],
def_num=10,
),
11: Field(
name='is_signed',
type=FIELD_TYPES['bool'],
def_num=11,
),
},
),
206: MessageType( # Must be logged before developer field is used
name='field_description',
mesg_num=206,
fields={
0: Field(
name='developer_data_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='field_definition_number',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='fit_base_type_id',
type=FIELD_TYPES['fit_base_type'],
def_num=2,
),
3: Field(
name='field_name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
4: Field(
name='array',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field(
name='components',
type=BASE_TYPES[0x07], # string
def_num=5,
),
6: Field(
name='scale',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field(
name='offset',
type=BASE_TYPES[0x01], # sint8
def_num=7,
),
8: Field(
name='units',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='bits',
type=BASE_TYPES[0x07], # string
def_num=9,
),
10: Field(
name='accumulate',
type=BASE_TYPES[0x07], # string
def_num=10,
),
13: Field(
name='fit_base_unit_id',
type=FIELD_TYPES['fit_base_unit'],
def_num=13,
),
14: Field(
name='native_mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=14,
),
15: Field(
name='native_field_num',
type=BASE_TYPES[0x02], # uint8
def_num=15,
),
},
),
207: MessageType( # Must be logged before field description
name='developer_data_id',
mesg_num=207,
fields={
0: Field(
name='developer_id',
type=BASE_TYPES[0x0D], # byte
def_num=0,
),
1: Field(
name='application_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='manufacturer_id',
type=FIELD_TYPES['manufacturer'],
def_num=2,
),
3: Field(
name='developer_data_index',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='application_version',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
},
),
208: MessageType(
name='magnetometer_data',
mesg_num=208,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
            1: Field(  # Each time in the array describes the time at which the compass sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in cmps_x and cmps_y and cmps_z
name='sample_time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
            2: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='mag_x',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='counts',
),
            3: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='mag_y',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='counts',
),
            4: Field(  # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='mag_z',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='counts',
),
5: Field( # Calibrated Magnetometer reading
name='calibrated_mag_x',
type=BASE_TYPES[0x88], # float32
def_num=5,
units='G',
),
6: Field( # Calibrated Magnetometer reading
name='calibrated_mag_y',
type=BASE_TYPES[0x88], # float32
def_num=6,
units='G',
),
7: Field( # Calibrated Magnetometer reading
name='calibrated_mag_z',
type=BASE_TYPES[0x88], # float32
def_num=7,
units='G',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
######################### Activity File Messages #########################
34: MessageType(
name='activity',
mesg_num=34,
fields={
0: Field( # Exclude pauses
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=0,
scale=1000,
units='s',
),
1: Field(
name='num_sessions',
type=BASE_TYPES[0x84], # uint16
def_num=1,
),
2: Field(
name='type',
type=FIELD_TYPES['activity'],
def_num=2,
),
3: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=3,
),
4: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=4,
),
5: Field( # timestamp epoch expressed in local time, used to convert activity timestamps to local time
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=5,
),
6: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
###################### Blood Pressure File Messages ######################
51: MessageType(
name='blood_pressure',
mesg_num=51,
fields={
0: Field(
name='systolic_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='mmHg',
),
1: Field(
name='diastolic_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='mmHg',
),
2: Field(
name='mean_arterial_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='mmHg',
),
3: Field(
name='map_3_sample_mean',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='mmHg',
),
4: Field(
name='map_morning_values',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='mmHg',
),
5: Field(
name='map_evening_values',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='mmHg',
),
6: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=6,
units='bpm',
),
7: Field(
name='heart_rate_type',
type=FIELD_TYPES['hr_type'],
def_num=7,
),
8: Field(
name='status',
type=FIELD_TYPES['bp_status'],
def_num=8,
),
9: Field( # Associates this blood pressure message to a user. This corresponds to the index of the user profile message in the blood pressure file.
name='user_profile_index',
type=FIELD_TYPES['message_index'],
def_num=9,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
########################## Course File Messages ##########################
31: MessageType(
name='course',
mesg_num=31,
fields={
4: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=4,
),
5: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=5,
),
6: Field(
name='capabilities',
type=FIELD_TYPES['course_capabilities'],
def_num=6,
),
7: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=7,
),
},
),
########################## Device File Messages ##########################
35: MessageType(
name='software',
mesg_num=35,
fields={
3: Field(
name='version',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=100,
),
5: Field(
name='part_number',
type=BASE_TYPES[0x07], # string
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
########################## Goals File Messages ###########################
15: MessageType(
name='goal',
mesg_num=15,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
2: Field(
name='start_date',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='end_date',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field(
name='type',
type=FIELD_TYPES['goal'],
def_num=4,
),
5: Field(
name='value',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field(
name='repeat',
type=FIELD_TYPES['bool'],
def_num=6,
),
7: Field(
name='target_value',
type=BASE_TYPES[0x86], # uint32
def_num=7,
),
8: Field(
name='recurrence',
type=FIELD_TYPES['goal_recurrence'],
def_num=8,
),
9: Field(
name='recurrence_value',
type=BASE_TYPES[0x84], # uint16
def_num=9,
),
10: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=10,
),
11: Field(
name='source',
type=FIELD_TYPES['goal_source'],
def_num=11,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
######################## Monitoring File Messages ########################
103: MessageType(
name='monitoring_info',
mesg_num=103,
fields={
            0: Field(  # Used to convert activity timestamps to local time if device does not support time zone and daylight savings time correction.
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=0,
units='s',
),
1: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=1,
),
3: Field( # Indexed by activity_type
name='cycles_to_distance',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=5000,
units='m/cycle',
),
4: Field( # Indexed by activity_type
name='cycles_to_calories',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=5000,
units='kcal/cycle',
),
5: Field(
name='resting_metabolic_rate',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='kcal/day',
),
253: FIELD_TYPE_TIMESTAMP,
},
),
############################# Other Messages #############################
145: MessageType(
name='memo_glob',
mesg_num=145,
fields={
0: Field( # Block of utf8 bytes
name='memo',
type=BASE_TYPES[0x0D], # byte
def_num=0,
),
            1: Field(  # Allows relating the glob to another mesg. If used, it is only required for the first part of each memo_glob.
name='message_number',
type=BASE_TYPES[0x84], # uint16
def_num=1,
),
2: Field( # Index of external mesg
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=2,
),
250: Field( # Sequence number of memo blocks
name='part_index',
type=BASE_TYPES[0x86], # uint32
def_num=250,
),
},
),
######################### Schedule File Messages #########################
28: MessageType(
name='schedule',
mesg_num=28,
fields={
0: Field( # Corresponds to file_id of scheduled workout / course.
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=0,
),
1: Field( # Corresponds to file_id of scheduled workout / course.
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=1,
subfields=(
SubField(
name='garmin_product',
def_num=1,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=0,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
2: Field( # Corresponds to file_id of scheduled workout / course.
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=2,
),
3: Field( # Corresponds to file_id of scheduled workout / course.
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field( # TRUE if this activity has been started
name='completed',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='type',
type=FIELD_TYPES['schedule'],
def_num=5,
),
6: Field(
name='scheduled_time',
type=FIELD_TYPES['local_date_time'],
def_num=6,
),
},
),
######################### Segment File Messages ##########################
148: MessageType( # Unique Identification data for a segment file
name='segment_id',
mesg_num=148,
fields={
0: Field( # Friendly name assigned to segment
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # UUID of the segment
name='uuid',
type=BASE_TYPES[0x07], # string
def_num=1,
),
2: Field( # Sport associated with the segment
name='sport',
type=FIELD_TYPES['sport'],
def_num=2,
),
3: Field( # Segment enabled for evaluation
name='enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
4: Field( # Primary key of the user that created the segment
name='user_profile_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
5: Field( # ID of the device that created the segment
name='device_id',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field( # Index for the Leader Board entry selected as the default race participant
name='default_race_leader',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field( # Indicates if any segments should be deleted
name='delete_status',
type=FIELD_TYPES['segment_delete_status'],
def_num=7,
),
8: Field( # Indicates how the segment was selected to be sent to the device
name='selection_type',
type=FIELD_TYPES['segment_selection_type'],
def_num=8,
),
},
),
####################### Segment List File Messages #######################
151: MessageType( # Summary of the unique segment and leaderboard information associated with a segment file. This message is used to compile a segment list file describing all segment files on a device. The segment list file is used when refreshing the contents of a segment file with the latest available leaderboard information.
name='segment_file',
mesg_num=151,
fields={
1: Field( # UUID of the segment file
name='file_uuid',
type=BASE_TYPES[0x07], # string
def_num=1,
),
3: Field( # Enabled state of the segment file
name='enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
4: Field( # Primary key of the user that created the segment file
name='user_profile_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
7: Field( # Leader type of each leader in the segment file
name='leader_type',
type=FIELD_TYPES['segment_leaderboard_type'],
def_num=7,
),
8: Field( # Group primary key of each leader in the segment file
name='leader_group_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=8,
),
9: Field( # Activity ID of each leader in the segment file
name='leader_activity_id',
type=BASE_TYPES[0x86], # uint32
def_num=9,
),
            10: Field(  # String version of the activity ID of each leader in the segment file. 21 characters long for each ID, expressed in decimal
name='leader_activity_id_string',
type=BASE_TYPES[0x07], # string
def_num=10,
),
11: Field( # Index for the Leader Board entry selected as the default race participant
name='default_race_leader',
type=BASE_TYPES[0x02], # uint8
def_num=11,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
######################### Settings File Messages #########################
2: MessageType(
name='device_settings',
mesg_num=2,
fields={
0: Field( # Index into time zone arrays.
name='active_time_zone',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field( # Offset from system time. Required to convert timestamp from system time to UTC.
name='utc_offset',
type=BASE_TYPES[0x86], # uint32
def_num=1,
),
2: Field( # Offset from system time.
name='time_offset',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='s',
),
4: Field( # Display mode for the time
name='time_mode',
type=FIELD_TYPES['time_mode'],
def_num=4,
),
5: Field( # timezone offset in 1/4 hour increments
name='time_zone_offset',
type=BASE_TYPES[0x01], # sint8
def_num=5,
scale=4,
units='hr',
),
12: Field( # Mode for backlight
name='backlight_mode',
type=FIELD_TYPES['backlight_mode'],
def_num=12,
),
36: Field( # Enabled state of the activity tracker functionality
name='activity_tracker_enabled',
type=FIELD_TYPES['bool'],
def_num=36,
),
39: Field( # UTC timestamp used to set the devices clock and date
name='clock_time',
type=FIELD_TYPES['date_time'],
def_num=39,
),
40: Field( # Bitfield to configure enabled screens for each supported loop
name='pages_enabled',
type=BASE_TYPES[0x84], # uint16
def_num=40,
),
46: Field( # Enabled state of the move alert
name='move_alert_enabled',
type=FIELD_TYPES['bool'],
def_num=46,
),
47: Field( # Display mode for the date
name='date_mode',
type=FIELD_TYPES['date_mode'],
def_num=47,
),
55: Field(
name='display_orientation',
type=FIELD_TYPES['display_orientation'],
def_num=55,
),
56: Field(
name='mounting_side',
type=FIELD_TYPES['side'],
def_num=56,
),
57: Field( # Bitfield to indicate one page as default for each supported loop
name='default_page',
type=BASE_TYPES[0x84], # uint16
def_num=57,
),
58: Field( # Minimum steps before an autosync can occur
name='autosync_min_steps',
type=BASE_TYPES[0x84], # uint16
def_num=58,
units='steps',
),
59: Field( # Minimum minutes before an autosync can occur
name='autosync_min_time',
type=BASE_TYPES[0x84], # uint16
def_num=59,
units='minutes',
),
80: Field( # Enable auto-detect setting for the lactate threshold feature.
name='lactate_threshold_autodetect_enabled',
type=FIELD_TYPES['bool'],
def_num=80,
),
86: Field( # Automatically upload using BLE
name='ble_auto_upload_enabled',
type=FIELD_TYPES['bool'],
def_num=86,
),
89: Field( # Helps to conserve battery by changing modes
name='auto_sync_frequency',
type=FIELD_TYPES['auto_sync_frequency'],
def_num=89,
),
            90: Field(  # Allows enabling/disabling auto-activity detect settings for specific activities
name='auto_activity_detect',
type=FIELD_TYPES['auto_activity_detect'],
def_num=90,
),
94: Field( # Number of screens configured to display
name='number_of_screens',
type=BASE_TYPES[0x02], # uint8
def_num=94,
),
95: Field( # Smart Notification display orientation
name='smart_notification_display_orientation',
type=FIELD_TYPES['display_orientation'],
def_num=95,
),
},
),
###################### Sport Settings File Messages ######################
7: MessageType(
name='zones_target',
mesg_num=7,
fields={
1: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='threshold_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='functional_threshold_power',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
5: Field(
name='hr_calc_type',
type=FIELD_TYPES['hr_zone_calc'],
def_num=5,
),
7: Field(
name='pwr_calc_type',
type=FIELD_TYPES['pwr_zone_calc'],
def_num=7,
),
},
),
########################## Totals File Messages ##########################
33: MessageType(
name='totals',
mesg_num=33,
fields={
0: Field( # Excludes pauses
name='timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=0,
units='s',
),
1: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=1,
units='m',
),
2: Field(
name='calories',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='kcal',
),
3: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=3,
),
4: Field( # Includes pauses
name='elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
units='s',
),
5: Field(
name='sessions',
type=BASE_TYPES[0x84], # uint16
def_num=5,
),
6: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=6,
units='s',
),
9: Field(
name='sport_index',
type=BASE_TYPES[0x02], # uint8
def_num=9,
),
253: FIELD_TYPE_TIMESTAMP,
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
####################### Weight Scale File Messages #######################
30: MessageType(
name='weight_scale',
mesg_num=30,
fields={
0: Field(
name='weight',
type=FIELD_TYPES['weight'],
def_num=0,
scale=100,
units='kg',
),
1: Field(
name='percent_fat',
type=BASE_TYPES[0x84], # uint16
def_num=1,
scale=100,
units='%',
),
2: Field(
name='percent_hydration',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=100,
units='%',
),
3: Field(
name='visceral_fat_mass',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=100,
units='kg',
),
4: Field(
name='bone_mass',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=100,
units='kg',
),
5: Field(
name='muscle_mass',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
units='kg',
),
7: Field(
name='basal_met',
type=BASE_TYPES[0x84], # uint16
def_num=7,
scale=4,
units='kcal/day',
),
8: Field(
name='physique_rating',
type=BASE_TYPES[0x02], # uint8
def_num=8,
),
9: Field( # ~4kJ per kcal, 0.25 allows max 16384 kcal
name='active_met',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=4,
units='kcal/day',
),
10: Field(
name='metabolic_age',
type=BASE_TYPES[0x02], # uint8
def_num=10,
units='years',
),
11: Field(
name='visceral_fat_rating',
type=BASE_TYPES[0x02], # uint8
def_num=11,
),
12: Field( # Associates this weight scale message to a user. This corresponds to the index of the user profile message in the weight scale file.
name='user_profile_index',
type=FIELD_TYPES['message_index'],
def_num=12,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
######################### Workout File Messages ##########################
26: MessageType(
name='workout',
mesg_num=26,
fields={
4: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=4,
),
5: Field(
name='capabilities',
type=FIELD_TYPES['workout_capabilities'],
def_num=5,
),
6: Field( # number of valid steps
name='num_valid_steps',
type=BASE_TYPES[0x84], # uint16
def_num=6,
),
8: Field(
name='wkt_name',
type=BASE_TYPES[0x07], # string
def_num=8,
),
11: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=11,
),
14: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=100,
units='m',
),
15: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=15,
),
},
),
}
| 33.844565 | 336 | 0.397916 |
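As a rough illustration of how a profile table like the one above is typically consumed, the sketch below looks up a message type by its number and undoes a field's scale and offset on a raw value. The names MESSAGE_TYPES and decode_field are placeholders rather than names taken from the file itself, it is assumed that the MessageType/Field constructor arguments shown above are exposed as attributes, and the arithmetic (value = raw / scale - offset) follows the usual FIT convention rather than anything guaranteed by this particular module.

def decode_field(message_types, mesg_num, field_def_num, raw_value):
    """Return (name, decoded_value, units) for one raw field value."""
    message_type = message_types[mesg_num]        # e.g. 160 -> 'gps_metadata'
    field = message_type.fields[field_def_num]    # e.g. 3 -> 'enhanced_altitude'
    value = raw_value
    if getattr(field, 'scale', None):
        value = value / field.scale               # undo the integer scaling
    if getattr(field, 'offset', None):
        value = value - field.offset              # undo the stored offset
    return field.name, value, getattr(field, 'units', None)

# Example (hypothetical): a raw enhanced_altitude of 2600 decodes to 2600 / 5 - 500 = 20.0 m.
# name, value, units = decode_field(MESSAGE_TYPES, 160, 3, 2600)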
3e5b857f8383e340919c32b08170a5b4cd5f70b7
| 820 |
py
|
Python
|
python-basic-project/unit08/myfinance.py
|
sharebook-kr/learningspoons-bootcamp-finance
|
0288f3f3b39f54420e4e9987f1de12892dc680ea
|
[
"MIT"
] | 9 |
2020-10-25T15:13:32.000Z
|
2022-03-26T11:27:21.000Z
|
python-basic-project/unit08/myfinance.py
|
sharebook-kr/learningspoons-bootcamp-finance
|
0288f3f3b39f54420e4e9987f1de12892dc680ea
|
[
"MIT"
] | null | null | null |
python-basic-project/unit08/myfinance.py
|
sharebook-kr/learningspoons-bootcamp-finance
|
0288f3f3b39f54420e4e9987f1de12892dc680ea
|
[
"MIT"
] | 7 |
2021-03-01T11:06:45.000Z
|
2022-03-14T07:06:04.000Z
|
import requests
from bs4 import BeautifulSoup
if __name__ == "__main__":
kospi = get_tickers(market=2)
kosdaq = get_tickers(market=3)
print(len(kospi))
print(len(kosdaq))
print(get_dvr("005930"))
| 24.117647 | 90 | 0.603659 |
3e5d6df95ec953ae6051ebc8540af72617f83181
| 7,209 |
py
|
Python
|
TravelWebsite/travello/admin.py
|
DSAnup/Django
|
76025d181bafbb41783912577f80ec728884549d
|
[
"MIT"
] | 1 |
2020-03-15T05:22:30.000Z
|
2020-03-15T05:22:30.000Z
|
TravelWebsite/travello/admin.py
|
DSAnup/Django
|
76025d181bafbb41783912577f80ec728884549d
|
[
"MIT"
] | null | null | null |
TravelWebsite/travello/admin.py
|
DSAnup/Django
|
76025d181bafbb41783912577f80ec728884549d
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.utils.html import format_html
from django.shortcuts import redirect
from .models import *
# Register your models here.
admin.site.register(CategoryDes)
admin.site.register(Destination, DestinationAdmin)
admin.site.register(Besttrip, BesttripAdmin)
admin.site.register(Testominal, TestomonialAdmin)
admin.site.register(Subscibtion, SubcriptionAdmin)
admin.site.register(Homeslider, HomesliderAdmin)
admin.site.register(Intro, IntroAdmin)
admin.site.register(FooterContact, FooterContent)
admin.site.register(HomeStatic, HomeFixedContent)
admin.site.register(about_us_fixed, AboutUs)
admin.site.register(why_choose_us, WhyChoose)
admin.site.register(team, OurTeam)
| 49.717241 | 164 | 0.659315 |
3e5e4207adc8922463d0a98148721a7ee4e6e6eb
| 1,428 |
py
|
Python
|
demos/cookie-clicker/cookie-clicker.py
|
Coding-Kakis/Automating-Shenanigans-in-Python
|
c8e00231468668fbe231e0b35e32b9e99d5bd458
|
[
"MIT"
] | 1 |
2021-09-11T13:05:17.000Z
|
2021-09-11T13:05:17.000Z
|
demos/cookie-clicker/cookie-clicker.py
|
Coding-Kakis/Automating-Shenanigans-in-Python
|
c8e00231468668fbe231e0b35e32b9e99d5bd458
|
[
"MIT"
] | null | null | null |
demos/cookie-clicker/cookie-clicker.py
|
Coding-Kakis/Automating-Shenanigans-in-Python
|
c8e00231468668fbe231e0b35e32b9e99d5bd458
|
[
"MIT"
] | null | null | null |
# Cookie clicker auto-clicker
# Works for the classic version here: https://orteil.dashnet.org/experiments/cookie/
import pyautogui
def locate_cookie():
"""
    Returns the location of the Big Cookie
Does not return until the cookie is found
"""
loc = None
    while loc is None:
loc = pyautogui.locateCenterOnScreen('rsrc/bigcookie.png')
return loc
def click_cookie(loc, ntimes):
"""
Moves mouse to `loc` and clicks `ntimes`
"""
x,y = loc
pyautogui.moveTo(x,y)
for _ in range(ntimes):
pyautogui.click()
def round():
"""
Does 1 round.
Returns `Yes` if user wants to continue
Returns `No` otherwise.
"""
loc = locate_cookie()
pyautogui.alert(
title = "Found cookie!",
text = str(loc))
while True:
number_of_times = pyautogui.prompt(
title = "Continue?",
text = "Click how many times?")
if not number_of_times.isdigit():
pyautogui.alert(
title = "Error!",
text = "Input isn't an integer!")
continue
break
number_of_times = int(number_of_times)
click_cookie(loc, number_of_times)
reply = pyautogui.confirm(
title = "Done!",
text = "Another round?",
buttons = ["Yes", "No"])
return reply
while True:
reply = round()
if reply == "No":
break
| 19.833333 | 84 | 0.573529 |
3e5e941943139ba0623e31d497e78bf7beb9106d
| 1,485 |
py
|
Python
|
esupa/templatetags/esupa.py
|
Abando/esupa
|
84888ff7d7879437659fd06a8707ac033f25b8ab
|
[
"Apache-2.0"
] | null | null | null |
esupa/templatetags/esupa.py
|
Abando/esupa
|
84888ff7d7879437659fd06a8707ac033f25b8ab
|
[
"Apache-2.0"
] | 4 |
2015-11-09T02:01:15.000Z
|
2016-01-20T14:51:13.000Z
|
esupa/templatetags/esupa.py
|
ekevoo/esupa
|
84888ff7d7879437659fd06a8707ac033f25b8ab
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2015, Ekevoo.com.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
#
from datetime import datetime
from django.template import Library
from django.template.defaultfilters import date
from django.utils.safestring import mark_safe
from django.utils.timesince import timesince, timeuntil
from django.utils.translation import ugettext
register = Library()
| 37.125 | 107 | 0.703704 |
3e62b645957319fa784b6eef70fbe8c8812a5575
| 3,305 |
py
|
Python
|
ivy/pages.py
|
swsch/ivy
|
4932cf7541acff13815be613b0f3335b21c86670
|
[
"Unlicense"
] | null | null | null |
ivy/pages.py
|
swsch/ivy
|
4932cf7541acff13815be613b0f3335b21c86670
|
[
"Unlicense"
] | null | null | null |
ivy/pages.py
|
swsch/ivy
|
4932cf7541acff13815be613b0f3335b21c86670
|
[
"Unlicense"
] | null | null | null |
# ------------------------------------------------------------------------------
# This module renders and writes HTML pages to disk.
# ------------------------------------------------------------------------------
import re
import os
from . import site
from . import events
from . import filters
from . import utils
from . import templates
from . import hashes
from typing import List
from .nodes import Node
# A Page instance represents a single HTML page in the rendered site.
| 35.537634 | 80 | 0.579728 |
3e64ce743607e76cfc572cc4ea2cfe77fba2b173
| 5,646 |
py
|
Python
|
mvyaml/mvyaml.py
|
gchiesa/mvyaml
|
6d4c580bc596d220b45e6a6ccf9b2c3ef582f554
|
[
"MIT"
] | null | null | null |
mvyaml/mvyaml.py
|
gchiesa/mvyaml
|
6d4c580bc596d220b45e6a6ccf9b2c3ef582f554
|
[
"MIT"
] | null | null | null |
mvyaml/mvyaml.py
|
gchiesa/mvyaml
|
6d4c580bc596d220b45e6a6ccf9b2c3ef582f554
|
[
"MIT"
] | null | null | null |
"""Main module."""
from copy import deepcopy
from datetime import datetime
from difflib import Differ
from io import StringIO
from typing import IO, Iterable, AnyStr
from datadiff.tools import assert_equal
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
def as_yaml(data: Iterable) -> AnyStr:
yaml = YAML()
output = StringIO()
yaml.dump(data, output)
return output.getvalue()
def override(self, data: [Iterable]):
self._curr_data = CommentedMap()
self._curr_data.update(data)
self._commit(comment='Overridden')
return self
def _commit(self, *args, **kwargs):
return self._commit_head(*args, **kwargs)
def _commit_head(self, tag: AnyStr = None, comment: AnyStr = None):
"""
    apply the modifications on curr_data to the underlying opened version
and create a new tag
"""
commented_map = CommentedMap()
commented_map.update(self._curr_data or self.data)
if tag:
self._raw[tag] = commented_map
self._raw['__current'] = tag
else:
new_tag = self._make_tag()
self._raw.insert(2, new_tag, commented_map, comment=comment)
self._raw['__current'] = new_tag
self._curr_version = None
self._curr_data = None
return self
def _commit_tail(self, tag: AnyStr = None, comment: AnyStr = None):
"""
    apply the modifications on curr_data to the underlying opened version
and create a new tag
"""
commented_map = CommentedMap()
commented_map.update(self._curr_data or self.data)
if tag:
self._raw[tag] = commented_map
self._raw['__current'] = tag
else:
new_tag = self._make_tag()
self._raw.insert(len(self._raw.keys()), new_tag, commented_map, comment=comment)
self._raw['__current'] = new_tag
self._curr_version = None
self._curr_data = None
return self
def _has_changes(self):
orig = self._raw[self._curr_version or self.current]
current = self._curr_data or self.data
try:
assert_equal(orig, current)
except AssertionError:
return True
return False
| 32.079545 | 108 | 0.613355 |
3e667e1d0cd277296f1a4241baaa4af12a192a1e
| 1,889 |
py
|
Python
|
tests/test_monkeypatch.py
|
marcdemers/py_vollib_vectorized
|
0c2519ff58e3caf2caee37ca37d878e6e5e1eefd
|
[
"MIT"
] | 40 |
2020-12-17T16:36:32.000Z
|
2022-02-07T20:11:26.000Z
|
tests/test_monkeypatch.py
|
marcdemers/py_vollib_vectorized
|
0c2519ff58e3caf2caee37ca37d878e6e5e1eefd
|
[
"MIT"
] | 8 |
2021-01-20T04:17:50.000Z
|
2022-02-18T07:02:27.000Z
|
tests/test_monkeypatch.py
|
marcdemers/py_vollib_vectorized
|
0c2519ff58e3caf2caee37ca37d878e6e5e1eefd
|
[
"MIT"
] | 13 |
2020-12-30T21:05:10.000Z
|
2022-03-27T12:30:26.000Z
|
import unittest
from unittest import TestCase
| 42.931818 | 137 | 0.728957 |
3e6846fed01d2e5081085a1f9b9ca2203cbb1dad
| 1,137 |
py
|
Python
|
b2share/modules/deposit/search.py
|
hjhsalo/b2share-new
|
2a2a961f7cc3a5353850e9a409fd7e879c715b0b
|
[
"MIT"
] | null | null | null |
b2share/modules/deposit/search.py
|
hjhsalo/b2share-new
|
2a2a961f7cc3a5353850e9a409fd7e879c715b0b
|
[
"MIT"
] | null | null | null |
b2share/modules/deposit/search.py
|
hjhsalo/b2share-new
|
2a2a961f7cc3a5353850e9a409fd7e879c715b0b
|
[
"MIT"
] | 1 |
2020-09-29T10:56:03.000Z
|
2020-09-29T10:56:03.000Z
|
from elasticsearch_dsl import Q, TermsFacet
from flask import has_request_context
from flask_login import current_user
from invenio_search import RecordsSearch
from invenio_search.api import DefaultFilter
from .permissions import admin_permission_factory
def deposits_filter():
"""Filter list of deposits.
    Permits the user to see all deposits if:
* The user is an admin (see
func:`invenio_deposit.permissions:admin_permission_factory`).
* It's called outside of a request.
    Otherwise, it filters out any deposit where the user is not the owner.
"""
if not has_request_context() or admin_permission_factory().can():
return Q()
else:
return Q(
'match', **{'_deposit.owners': getattr(current_user, 'id', 0)}
)
| 25.266667 | 74 | 0.664908 |
3e69d58aa5e27029fd5fb9a2126945c9c542b4c9
| 1,586 |
py
|
Python
|
code/find_nconfsources.py
|
fornax-navo/fornax-demo-notebooks
|
49525d5bed3440d0d1903c29b9a1af8e0ff7e975
|
[
"BSD-3-Clause"
] | 1 |
2022-02-03T18:12:59.000Z
|
2022-02-03T18:12:59.000Z
|
code/find_nconfsources.py
|
fornax-navo/fornax-demo-notebooks
|
49525d5bed3440d0d1903c29b9a1af8e0ff7e975
|
[
"BSD-3-Clause"
] | 1 |
2022-03-11T21:17:35.000Z
|
2022-03-11T22:28:46.000Z
|
code/find_nconfsources.py
|
fornax-navo/fornax-demo-notebooks
|
49525d5bed3440d0d1903c29b9a1af8e0ff7e975
|
[
"BSD-3-Clause"
] | 2 |
2022-02-01T00:57:35.000Z
|
2022-02-13T22:20:55.000Z
|
import numpy as np
from determine_source_type import determine_source_type
#function to figure out how many sources are in cutout
#and set up necessary tractor input for those sources
| 38.682927 | 111 | 0.663934 |
3e6ad0d35aefd868861d6a14144cf80665b8e7ea
| 274 |
py
|
Python
|
setup.py
|
dalejung/earthdragon
|
6fc9308288361bbe54d1d0107b4a77e3f27cd9be
|
[
"MIT"
] | 1 |
2019-12-02T15:10:49.000Z
|
2019-12-02T15:10:49.000Z
|
setup.py
|
dalejung/earthdragon
|
6fc9308288361bbe54d1d0107b4a77e3f27cd9be
|
[
"MIT"
] | 5 |
2015-08-13T16:00:04.000Z
|
2016-03-14T18:43:11.000Z
|
setup.py
|
dalejung/earthdragon
|
6fc9308288361bbe54d1d0107b4a77e3f27cd9be
|
[
"MIT"
] | null | null | null |
from distutils.core import setup
DISTNAME='earthdragon'
FULLVERSION='0.1'
setup(
name=DISTNAME,
version=FULLVERSION,
packages=['earthdragon'],
install_requires = [
'asttools',
'toolz',
'typeguard',
'more_itertools',
]
)
| 16.117647 | 32 | 0.605839 |
3e6ad6e1c6ce978983b60511c62b60c613bacb9a
| 92 |
py
|
Python
|
script.py
|
juand1809/JuanVelasquez_Ejercicio23
|
40b20dc3db6e3a9a884265a950dd3ccac1f7a615
|
[
"MIT"
] | null | null | null |
script.py
|
juand1809/JuanVelasquez_Ejercicio23
|
40b20dc3db6e3a9a884265a950dd3ccac1f7a615
|
[
"MIT"
] | null | null | null |
script.py
|
juand1809/JuanVelasquez_Ejercicio23
|
40b20dc3db6e3a9a884265a950dd3ccac1f7a615
|
[
"MIT"
] | null | null | null |
import os
a = os.system("g++ sumatoria.cpp -o sumatoria.x")
a = os.system("./sumatoria.x")
| 18.4 | 49 | 0.652174 |
3e6b0a9948d6ab9ae3bf82cdb88963f7746825d0
| 334 |
py
|
Python
|
consultas/urls.py
|
Valarr/django-app
|
2faac602ce5f36dc9007d4af7a3acd38504f4f95
|
[
"MIT"
] | null | null | null |
consultas/urls.py
|
Valarr/django-app
|
2faac602ce5f36dc9007d4af7a3acd38504f4f95
|
[
"MIT"
] | null | null | null |
consultas/urls.py
|
Valarr/django-app
|
2faac602ce5f36dc9007d4af7a3acd38504f4f95
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('consultaticket', views.consultaticket, name='consultaticket'),
path('consultadecredito', views.consultadecredito, name='consultadecredito'),
path('mostrarticket', views.mostrarticket, name='mostrarticket'),
]
| 33.4 | 81 | 0.730539 |
3e6c1c6b5fbe5a4ffcca63260b56292216d80f44
| 1,973 |
py
|
Python
|
order_history.py
|
zylizy/DBMS_Project
|
d6ff25d566a362495e3b4eb68d48d8400f2f20e6
|
[
"MIT"
] | null | null | null |
order_history.py
|
zylizy/DBMS_Project
|
d6ff25d566a362495e3b4eb68d48d8400f2f20e6
|
[
"MIT"
] | null | null | null |
order_history.py
|
zylizy/DBMS_Project
|
d6ff25d566a362495e3b4eb68d48d8400f2f20e6
|
[
"MIT"
] | null | null | null |
import streamlit as st
from db_functions import *
| 51.921053 | 115 | 0.611759 |
3e6d175a2c46fd4c086a5aa6dbda506eabe35fd4
| 1,415 |
py
|
Python
|
cogs/commands/utility/8ball.py
|
teSill/temflix
|
31d40265fa71695966c6178145a1057cd2aeda27
|
[
"MIT"
] | 3 |
2020-12-21T20:51:56.000Z
|
2022-01-04T11:55:45.000Z
|
cogs/commands/utility/8ball.py
|
teSill/temflix
|
31d40265fa71695966c6178145a1057cd2aeda27
|
[
"MIT"
] | null | null | null |
cogs/commands/utility/8ball.py
|
teSill/temflix
|
31d40265fa71695966c6178145a1057cd2aeda27
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
import random
| 36.282051 | 123 | 0.466431 |
3e70d3317c13de0952315e701a55b920df03ec85
| 1,441 |
py
|
Python
|
20210607_fizzbuzz.py
|
sayloren/codechallenges
|
b31b64c176a1c03c937e915f3b60657669495681
|
[
"Apache-2.0"
] | null | null | null |
20210607_fizzbuzz.py
|
sayloren/codechallenges
|
b31b64c176a1c03c937e915f3b60657669495681
|
[
"Apache-2.0"
] | null | null | null |
20210607_fizzbuzz.py
|
sayloren/codechallenges
|
b31b64c176a1c03c937e915f3b60657669495681
|
[
"Apache-2.0"
] | null | null | null |
# code golf challenge
# https://code.golf/fizz-buzz#python
# wren 20210607
# Print the numbers from 1 to 100 inclusive, each on their own line.
# If, however, the number is a multiple of three then print Fizz instead,
# and if the number is a multiple of five then print Buzz.
# For numbers which are multiples of both three and five then print FizzBuzz.
# iterate through the range of required numbers - plus one because range is
# zero based; if divisible by 3 print Fizz, if by 5 print Buzz, and if both FizzBuzz
# i feel like there should be a way to have the 15/FizzBuzz not be a
# separate condition
for number in range(1,101):
    # if there are no remainders for the number in the range divided by 15
    # condition for both 5 and 3 - 15 is the lowest common multiple, where both fizz and buzz
    if number%15==0:print('FizzBuzz')
    # if there are no remainders for the number in the range divided by 3
# elif condition for just 3 - fizz
elif number%3 ==0:print('Fizz')
    # if there are no remainders for the number in the range divided by 5
# elif condition for just 5, buzz
elif number%5==0:print('Buzz')
# else none of the conditions, just print the number
else:print(number)
# alternative approach in one liner
# for loop the same, but the divisibility checks with no remainder are all
# wrapped in a single print statement
for number in range(1, 101): print("Fizz"*(number%3==0)+"Buzz"*(number%5==0) or str(number))
| 46.483871 | 92 | 0.725191 |
3e730fa82d3520ad13dc948a854e1cd1df0331d4
| 275 |
py
|
Python
|
setup.py
|
grro/install-raspberry
|
f6db2d451c1277127a77fdc6b00ea55708f0bd17
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
grro/install-raspberry
|
f6db2d451c1277127a77fdc6b00ea55708f0bd17
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
grro/install-raspberry
|
f6db2d451c1277127a77fdc6b00ea55708f0bd17
|
[
"Apache-2.0"
] | null | null | null |
from setuptools import setup
setup(
name='install-raspberry',
version='',
packages=[''],
url='https://github.com/grro/httpstreamproxy',
license='Apache Software License',
author='grro',
author_email='[email protected]',
description='test'
)
| 21.153846 | 50 | 0.658182 |
3e73f6fed18b11f2933d0b20530ca1d6b4de649e
| 2,701 |
py
|
Python
|
py_privatekonomi/tests/swedbank/test_swedbank_db.py
|
nilsFK/py-privatekonomi
|
9172dfa85e439e18558a60fdb3b69e956e70e783
|
[
"MIT"
] | 2 |
2015-01-04T21:27:45.000Z
|
2015-01-05T13:31:52.000Z
|
py_privatekonomi/tests/swedbank/test_swedbank_db.py
|
nilsFK/py-privatekonomi
|
9172dfa85e439e18558a60fdb3b69e956e70e783
|
[
"MIT"
] | 28 |
2015-01-04T22:13:24.000Z
|
2019-11-29T13:41:01.000Z
|
py_privatekonomi/tests/swedbank/test_swedbank_db.py
|
nilsFK/py-privatekonomi
|
9172dfa85e439e18558a60fdb3b69e956e70e783
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import inspect
from py_privatekonomi.utilities import common
from py_privatekonomi.tests.test_base import TestBase
from py_privatekonomi.tests.dataset.swedbank.sample1 import test_data as test_data_1
from py_privatekonomi.tests.dataset.swedbank.sample2 import test_data as test_data_2
from py_privatekonomi.tests.dataset.swedbank.sample3 import test_data as test_data_3
from py_privatekonomi.tests.dataset.swedbank.sample5 import test_data as test_data_5
if __name__ == '__main__':
unittest.main()
| 37 | 84 | 0.652721 |
3e74eb605f50a2789671592734f1dea5fd163012
| 918 |
py
|
Python
|
gharchive/parse_json.py
|
IAMABOY/Mining-Github
|
cf11c94e72b11f3ce9d638b562df438c8e56d149
|
[
"MIT"
] | 8 |
2019-12-08T11:57:59.000Z
|
2022-01-24T06:26:56.000Z
|
gharchive/parse_json.py
|
IAMABOY/Mining-Github
|
cf11c94e72b11f3ce9d638b562df438c8e56d149
|
[
"MIT"
] | null | null | null |
gharchive/parse_json.py
|
IAMABOY/Mining-Github
|
cf11c94e72b11f3ce9d638b562df438c8e56d149
|
[
"MIT"
] | 2 |
2019-12-17T02:38:55.000Z
|
2021-12-16T01:53:11.000Z
|
import sys
import os
import json
import gzip
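# jsonReader is called below but is not defined in this snippet; a minimal
# sketch (assumption) that walks a gzipped GH Archive dump line by line:
def jsonReader(filename, verbose=0):
    with gzip.open(filename, 'rt', encoding='utf-8') as fp:
        for line in fp:
            event = json.loads(line)
            if verbose:
                print(event.get('type'), event.get('repo', {}).get('name'))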
if __name__ == '__main__':
jsonReader('2019-09-19-10.json.gz',1)
| 24.157895 | 63 | 0.484749 |
3e753e4b76a7bccde83190218fa4e3ea302764fe
| 393 |
py
|
Python
|
iotalib/check_roof.py
|
WWGolay/iota
|
f3e67502d7f96bb836b45b7eca4ebb9fe5490e6d
|
[
"MIT"
] | null | null | null |
iotalib/check_roof.py
|
WWGolay/iota
|
f3e67502d7f96bb836b45b7eca4ebb9fe5490e6d
|
[
"MIT"
] | null | null | null |
iotalib/check_roof.py
|
WWGolay/iota
|
f3e67502d7f96bb836b45b7eca4ebb9fe5490e6d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import pycurl
from io import BytesIO
| 21.833333 | 59 | 0.592875 |
3e7863d676fdd4741e30575b304165077d18541c
| 2,238 |
py
|
Python
|
egg/app.py
|
eanorambuena/Driver
|
3cb14f5d741c6bae364326305ae0ded04e10e9d4
|
[
"MIT"
] | null | null | null |
egg/app.py
|
eanorambuena/Driver
|
3cb14f5d741c6bae364326305ae0ded04e10e9d4
|
[
"MIT"
] | null | null | null |
egg/app.py
|
eanorambuena/Driver
|
3cb14f5d741c6bae364326305ae0ded04e10e9d4
|
[
"MIT"
] | null | null | null |
# Imports
from egg.resources.console import get, clearConsole
from egg.resources.constants import *
from egg.resources.modules import install, upgrade, Repo
from egg.resources.help import help
from egg.resources.auth import login, register
"""
FUNCTION eggConsole(condition: bool = True)
Display the Egg Console
Currently, the Egg Console commands are:
$nqs Start the NQS Developer console
$new Start the News Journalist console
$login Log in Egg-cosystem *coming soon*
$register Register in Egg-cosystem *coming soon*
$install Install a pip package
$upgrade Upgrade a pip package
$pull Import a package stored on a GitHub repository *coming soon: currently, just use github_com package*
$help Get started command
$clear Clear the Egg Console
$end End the Egg Console
WARNING:
Always use $end command in every console you run
*ONLY use a condition different to True as an argument of eggConsole(condition) if you know what you are doing*
This is the reason why condition only allows <<bool>> as data type
"""
| 32.434783 | 113 | 0.605004 |
3e78c123f36641a6b522ac2d459248b01e28de60
| 1,204 |
py
|
Python
|
hello/hello_pil.py
|
East196/hello-py
|
a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21
|
[
"Apache-2.0"
] | 1 |
2017-10-23T14:58:47.000Z
|
2017-10-23T14:58:47.000Z
|
hello/hello_pil.py
|
East196/hello-py
|
a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21
|
[
"Apache-2.0"
] | null | null | null |
hello/hello_pil.py
|
East196/hello-py
|
a77c7a0c8e5e2b5e8cefaf0fda335ab0c3b1da21
|
[
"Apache-2.0"
] | 1 |
2018-04-06T07:49:18.000Z
|
2018-04-06T07:49:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from PIL import Image, ImageDraw, ImageFont, ImageFilter
import random
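# random_char, random_color and random_color2 are used below but their
# definitions are missing from this snippet; minimal sketches (assumptions):
def random_char():
    # random uppercase letter for the verification code
    return chr(random.randint(65, 90))

def random_color():
    # bright random colour for the background pixels
    return (random.randint(64, 255), random.randint(64, 255), random.randint(64, 255))

def random_color2():
    # darker random colour for the drawn characters
    return (random.randint(32, 127), random.randint(32, 127), random.randint(32, 127))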
im = Image.open('F:/workspace/python/data/backpink.jpg')
im2 = im.filter(ImageFilter.BLUR)
im2.save('F:/workspace/python/data/backpink_blur.png', 'png')
im2.save('F:/workspace/python/data/backpink_blur.jpg', 'jpeg')
# generate a 240 x 60 verification-code image:
width = 60 * 4
height = 60
image = Image.new('RGB', (width, height), (255, 255, 255))
# Font:
font = ImageFont.truetype('C:/Windows/Fonts/Arial.ttf', 36)
# Draw:
draw = ImageDraw.Draw(image)
# fill the background with random colour pixels:
for x in range(width):
for y in range(height):
draw.point((x, y), fill=random_color())
# draw four random characters:
for t in range(4):
draw.text((60 * t + 10, 10), random_char(), font=font, fill=random_color2())
# apply a blur filter:
image = image.filter(ImageFilter.BLUR)
image.save('code.jpg', 'jpeg')
print((image.format, image.size, image.mode))
# image.show()
| 24.571429 | 84 | 0.680233 |
3e7a2d3d3d5314bdb0dff02d7d69496583791bdc
| 382 |
py
|
Python
|
terra_layer/migrations/0028_layergroup_exclusive.py
|
Terralego/terra-layer
|
6564a63d389503d3ae1f63ce46e674b228d6764b
|
[
"MIT"
] | 1 |
2019-08-08T15:17:32.000Z
|
2019-08-08T15:17:32.000Z
|
terra_layer/migrations/0028_layergroup_exclusive.py
|
Terralego/terra-layer
|
6564a63d389503d3ae1f63ce46e674b228d6764b
|
[
"MIT"
] | 65 |
2019-10-21T10:05:00.000Z
|
2022-03-08T14:08:27.000Z
|
terra_layer/migrations/0028_layergroup_exclusive.py
|
Terralego/terra-layer
|
6564a63d389503d3ae1f63ce46e674b228d6764b
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.9 on 2019-07-16 10:15
from django.db import migrations, models
| 22.470588 | 63 | 0.636126 |
3e7a7495c3ea96af4211a7bee33396978138c459
| 605 |
py
|
Python
|
docs/examples/best_practices/dynamic_configuration/cdk_appconfig.py
|
ran-isenberg/aws-lambda-handler-cookbook
|
adfe58dacd87315151265818869bb842c7eb4971
|
[
"MIT"
] | 61 |
2022-02-07T05:21:14.000Z
|
2022-03-27T14:11:30.000Z
|
docs/examples/best_practices/dynamic_configuration/cdk_appconfig.py
|
ran-isenberg/aws-lambda-handler-cookbook
|
adfe58dacd87315151265818869bb842c7eb4971
|
[
"MIT"
] | 17 |
2022-02-26T05:25:31.000Z
|
2022-03-16T20:02:46.000Z
|
docs/examples/best_practices/dynamic_configuration/cdk_appconfig.py
|
ran-isenberg/aws-lambda-handler-cookbook
|
adfe58dacd87315151265818869bb842c7eb4971
|
[
"MIT"
] | 4 |
2022-02-17T16:35:27.000Z
|
2022-03-07T03:13:07.000Z
|
from aws_cdk import Stack
from aws_lambda_handler_cookbook.service_stack.configuration.configuration_construct import ConfigurationStore
from aws_lambda_handler_cookbook.service_stack.constants import CONFIGURATION_NAME, ENVIRONMENT, SERVICE_NAME
from constructs import Construct
| 43.214286 | 124 | 0.809917 |
3e7d231b81300bc8be65b86f6758957fdbb26baa
| 653 |
py
|
Python
|
backend-project/small_eod/users/models.py
|
merito/small_eod
|
ab19b82f374cd7c4b21d8f9412657dbe7f7f03e2
|
[
"MIT"
] | 64 |
2019-12-30T11:24:03.000Z
|
2021-06-24T01:04:56.000Z
|
backend-project/small_eod/users/models.py
|
merito/small_eod
|
ab19b82f374cd7c4b21d8f9412657dbe7f7f03e2
|
[
"MIT"
] | 465 |
2018-06-13T21:43:43.000Z
|
2022-01-04T23:33:56.000Z
|
backend-project/small_eod/users/models.py
|
merito/small_eod
|
ab19b82f374cd7c4b21d8f9412657dbe7f7f03e2
|
[
"MIT"
] | 72 |
2018-12-02T19:47:03.000Z
|
2022-01-04T22:54:49.000Z
|
from django.contrib.auth.models import AbstractUser
from ..notifications.utils import TemplateKey, TemplateMailManager
| 29.681818 | 79 | 0.658499 |
3e7efc62df24d3372d57ba9f3602f16dfbfbeff6
| 2,689 |
py
|
Python
|
rtlsdr_sstv/utils.py
|
martinber/rtlsdr_sstv
|
f59ca523408e949f98c4b81b09b2d46232111f4a
|
[
"MIT"
] | 3 |
2019-03-16T01:20:09.000Z
|
2020-12-31T12:31:17.000Z
|
rtlsdr_sstv/utils.py
|
martinber/rtlsdr_sstv
|
f59ca523408e949f98c4b81b09b2d46232111f4a
|
[
"MIT"
] | null | null | null |
rtlsdr_sstv/utils.py
|
martinber/rtlsdr_sstv
|
f59ca523408e949f98c4b81b09b2d46232111f4a
|
[
"MIT"
] | 1 |
2020-12-27T02:31:18.000Z
|
2020-12-27T02:31:18.000Z
|
import collections
import math
import numpy as np
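# mapeadora() is used below but not defined in this snippet; a minimal sketch
# (assumption) that clamps a sample value into the 0-255 range of a pixel channel:
def mapeadora(valor):
    return int(max(0, min(255, round(valor))))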
def escribir_pixel(img, columna, linea, canal, valor):
    '''Function in charge of writing the image pixel by pixel.'''
if linea >= img.height:
return
if canal == "lum":
prev = img.getpixel((columna,linea-1))
datapixel = (mapeadora(valor), prev[1], prev[2])
img.putpixel((columna,linea-1), datapixel)
if canal == "cr":
prev = img.getpixel((columna,linea-1))
nxt_prev = img.getpixel((columna,linea))
datapixel = (prev[0], prev[1], mapeadora(valor))
nxt_datapixel = (nxt_prev[0], nxt_prev[1], mapeadora(valor))
img.putpixel((columna,linea-1), datapixel)
img.putpixel((columna,linea), nxt_datapixel)
if canal == "cb":
prev = img.getpixel((columna,linea-1))
nxt_prev = img.getpixel((columna,linea))
datapixel = (prev[0], mapeadora(valor), prev[2])
nxt_datapixel = (nxt_prev[0], mapeadora(valor), nxt_prev[2])
img.putpixel((columna,linea-1), datapixel)
img.putpixel((columna,linea), nxt_datapixel)
if canal == "nxt_lum":
prev = img.getpixel((columna,linea))
datapixel = (mapeadora(valor), prev[1], prev[2])
img.putpixel((columna,linea), datapixel)
def lowpass(cutout, delta_w, atten):
'''
    cutout and delta_w in fractions of pi radians per second.
    atten in positive decibels.
'''
beta = 0
if atten > 50:
beta = 0.1102 * (atten - 8.7)
elif atten < 21:
beta = 0
else:
beta = 0.5842 * (atten - 21)**0.4 + 0.07886 * (atten - 21)
length = math.ceil((atten - 8) / (2.285 * delta_w * math.pi)) + 1;
if length % 2 == 0:
length += 1
coeffs = np.kaiser(length, beta)
    # i is the index into the vector, n is the axis with zero centred at the
    # middle of the filter
for i, n in enumerate(range(
int(-(length - 1) / 2),
int((length - 1) / 2)+1)):
if n == 0:
coeffs[i] *= cutout
else:
coeffs[i] *= math.sin(n * math.pi * cutout) / (n * math.pi)
return coeffs
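if __name__ == '__main__':
    # Example sketch (assumed parameters): a low-pass with cutoff at 0.1*pi,
    # a 0.05*pi transition band and 60 dB of attenuation, applied to white
    # noise with a plain convolution.
    taps = lowpass(0.1, 0.05, 60)
    noise = np.random.randn(1000)
    smoothed = np.convolve(noise, taps, mode='same')
    print(len(taps), smoothed.shape)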
| 29.549451 | 80 | 0.581629 |
3e7f9f610ed95d40e15a8580e0dd70e9219fb93d
| 3,653 |
py
|
Python
|
Pong.py
|
Mishkanian/pong_game
|
5a04b4b5fc36af2159e60fb85941034a2325996c
|
[
"MIT"
] | null | null | null |
Pong.py
|
Mishkanian/pong_game
|
5a04b4b5fc36af2159e60fb85941034a2325996c
|
[
"MIT"
] | null | null | null |
Pong.py
|
Mishkanian/pong_game
|
5a04b4b5fc36af2159e60fb85941034a2325996c
|
[
"MIT"
] | 1 |
2021-11-15T20:21:53.000Z
|
2021-11-15T20:21:53.000Z
|
"""
Pong game by Michael Mishkanian
"""
import turtle
wn = turtle.Screen()
wn.title("Pong by Michael Mishkanian")
wn.bgcolor("black")
wn.setup(width=800, height=600)
wn.tracer(0)
# Paddle A
paddle_a = turtle.Turtle()
paddle_a.speed(0)
paddle_a.shape("square")
paddle_a.color("white")
paddle_a.shapesize(stretch_wid=5, stretch_len=1) # make paddle a rectangle
paddle_a.penup()
paddle_a.goto(-350, 0) # starting location of paddle on left side of screen
# Paddle B
paddle_b = turtle.Turtle()
paddle_b.speed(0)
paddle_b.shape("square")
paddle_b.color("white")
paddle_b.shapesize(stretch_wid=5, stretch_len=1)
paddle_b.penup()
paddle_b.goto(350, 0) # starting location of paddle on right side of screen
# Ball
ball = turtle.Turtle()
ball.speed(0)
ball.shape("square")
ball.color("white")
ball.penup()
ball.goto(0, 0) # ball starts in middle of screen
ball.dx = .33 # movement speed of the ball dx
ball.dy = .33 # movement speed of the ball dy
# Score Display
pen = turtle.Turtle()
pen.speed(0)
pen.color("white")
pen.penup()
pen.hideturtle()
pen.goto(0, 260)
pen.write("Player 1: 0 Player 2: 0", align="center", font=("Courier", 24, "normal"))
# Start Tracking Scores
score_a = 0
score_b = 0
def paddle_a_up():
"""
This function takes in the current y-coordinate of paddle A
and then increases the position by 20 (AKA "go up")
"""
y = paddle_a.ycor()
y += 20
paddle_a.sety(y)
def paddle_a_down():
"""
This function takes in the current y-coordinate of paddle A
and then decreases the position down 20 (AKA "go down")
"""
y = paddle_a.ycor()
y -= 20
paddle_a.sety(y)
def paddle_b_up():
"""
This function takes in the current y-coordinate of paddle B
and then increases the position by 20 (AKA "go up")
"""
y = paddle_b.ycor()
y += 20
paddle_b.sety(y)
def paddle_b_down():
"""
This function takes in the current y-coordinate of paddle B
and then decreases the position by 20 (AKA "go down")
"""
y = paddle_b.ycor()
y -= 20
paddle_b.sety(y)
# Key bindings
wn.listen()
wn.onkeypress(paddle_a_up, "w")
wn.onkeypress(paddle_a_down, "s")
wn.onkeypress(paddle_b_up, "Up")
wn.onkeypress(paddle_b_down, "Down")
# Main game loop
while True:
wn.update()
# Ball movement
ball.setx(ball.xcor() + ball.dx)
ball.sety(ball.ycor() + ball.dy)
# Border checks
if ball.ycor() > 290:
ball.sety(290)
ball.dy *= -1 # reverse direction if ball is too high
if ball.ycor() < -290:
ball.sety(-290)
ball.dy *= -1 # reverse direction if ball is too low
    # restart game when the ball passes a paddle
if ball.xcor() > 390:
ball.goto(0, 0)
ball.dx *= -1
score_a += 1
pen.clear() # clear score
pen.write("Player 1: {} Player 2: {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
if ball.xcor() < -390:
ball.goto(0, 0)
ball.dx *= -1
score_b += 1
pen.clear() # clear score
pen.write("Player 1: {} Player 2: {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
# Collisions
if (ball.xcor() > 340 and ball.xcor() < 350) and (ball.ycor() < paddle_b.ycor() + 40
and ball.ycor() > paddle_b.ycor() - 40):
ball.setx(340)
ball.dx *= -1
if (ball.xcor() < -340 and ball.xcor() > -350) and (ball.ycor() < paddle_a.ycor() + 40
and ball.ycor() > paddle_a.ycor() - 40):
ball.setx(-340)
ball.dx *= -1
| 26.280576 | 120 | 0.611005 |
3e7fe9149a1b5f7c3cd431d38f69f6e9b05ff08e
| 108 |
py
|
Python
|
loops_part2/sequence_2k+1.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | null | null | null |
loops_part2/sequence_2k+1.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | null | null | null |
loops_part2/sequence_2k+1.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | 1 |
2022-01-14T17:12:44.000Z
|
2022-01-14T17:12:44.000Z
|
number = int(input())
counter = 1
while counter <= number:
print(counter)
counter = 2 * counter + 1
| 18 | 29 | 0.62963 |
3e82a49073596a4c986e7e70b3ddc02848ac39cb
| 385 |
py
|
Python
|
tests/fixtures.py
|
hdsr-mid/string_finder
|
64aa38afa562beddc897a0fcb84bf39e53b935fd
|
[
"MIT"
] | null | null | null |
tests/fixtures.py
|
hdsr-mid/string_finder
|
64aa38afa562beddc897a0fcb84bf39e53b935fd
|
[
"MIT"
] | null | null | null |
tests/fixtures.py
|
hdsr-mid/string_finder
|
64aa38afa562beddc897a0fcb84bf39e53b935fd
|
[
"MIT"
] | null | null | null |
from pathlib import Path
from string_finder.constants import TEST_DATA_DIR
from typing import List
import pytest
| 25.666667 | 90 | 0.719481 |
3e83c39b04f2c10f748cc83b7509198a99b52216
| 1,432 |
py
|
Python
|
clean.py
|
glqstrauss/oopsgenie
|
d1984e332b11f972db2008867f1aba0917457b9b
|
[
"MIT"
] | 5 |
2020-01-02T21:15:31.000Z
|
2020-07-29T18:01:51.000Z
|
clean.py
|
glqstrauss/oopsgenie
|
d1984e332b11f972db2008867f1aba0917457b9b
|
[
"MIT"
] | 2 |
2020-01-07T15:36:44.000Z
|
2020-01-13T20:38:45.000Z
|
clean.py
|
glqstrauss/oopsgenie
|
d1984e332b11f972db2008867f1aba0917457b9b
|
[
"MIT"
] | 1 |
2020-07-29T17:10:32.000Z
|
2020-07-29T17:10:32.000Z
|
import csv
from utils import get_valid_colum_indices
| 34.926829 | 73 | 0.48743 |
3e869ea6160f40dc58804e7f852689a43590b0fc
| 516 |
py
|
Python
|
issues_list/migrations/0003_auto_20181106_1541.py
|
vmcggh18/bits_tracker
|
7c09aae321efb13979bed274d973c77319ce795e
|
[
"PostgreSQL"
] | null | null | null |
issues_list/migrations/0003_auto_20181106_1541.py
|
vmcggh18/bits_tracker
|
7c09aae321efb13979bed274d973c77319ce795e
|
[
"PostgreSQL"
] | 7 |
2020-06-05T19:50:41.000Z
|
2022-03-11T23:39:39.000Z
|
issues_list/migrations/0003_auto_20181106_1541.py
|
vmcggh18/bits_tracker
|
7c09aae321efb13979bed274d973c77319ce795e
|
[
"PostgreSQL"
] | 1 |
2019-02-19T15:30:16.000Z
|
2019-02-19T15:30:16.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-06 15:41
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
| 23.454545 | 66 | 0.649225 |
3e87d53a9a7c9e621189fb9905a20df232be3db0
| 6,203 |
py
|
Python
|
sentiment_analysis.py
|
bernardomelo/projeto-final
|
f0207144282b04e1b781604bf96d69634ca68ee8
|
[
"MIT"
] | null | null | null |
sentiment_analysis.py
|
bernardomelo/projeto-final
|
f0207144282b04e1b781604bf96d69634ca68ee8
|
[
"MIT"
] | null | null | null |
sentiment_analysis.py
|
bernardomelo/projeto-final
|
f0207144282b04e1b781604bf96d69634ca68ee8
|
[
"MIT"
] | null | null | null |
###############################################################################
# Universidade Federal de Pernambuco -- UFPE (http://www.ufpe.br)
# Centro de Informatica -- CIn (http://www.cin.ufpe.br)
# Bachelor's degree in Information Systems
# IF968 -- Programming 1
#
# Author: Bernardo Gomes de Melo
# Information Systems student at CIn
#
# Email: [email protected]
#
#
# Date: 2016-06-10
#
# Description: This is a template file to be used for the implementation
#              of the practical project of the Programming 1 course.
#              The project description is on the course website and is an
#              adaptation of the project available at
#              http://nifty.stanford.edu/2016/manley-urness-movie-review-sentiment/
#              The goal of this project is to implement a sentiment analysis
#              system for movie reviews posted on the Rotten Tomatoes website.
#
# License: The MIT License (MIT)
#          Copyright(c) 2016 Bernardo Gomes de Melo, CIn student
#
###############################################################################
import sys
import re
def clean_up(s):
    ''' Returns a version of the string 's' in which all letters are
        converted to lower case and punctuation characters are stripped
        from both ends. Punctuation inside the string is kept intact.
    '''
punctuation = ''''!"',;:.-?)([]<>*#\n\t\r'''
result = s.lower().strip(punctuation)
return result
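# Example (illustration): clean_up("Hello, World!") returns "hello, world"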
def split_on_separators(original, separators):
    ''' Returns the non-empty strings obtained by splitting the original
        string on any of the characters contained in 'separators'.
        'separators' is a string of the single characters to be used as
        separators. For example, '^$' is a valid string, indicating that the
        original string will be split on '^' and '$'.
    '''
return filter(lambda x: x != '',re.split('[{0}]'.format(separators),original))
def readTrainingSet(fname):
    ''' Reads the training set and returns a dictionary mapping each word to
        its average score over the training reviews. (The header of this
        function was missing from the file; the body is reconstructed to match
        how the dictionary is used elsewhere.)
    '''
    words = {}
    arq = open(fname,'r')
    for line in arq:
        score = int(line[0])
        review = clean_up(line[1:-1])
        for token in split_on_separators(review, ' '):
            palavra = clean_up(token)
            if not palavra:
                continue
            if palavra not in words:
                words[palavra] = [0, 0.0]
            words[palavra][0] += 1
            words[palavra][1] += score
    arq.close()
    for x in words.keys():
        words[x] = words[x][1]/words[x][0]
    return words
def readTestSet(fname):
reviews = []
arq = open(fname,'r')
for line in arq:
reviews.append((int(line[0]),clean_up(line[1:-1])))
arq.close()
return reviews
def computeSentiment(review,words):
    ''' Returns the sentiment of the review received as a parameter.
        The sentiment of a review is the average of the scores of its words.
        If a word is not in the training-set vocabulary, its score is 2.
        Review is the textual part of a comment.
        Words is the dictionary of words and their average scores in the
        training set.
    '''
    score = 0.0
    count = 0
    s = split_on_separators(review,' ')
    for i in s:
        palavra = clean_up(i)
        if palavra in words:
            score += words[palavra]
        else:
            score += 2
        count += 1
    if count == 0:
        return 2.0
    return score/count
def computeSumSquaredErrors(reviews,words):
    ''' Computes the sum of the squared errors of the reviews received as a
        parameter, normalised by the number of reviews. The sentiment of a
        review is obtained with the computeSentiment function.
        Reviews is a list of (score, text) pairs.
        Words is a dictionary of words and their average scores in the
        training set.
    '''
sse = 0
final = 0
for line in reviews:
sentiment = computeSentiment(line[1],words)
if line[0] != sentiment:
diference = float(line[0]) - sentiment
result = diference**2
final += result
sse = final / len(reviews)
return sse
def main():
    # The files are passed to the program as command-line arguments.
    # You should look up how this works (it is part of the project).
    # The parameter order is: first the file name of the training set,
    # then the file of the test set.
if len(sys.argv) < 3:
print ('Numero invalido de argumentos')
print ('O programa deve ser executado como python sentiment_analysis.py <arq-treino> <arq-teste>')
sys.exit(0)
    # Read the training set and compute the word scores
words = readTrainingSet(sys.argv[1])
    # Read the test set
reviews = readTestSet(sys.argv[2])
    # Infer sentiment and compute the sum of squared errors
sse = computeSumSquaredErrors(reviews,words)
print( 'A soma do quadrado dos erros e\': {0}'.format(sse))
if __name__ == '__main__':
main()
| 35.855491 | 107 | 0.563759 |
3e8a5b0b6fc0612db9638f1736e52adef498431d
| 37,129 |
py
|
Python
|
morm/db.py
|
neurobin/python-morm
|
2b6dcedc7090a9e642331300a24dfcca41ea1afe
|
[
"BSD-3-Clause"
] | 4 |
2021-03-12T16:36:24.000Z
|
2022-03-06T09:26:14.000Z
|
morm/db.py
|
neurobin/python-morm
|
2b6dcedc7090a9e642331300a24dfcca41ea1afe
|
[
"BSD-3-Clause"
] | null | null | null |
morm/db.py
|
neurobin/python-morm
|
2b6dcedc7090a9e642331300a24dfcca41ea1afe
|
[
"BSD-3-Clause"
] | null | null | null |
"""DB utilities.
"""
__author__ = 'Md Jahidul Hamid <[email protected]>'
__copyright__ = 'Copyright Md Jahidul Hamid <https://github.com/neurobin/>'
__license__ = '[BSD](http://www.opensource.org/licenses/bsd-license.php)'
__version__ = '0.1.0'
import collections
import re
import asyncio
import nest_asyncio # type: ignore
import atexit
import logging
import asyncpg # type: ignore
from asyncpg import Record, Connection # type: ignore
from typing import Optional, Dict, List, Tuple, TypeVar, Union, Any
from morm import exceptions
from morm.model import ModelType, Model, ModelBase, _FieldNames
from morm.q import Q
from morm.types import Void
LOGGER_NAME = 'morm.db-'
log = logging.getLogger(LOGGER_NAME)
nest_asyncio.apply()
def record_to_model(record: Record, model_class: ModelType) -> Model:
"""Convert a Record object to Model object.
Args:
record (Record): Record object.
model_class (ModelType): Model class
Returns:
Model: Model instance.
"""
new_record = model_class()
for k,v in record.items():
new_record.Meta._fromdb_.append(k)
setattr(new_record, k, v)
return new_record
def get_update_query(self, mob: ModelBase, reset=False) -> Tuple[str, List[Any]]:
"""Get the update query for the changed data in the model object (mob)
Args:
mob (ModelBase): Model object
reset (bool): If True, this method can be called just once to get the changes done on mob. Subsequent call will return empty query.
Raises:
AttributeError: If primary key does not exists i.e if not updatable
Returns:
str, args: tuple of query, args
"""
    pkval = getattr(mob, mob.__class__._get_pk_()) # the save method depends on its AttributeError
data = mob.Meta._fields_
new_data_gen = mob.__class__._get_FieldValue_data_valid_(data, up=True)
colval = []
values = []
c = 0
for n,v in new_data_gen:
if n == mob.__class__._get_pk_(): continue
if v.value_change_count > 0:
c += 1
colval.append(f'"{n}"=${c}')
values.append(v.value)
if reset:
v.value_change_count = 0
colval_q = ', '.join(colval)
if colval_q:
where = f'"{mob.__class__._get_pk_()}"=${c+1}'
values.append(pkval)
query = f'UPDATE "{mob.__class__._get_db_table_()}" SET {colval_q} WHERE {where}'
else:
query = ''
return query, values
def get_delete_query(self, mob: ModelBase) -> Tuple[str, List[Any]]:
"""Get the delete query for the model object.
Args:
mob (ModelBase): model object.
Returns:
        Tuple[str, List[Any]]: query, args
"""
pkval = getattr(mob, mob.__class__._get_pk_())
query = f'DELETE FROM "{mob.__class__._get_db_table_()}" WHERE "{mob.__class__._get_pk_()}"=$1'
return query, [pkval]
def q(self, model: ModelType = None) -> 'ModelQuery':
"""Return a ModelQuery for model
If `None` is passed, it will give a `ModelQuery` without setting
`self.model` on the `ModelQuery` object.
Args:
model (ModelType, optional): model class. Defaults to None.
Raises:
TypeError: If invalid model type is passed
Returns:
ModelQuery: ModelQuery object
"""
return self(model)
def __call__(self, model: ModelType = None) -> 'ModelQuery':
"""Return a ModelQuery for model
If `None` is passed, it will give a `ModelQuery` without setting
`self.model` on the `ModelQuery` object.
Args:
model (ModelType, optional): model class. Defaults to None.
Raises:
TypeError: If invalid model type is passed
Returns:
ModelQuery: ModelQuery object
"""
if isinstance(model, ModelType) or model is None:
return ModelQuery(self, model)
raise TypeError(f"Invalid model: {model}. model must be of type {ModelType.__name__}. Make sure you did not pass a model object by mistake.")
class ModelQuery():
"""Query builder for model class.
Calling `db(Model)` gives you a model query handler which have several query methods to help you make queries.
Use `q(query, *args)` method to make queries with positional arguments. If you want named arguments, use the uderscored version of these methods. For example, `q(query, *args)` has an underscored version `q_(query, *args, **kwargs)` that can take named arguments.
You can add a long query part by part:
```python
from morm.db import DB
db = DB(DB_POOL) # get a db handle.
qh = db(User) # get a query handle.
query, args = qh.q(f'SELECT * FROM {qh.db_table}')\
.q(f'WHERE {qh.f.profession} = ${qh.c}', 'Teacher')\
.q_(f'AND {qh.f.age} = :age', age=30)\
.getq()
print(query, args)
# fetch:
await qh.fetch()
```
The `q` family of methods (`q, qc, qu etc..`) can be used to
build a query step by step. These methods can be chained
together to break down the query building in multiple steps.
Several properties are available to get information of the model
such as:
1. `qh.db_table`: Quoted table name e.g `"my_user_table"`.
2. `qh.pk`: Quoted primary key name e.g `"id"`.
3. `qh.ordering`: ordering e.g `"price" ASC, "quantity" DESC`.
4. `qh.f.<field_name>`: quoted field names e.g`"profession"`.
5. `qh.c`: Current available position for positional argument (Instead of hardcoded `$1`, `$2`, use `f'${qh.c}'`, `f'${qh.c+1}'`).
`qh.c` is a counter that gives an integer representing the
last existing argument position plus 1.
    `reset()` can be called to reset the query and start a new one.
To execute a query, you need to run one of the execution methods
: `fetch, fetchrow, fetchval, execute`.
**Notable convenience methods:**
* `qupdate(data)`: Initialize a update query for data
* `qfilter()`: Initialize a filter query upto WHERE clasue.
* `get(pkval)`: Get an item by primary key.
Args:
db (DB): DB object
model_class (ModelType): model
"""
def reset(self) -> 'ModelQuery':
"""Reset the model query by returning it to its initial state.
Returns:
self (Enables method chaining)
"""
self._query_str_queue: List[str] = []
self.end_query_str = ''
self.start_query_str = ''
self._args: List[Any] = []
self._arg_count = 0
self._named_args: Dict[str, Any] = {}
self._named_args_mapper: Dict[str, int] = {}
self.__filter_initiated = False
self._ordering = ''
self.__update_initiated = False
return self
def _process_positional_args(self, *args):
if args:
self._args.extend(args)
self._arg_count += len(args)
def q(self, q: str, *args: Any) -> 'ModelQuery':
"""Add raw query stub without parsing to check for keyword arguments
Use `$1`, `$2` etc. for arguments.
Use `self.c` (instance property, use fstring) to get the current
available argument position.
This is an efficient way to add query that do not have any
keyword arguments to handle, compared to `q_()` which checks for
keyword arguments everytime it is called.
Example:
```python
mq = db(SomeModel)
mq\
.q('SELECT * FROM "table" WHERE $1', True)\
.q('AND "price" >= $2', 33)\
.q(f'OR "price" = ${mq.c}', 0) # mq.c=3 (now)\
.q_('OR "status" = :status', status='OK')\
# :status is $4:
.q('OR "active" = $5', 0)\
.q_('AND "status" = :status')\
# status='OK' from previous call
.q('OR "price" = $2')\
# $2=33 from previous call
#using format string and mq.c to get the argument position:
.q(f'OR "price" > ${mq.c} OR "quantity" > ${mq.c+1}', 12, 3)
# mq.c=6 ^
```
Args:
q (str): raw query string
*args (Any): positional arguments
Returns:
ModelQuery: self, enables method chaining.
"""
self._process_positional_args(*args)
self._query_str_queue.append(q)
return self
def q_(self, q: str, *args, **kwargs) -> 'ModelQuery':
"""Add a query stub having keyword params.
Use the format `:field_name` for keyword parameter.
`:field_name` is converted to positional parameter (`$n`).
This method checks the query against all keyword arguments
that has been added so far with other `q*()` methods.
Args:
q (str): query string (SQL)
Returns:
ModelQuery: returns `self` to enable method chaining
"""
self._process_positional_args(*args)
q = self._process_keyword_args(q, **kwargs)
self._query_str_queue.append(q)
return self
def qq(self, word: str) -> 'ModelQuery':
"""Quote and add a word to the query.
Enable to add names with auto-quote. For example, if the name
for a field value is `status`, it can be added to the query
with auto-quoting, i.e for postgresql it will be added
as `"status"`.
Example:
```python
.qq('price').q('>= $1',34)
```
Args:
word (str): the word that needs to be added with quote.
Returns:
ModelQuery: returns `self` to enable method chaining
"""
if word:
self._query_str_queue.append(Q(word))
return self
def qc(self, word: str, rest: str, *args) -> 'ModelQuery':
"""Add query by quoting `word` while adding the `rest` as is.
This is a shorthand for making where clause conditions.
For example: `qc('price', '>=$1', 34)` is a safe way to write
a where condition like: `"price" >=34`.
The same can be achieved by using a combination of
`qq()` and `q()` or manually quoting and using
with `q()`
Example:
```python
.qc('price', '>= $1', 34)
```
Args:
word (str): left part of query that needs to be quoted
rest (str): right part of query that does not need to be quoted
*args (any): args
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.qq(word).q(rest, *args)
def qc_(self, word: str, rest: str, *args, **kwargs) -> 'ModelQuery':
"""Add query by quoting `word` while adding the `rest` as is.
Same as `qc()` except this method parses the `rest` query string
for keyword params in the format: `:field_name`
Args:
word (str): left part of query that needs to be quoted
rest (str): right part of query that does not need to be quoted
*args (any): args
*kwargs: keyword args
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.qq(word).q_(rest, *args, **kwargs)
def qorder(self):
"""Add ORDER BY
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.q('ORDER BY')
def qo(self, order: str) -> 'ModelQuery':
"""Convert `+/-field_name,` to proper order_by criteria and add to query.
Example: `-field_name,` will become: `"field_name" DESC,`
* `+` at beginning means ascending order (default)
* `-` at beginning means descending order
* `,` at end means you will add more order criteria
Ommit the comma (`,`) when it is the last ordering criteria.
Args:
order (str): order criteria in the format `+/-field_name,`
Returns:
ModelQuery: returns `self` to enable method chaining
"""
direction = 'ASC'
if order.startswith('-'):
order = order[1:]
direction = 'DESC'
elif order.startswith('+'):
order = order[1:]
if order.endswith(','):
order = order[0:-1]
direction += ','
return self.qq(order).q(direction)
def qu(self, data: dict) -> 'ModelQuery':
"""Convert data to `"column"=$n` query with args as the
values and add to the main query.
The counter of positional arguments increases by the number of
items in `data`. Make use of `self.c` counter to add more
queries after using this method.
Args:
data (dict): data in format: `{'column': value}`
Returns:
ModelQuery: returns `self` to enable method chaining
"""
setq = ', '.join([f'"{c}"=${i}' for i,c in enumerate(data, self.c)])
return self.q(setq, *data.values())
def qreturning(self, *column_names) -> 'ModelQuery':
"""Convenience to add a `RETURNING` clause.
Args:
column_names: column names.
Returns:
ModelQuery: returns `self` to enable method chaining
"""
q = '","'.join(column_names)
if q:
q = f'RETURNING "{q}"'
return self.q(q)
def qwhere(self) -> 'ModelQuery':
"""Convenience to add 'WHERE' to the main query.
Make use of `qc()` method to add conditions.
Returns:
ModelQuery: returns `self` to enable method chaining
"""
return self.q('WHERE')
def qfilter(self, no_ordering=False) -> 'ModelQuery':
"""Initiate a filter.
This initiates a `SELECT` query upto `WHERE`. You can then use the
`q()`, `qc()`, etc. methods to add conditions and finally
execute the `fetch()` method to get all results or execute the
`fetchrow()` method to get a single row.
Example:
```python
.qfilter().q('"price" >= $1 AND "status" = $2', 32.12, 'OK')
```
Args:
no_ordering (bool): Whether to remove the default ordering SQL. Defaults to False.
Returns:
ModelQuery: returns self to enable method chaining
"""
if not self.__filter_initiated:
down_fields = ','.join([Q(x) for x in self.model._get_fields_(up=False)]) #type: ignore
self.reset().q(f'SELECT {down_fields} FROM "{self.model._get_db_table_()}" WHERE') #type: ignore
self.__filter_initiated = True
order_by = self.ordering
if order_by and not no_ordering:
self.end_query_str = f'ORDER BY {order_by}'
else:
raise ValueError(f"Filter is already initiated for this {self.__class__.__name__} query object: {self}")
return self
def qupdate(self, data: dict) -> 'ModelQuery':
"""Initiate a UPDATE query for data.
This initiates an `UPDATE` query upto `WHERE` and leaves you to
add conditions with other methods such as `qc` or the generic
method `q()`.
Finally call the `execute()` method to execute the query or
call the `fetchval()` method if using `RETURNING` clause.
Args:
data (dict): data in key value dictionary
Returns:
ModelQuery: returns `self` to enable method chaining
"""
if not self.__update_initiated:
self.reset().q(f'UPDATE {self.db_table} SET').qu(data).qwhere()
self.__update_initiated = True
else:
raise ValueError(f"update is already initiated for this {self.__class__.__name__} query: {self}")
return self
def getq(self) -> Tuple[str, List[Any]]:
"""Return query string and arg list
Returns:
tuple: (str, list) : (query, args)
"""
query = ' '.join(self._query_str_queue)
self._query_str_queue = [query]
query = f'{self.start_query_str} {query} {self.end_query_str}'
return query, self._args
SERIALIZABLE = 'serializable'
REPEATABLE_READ = 'repeatable_read'
READ_COMMITTED = 'read_committed'
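# Usage sketch (assumes a configured connection pool and a User model, mirroring
# the ModelQuery docstring above):
#
#   db = DB(DB_POOL)
#   qh = db(User)
#   teachers = await qh.qfilter().qc('profession', f'= ${qh.c}', 'Teacher').fetch()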
| 33.969808 | 267 | 0.577177 |
3e8b7eee7855784a75f5858aea2cd7099da89f3d
| 4,197 |
py
|
Python
|
gistsig/cli.py
|
derekmerck/check-hashes
|
aaa7d596281e41bbb5b73850c5d43113b7d0632b
|
[
"MIT"
] | 1 |
2019-01-26T22:33:02.000Z
|
2019-01-26T22:33:02.000Z
|
gistsig/cli.py
|
derekmerck/check-hashes
|
aaa7d596281e41bbb5b73850c5d43113b7d0632b
|
[
"MIT"
] | null | null | null |
gistsig/cli.py
|
derekmerck/check-hashes
|
aaa7d596281e41bbb5b73850c5d43113b7d0632b
|
[
"MIT"
] | null | null | null |
"""
gistsig
Derek Merck
Winter 2019
Sign and verify Python packages using public gists.
"""
import logging
from pprint import pformat
from datetime import datetime
import click
from . import get_gist, update_gist
from . import get_pkg_info, get_pkg_gist
def find_gist_id(pkg_name):
# Check the package
gist_id = get_pkg_gist(pkg_name)
if gist_id and \
click.confirm("No reference gist set, use package declared gist? ({})".format(gist_id)):
return gist_id
click.echo("No gist found for this package")
return None
def _cli():
cli.add_command(show)
cli.add_command(pull)
cli.add_command(verify)
cli.add_command(push)
cli(auto_envvar_prefix="GISTSIG", obj={})
if __name__ == "__main__":
_cli()
| 27.794702 | 99 | 0.646414 |
3e8c2e49f52c5a966e053c091e7e268d680d58d4
| 2,397 |
py
|
Python
|
cvxpy/reductions/solvers/conic_solvers/super_scs_conif.py
|
mostafaelaraby/cvxpy
|
078e025be8b8315b5f579bd0209e8e3a1e2a2a19
|
[
"ECL-2.0",
"Apache-2.0"
] | 2 |
2021-09-24T12:59:45.000Z
|
2021-09-24T13:00:08.000Z
|
cvxpy/reductions/solvers/conic_solvers/super_scs_conif.py
|
mostafaelaraby/cvxpy
|
078e025be8b8315b5f579bd0209e8e3a1e2a2a19
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cvxpy/reductions/solvers/conic_solvers/super_scs_conif.py
|
mostafaelaraby/cvxpy
|
078e025be8b8315b5f579bd0209e8e3a1e2a2a19
|
[
"ECL-2.0",
"Apache-2.0"
] | 1 |
2020-04-12T05:17:18.000Z
|
2020-04-12T05:17:18.000Z
|
"""
Copyright 2018 Riley Murray
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import cvxpy.settings as s
from cvxpy.reductions.solvers.conic_solvers.conic_solver import ConicSolver
from cvxpy.reductions.solvers.conic_solvers.scs_conif import dims_to_solver_dict, SCS
| 33.291667 | 88 | 0.635378 |
3e8fb96193b2244d64a924fa63c9c59dfafd9741
| 557 |
py
|
Python
|
Software_University/fundamentals/functions/lecture/orders.py
|
Ivanazzz/SoftUni-W3resource-Python
|
892321a290e22a91ff2ac2fef5316179a93f2f17
|
[
"MIT"
] | 1 |
2022-01-26T07:38:11.000Z
|
2022-01-26T07:38:11.000Z
|
Software_University/fundamentals/functions/lecture/orders.py
|
Ivanazzz/SoftUni-W3resource-Python
|
892321a290e22a91ff2ac2fef5316179a93f2f17
|
[
"MIT"
] | null | null | null |
Software_University/fundamentals/functions/lecture/orders.py
|
Ivanazzz/SoftUni-W3resource-Python
|
892321a290e22a91ff2ac2fef5316179a93f2f17
|
[
"MIT"
] | null | null | null |
product_type = input("Enter the product type(coffee, water, coke, snacks): ")
quantity = int(input("Enter the quantity: "))
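# price() is called below but not defined in this snippet; a minimal sketch
# with assumed unit prices (adjust to the actual task statement):
def price():
    prices = {"coffee": 1.50, "water": 1.00, "coke": 1.40, "snacks": 2.00}
    return prices[product_type] * quantity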
print(f"{price():.2f}")
| 30.944444 | 77 | 0.626571 |
e40f115d7100a36cb4b801ec2f9f1a7a1eb33d05
| 4,984 |
py
|
Python
|
linear_model.py
|
gavb222/flatpanel-localize
|
6504eb94379f5df268ae280f996c7dd66f063e4e
|
[
"MIT"
] | 1 |
2021-02-01T18:17:11.000Z
|
2021-02-01T18:17:11.000Z
|
linear_model.py
|
gavb222/flatpanel-localize
|
6504eb94379f5df268ae280f996c7dd66f063e4e
|
[
"MIT"
] | null | null | null |
linear_model.py
|
gavb222/flatpanel-localize
|
6504eb94379f5df268ae280f996c7dd66f063e4e
|
[
"MIT"
] | 1 |
2021-02-01T18:07:12.000Z
|
2021-02-01T18:07:12.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import time
import random
import matlab.engine
#panel_x, panel_y = panel dimensions
#n_freq = n frequency bins
#x, y = top left of gaussian
#spread = spread of the gaussian
#NB that x-spread > 0, y-spread > 0, x+spread < panel_x, y+spread < panel_y
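# Conv_Net and produce_freq_response are used below but are not defined in this
# file; the definitions here are minimal stand-in sketches (assumptions), not
# the project's actual implementations.
def produce_freq_response(panel_x, panel_y, n_freq, x, y, spread):
    # Return a (panel_x, panel_y) target map with a Gaussian bump at (x, y),
    # or an all -1 map when the spread would spill over the panel edges.
    if x - spread < 0 or y - spread < 0 or x + spread >= panel_x or y + spread >= panel_y:
        return torch.ones(panel_x, panel_y) * -1
    xs = torch.arange(panel_x, dtype=torch.float32).view(-1, 1)
    ys = torch.arange(panel_y, dtype=torch.float32).view(1, -1)
    return torch.exp(-((xs - x) ** 2 + (ys - y) ** 2) / (2.0 * spread ** 2))

class Conv_Net(nn.Module):
    # Assumed shape contract: input (1, 1, panel_x, panel_y), output
    # (4, n_out // 4) so that coefs[0, :] ... coefs[3, :] below index the
    # per-driver biquad coefficients.
    def __init__(self, in_channels, hidden, n_out):
        super().__init__()
        self.conv = nn.Conv2d(in_channels, hidden, kernel_size=3, padding=1)
        self.fc = nn.Linear(hidden, n_out)
        self.n_out = n_out

    def forward(self, x):
        x = F.relu(self.conv(x))
        x = x.mean(dim=(2, 3))  # global average pooling -> (batch, hidden)
        return self.fc(x).view(4, self.n_out // 4)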
model = Conv_Net(1,16,24)
model.cuda()
model.train()
loss_fn = nn.MSELoss()
criterion = torch.optim.Adam(model.parameters(), lr = .0001, betas = (.5,.999))
keep_training = True
epoch_counter = 0
panel_x = 50
panel_y = 50
eng = matlab.engine.start_matlab()
#make a panel
driver_locations = torch.tensor((0.25, 0.25, 0.75, 0.75, 0.25, 0.75, 0.75, 0.25)).view(4,2)
Lx = 0.3
Ly = 0.5
while keep_training:
epoch_counter = epoch_counter + 1
time_start = time.time()
gt = torch.ones(panel_x,panel_y)*-1
model.zero_grad()
#random init starting conditions
while gt[0,0] == -1:
#returns -1 for invalid configuration
gt = produce_freq_response(panel_x,panel_y,1,random.randint(1,panel_x-1),random.randint(1,panel_y-1),random.randint(3,15))
coefs = model(gt.unsqueeze(0).unsqueeze(0).cuda())
print(coefs.size())
#very possible that the interpreter doesnt like torch tensors, might have to go numpy with this
response1, frequencies = eng.get_biquad_response(coefs[0,:].cpu().detach().numpy(),44100,nargout = 2)
response2, temp = eng.get_biquad_response(coefs[1,:].cpu().detach().numpy(),44100,nargout = 2)
response3, temp = eng.get_biquad_response(coefs[2,:].cpu().detach().numpy(),44100,nargout = 2)
response4, temp = eng.get_biquad_response(coefs[3,:].cpu().detach().numpy(),44100,nargout = 2)
responses = torch.stack((response1,response2,response3,response4),dim=-1)
    matlab_panel = eng.Clamped_Panel(driver_locations, responses, frequencies, Lx, Ly)
    matlab_out = eng.view_total_scan(matlab_panel, 200, 0)
    loss = loss_fn(matlab_out, gt)
    loss.backward()
    criterion.step()
print("holy moly!")
| 33.006623 | 131 | 0.647673 |
e40f68af3b51a18af4106a68a0e2666e5541b720
| 4,438 |
py
|
Python
|
client/client.py
|
s-ball/remo_serv
|
66accbd77183db0628a9618cf258656ec2d81316
|
[
"MIT"
] | null | null | null |
client/client.py
|
s-ball/remo_serv
|
66accbd77183db0628a9618cf258656ec2d81316
|
[
"MIT"
] | null | null | null |
client/client.py
|
s-ball/remo_serv
|
66accbd77183db0628a9618cf258656ec2d81316
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2020 SBA- MIT License
import getpass
import argparse
import sys
import cmd
import shlex
from urllib.error import HTTPError
from cryptography.hazmat.primitives import serialization
from client.clientlib import login, Connection
from client import smartcard
# noinspection PyArgumentList
if __name__ == '__main__':
run(sys.argv[1:])
| 30.190476 | 83 | 0.570077 |
e410307635af99e3b3cc52fdda648a0910806c95
| 1,867 |
py
|
Python
|
unfollower.py
|
Sam-F90/unfollower
|
feee9815f440d3a654f77a21ec84680ac92022c1
|
[
"MIT"
] | null | null | null |
unfollower.py
|
Sam-F90/unfollower
|
feee9815f440d3a654f77a21ec84680ac92022c1
|
[
"MIT"
] | null | null | null |
unfollower.py
|
Sam-F90/unfollower
|
feee9815f440d3a654f77a21ec84680ac92022c1
|
[
"MIT"
] | null | null | null |
import tweepy
import datetime
import os
# get keys from evironment variable "TWITTER_KEYS"
TWITTER_API_KEYS = (os.environ.get("TWITTER_KEYS").split(","))
consumer_key,consumer_secret,access_token_key,access_token_secret = TWITTER_API_KEYS
# Authenticate to Twitter
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token_key, access_token_secret)
# establish api
api = tweepy.API(auth, wait_on_rate_limit=True,
wait_on_rate_limit_notify=True)
# verify
try:
api.verify_credentials()
except:
print("Error during authentication")
exit()
# get my id
me = api.me()
# get list of friends (id)
friends = api.friends_ids(me.id)
# get list of followers (id)
follower_ids = []
for follower in tweepy.Cursor(api.followers, me.screen_name).items(api.me().friends_count):
follower_ids.append(follower.id)
# get list of muted friends (id)
muted_friends = api.mutes_ids()
# create list of users who are muted and do not follow you
to_unfollow = []
for friend in friends:
if friend not in follower_ids and friend in muted_friends:
to_unfollow.append(friend)
# create log to record data and string to send to DM
log = [datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")]
dm = [datetime.datetime.now().strftime("%m-%d-%Y %H:%M:%S")]
# unfollow useres in to_unfollow[] and record them in log[] and dm[]
for user in to_unfollow:
# unfollowed = api.destroy_friendship(user)
unfollowed = api.get_user(user)
log.append('unfollowed ' + unfollowed.screen_name + " [" +str(unfollowed.friends_count) + "," + str(unfollowed.followers_count) + "]")
dm.append("@" + unfollowed.screen_name)
# write info to log
with open("unfollow_log.txt","a") as fp:
for line in log:
fp.write(line + "\n")
fp.write("\n")
api.send_direct_message(api.me().id,"\n".join(dm))
print("finished")
| 27.455882 | 139 | 0.719336 |
e4129e9fa1ffc789238869830a16a81f822bb51c
| 2,113 |
py
|
Python
|
alpha/NN/autoencoders/charlie.py
|
DanielBerns/keras-effective-adventure
|
d9bc8c08f769f0c07379d2a3756d040ca14239f2
|
[
"MIT"
] | null | null | null |
alpha/NN/autoencoders/charlie.py
|
DanielBerns/keras-effective-adventure
|
d9bc8c08f769f0c07379d2a3756d040ca14239f2
|
[
"MIT"
] | null | null | null |
alpha/NN/autoencoders/charlie.py
|
DanielBerns/keras-effective-adventure
|
d9bc8c08f769f0c07379d2a3756d040ca14239f2
|
[
"MIT"
] | null | null | null |
# https://medium.com/datadriveninvestor/deep-autoencoder-using-keras-b77cd3e8be95
from keras.datasets import mnist
from keras.layers import Input, Dense
from keras.models import Model
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
(X_train, _), (X_test, _) = mnist.load_data()
X_train = X_train.astype('float32')/255
X_test = X_test.astype('float32')/255
X_train = X_train.reshape(len(X_train), np.prod(X_train.shape[1:]))
X_test = X_test.reshape(len(X_test), np.prod(X_test.shape[1:]))
print(X_train.shape)
print(X_test.shape)
input_img= Input(shape=(784,))
encoded = Dense(units=128, activation='relu')(input_img)
encoded = Dense(units=64, activation='relu')(encoded)
encoded = Dense(units=32, activation='relu')(encoded)
decoded = Dense(units=64, activation='relu')(encoded)
decoded = Dense(units=128, activation='relu')(decoded)
decoded = Dense(units=784, activation='sigmoid')(decoded)
autoencoder=Model(input_img, decoded)
encoder = Model(input_img, encoded)
print('autoencoder')
autoencoder.summary()
print('encoder')
encoder.summary()
autoencoder.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
autoencoder.fit(X_train, X_train,
epochs=50,
batch_size=256,
shuffle=True,
validation_data=(X_test, X_test))
encoded_imgs = encoder.predict(X_test)
predicted = autoencoder.predict(X_test)
plt.figure(figsize=(40, 4))
for i in range(10):
# display original images
ax = plt.subplot(3, 20, i + 1)
plt.imshow(X_test[i].reshape(28, 28))
plt.gray()
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
# display encoded images
ax = plt.subplot(3, 20, i + 1 + 20)
plt.imshow(encoded_imgs[i].reshape(8,4))
plt.gray()
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
# display reconstructed images
ax = plt.subplot(3, 20, 2*20 +i+ 1)
plt.imshow(predicted[i].reshape(28, 28))
plt.gray()
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
plt.show()
| 27.802632 | 87 | 0.69664 |
e41482448ad0c9a9ce2ec0102c5edc24cd4e69ff
| 11,339 |
py
|
Python
|
tests/test_views/test_memberships.py
|
freelancing-solutions/GCP-Based-Database-as-a-Service
|
7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3
|
[
"MIT"
] | 1 |
2021-04-15T19:45:04.000Z
|
2021-04-15T19:45:04.000Z
|
tests/test_views/test_memberships.py
|
freelancing-solutions/pinydesk
|
7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3
|
[
"MIT"
] | 516 |
2021-05-02T11:46:36.000Z
|
2022-03-29T06:09:49.000Z
|
tests/test_views/test_memberships.py
|
freelancing-solutions/pinydesk
|
7d6a12c33db238ca2f748bf4ddea6d2cf3c16da3
|
[
"MIT"
] | 1 |
2021-09-04T22:40:14.000Z
|
2021-09-04T22:40:14.000Z
|
import random
import typing
from datetime import datetime, timedelta
from random import randint
from google.cloud import ndb
from data_service.config.stocks import currency_symbols
from data_service.store.mixins import AmountMixin
from data_service.views.memberships import MembershipsView
from data_service.store.memberships import Memberships, MembershipPlans
from data_service.utils.utils import create_id
from .. import test_app
# noinspection PyUnresolvedReferences
from pytest import raises
# noinspection PyUnresolvedReferences
from pytest_mock import mocker
membership_mock_data: dict = {
"uid": create_id(),
"plan_id": create_id(),
"status": "unpaid",
"date_created": datetime.now(),
"plan_start_date": datetime.date(datetime.now() + timedelta(days=5))
}
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
# noinspection PyShadowingNames
| 45.175299 | 115 | 0.71038 |
e414c3ce91122f63e50497c6f5b8998f2cc88f9e
| 3,893 |
py
|
Python
|
padmini/prakarana/dvitva.py
|
sanskrit/padmini
|
8e7e8946a7d2df9c941f689ea4bc7b6ebb7ca1d0
|
[
"MIT"
] | 1 |
2022-03-01T05:05:04.000Z
|
2022-03-01T05:05:04.000Z
|
padmini/prakarana/dvitva.py
|
sanskrit/padmini
|
8e7e8946a7d2df9c941f689ea4bc7b6ebb7ca1d0
|
[
"MIT"
] | null | null | null |
padmini/prakarana/dvitva.py
|
sanskrit/padmini
|
8e7e8946a7d2df9c941f689ea4bc7b6ebb7ca1d0
|
[
"MIT"
] | null | null | null |
from padmini import filters as f
from padmini import operations as op
from padmini.constants import Tag as T
from padmini.sounds import s
from padmini.prakriya import Term, Prakriya
from padmini.term_views import TermView
from padmini.prakarana.utils import eka_ac
| 31.144 | 81 | 0.554585 |
e415ae5887fb3b1c6bbb5eae6bf773f7d423747e
| 1,152 |
py
|
Python
|
8kyu/a-wolf-in-sheeps-clothing/solution.py
|
Morioki/Code-Katas
|
65bffc0675d3c0f68c60706e95e38ab1dcfc8636
|
[
"MIT"
] | null | null | null |
8kyu/a-wolf-in-sheeps-clothing/solution.py
|
Morioki/Code-Katas
|
65bffc0675d3c0f68c60706e95e38ab1dcfc8636
|
[
"MIT"
] | null | null | null |
8kyu/a-wolf-in-sheeps-clothing/solution.py
|
Morioki/Code-Katas
|
65bffc0675d3c0f68c60706e95e38ab1dcfc8636
|
[
"MIT"
] | null | null | null |
import unittest
| 57.6 | 175 | 0.62934 |
e4160c8bd63d807a761f9c2eb1581d092fef5ff0
| 449 |
py
|
Python
|
modules/dbnd-airflow/src/dbnd_airflow/scheduler/dags/dbnd_dropin_scheduler.py
|
ipattarapong/dbnd
|
7bd65621c46c73e078eb628f994127ad4c7dbd1a
|
[
"Apache-2.0"
] | null | null | null |
modules/dbnd-airflow/src/dbnd_airflow/scheduler/dags/dbnd_dropin_scheduler.py
|
ipattarapong/dbnd
|
7bd65621c46c73e078eb628f994127ad4c7dbd1a
|
[
"Apache-2.0"
] | null | null | null |
modules/dbnd-airflow/src/dbnd_airflow/scheduler/dags/dbnd_dropin_scheduler.py
|
ipattarapong/dbnd
|
7bd65621c46c73e078eb628f994127ad4c7dbd1a
|
[
"Apache-2.0"
] | null | null | null |
import logging
logger = logging.getLogger("dbnd-scheduler")
try:
from dbnd_airflow.scheduler.scheduler_dags_provider import get_dags
# airflow will only scan files containing the text DAG or airflow. This comment performs this function
dags = get_dags()
if dags:
for dag in dags:
globals()[dag.dag_id] = dag
except Exception as e:
logging.exception("Failed to get dags form databand server")
raise e
| 24.944444 | 106 | 0.710468 |
e41914e68f6a31dadb107fe8bb9eaf841bed6173
| 4,268 |
py
|
Python
|
tanacompendium/utils/modelmanagers.py
|
nkoech/tanacompendium
|
b4fd81b23f2c8263735806765d93eb4a70be8aba
|
[
"MIT"
] | null | null | null |
tanacompendium/utils/modelmanagers.py
|
nkoech/tanacompendium
|
b4fd81b23f2c8263735806765d93eb4a70be8aba
|
[
"MIT"
] | null | null | null |
tanacompendium/utils/modelmanagers.py
|
nkoech/tanacompendium
|
b4fd81b23f2c8263735806765d93eb4a70be8aba
|
[
"MIT"
] | null | null | null |
import datetime
from django.contrib.contenttypes.models import ContentType
from django.db.models import FieldDoesNotExist
from django.db.models.base import ObjectDoesNotExist
def create_model_type(instance, model_type, key, slugify, **kwargs):
"""
Create object by model type
:param instance: Model manager instance
:param model_type: Content/model type
:param key: Primary key or slug
:param slugify: Boolean to indicate availability of a slug or primary key
:param kwargs: Fields to be created
:return: Data object
:rtype: Object
"""
model_qs = ContentType.objects.filter(model=model_type)
if model_qs.exists():
any_model = model_qs.first().model_class()
if slugify:
obj_qs = any_model.objects.filter(slug=key)
else:
obj_qs = any_model.objects.filter(pk=key)
if obj_qs.exists() and obj_qs.count() == 1:
field_values = {
'content_type': model_qs.first(),
'object_id': obj_qs.first().id
}
field_values.update(kwargs)
data_instance = instance.model(**field_values)
data_instance.save()
return data_instance
return None
def model_instance_filter(call_instance, current_instance, model_manager):
"""
Object query based on a model instance
:param call_instance: Instance of the model calling this method
:param current_instance: Instance of the model manager class this method would be called from
:param model_manager: The model manager class
:return: Object due to instantiation of the calling model class
    :rtype: Object/record
"""
parent_obj = super(model_manager, current_instance)
content_type = ContentType.objects.get_for_model(call_instance.__class__)
try:
qs = parent_obj.filter(content_type=content_type, object_id=call_instance.id)
except parent_obj.DoesNotExist:
return None
return qs
def model_foreign_key_qs(call_instance, current_instance, model_manager):
"""
Object query based on foreign key
:param call_instance: Instance of the model calling this method
:param current_instance: Instance of the model manager class this method would be called from
:param model_manager: The model manager class
:return: Object query based on foreign key otherwise return none
:rtype: Object/record
"""
model_name = str(call_instance._meta.model_name) # Foreignkey name should be similar to related model name
qs_filter = {model_name: call_instance.id}
obj_qs = super(model_manager, current_instance).filter(**qs_filter)
return obj_qs
def model_type_filter(current_instance, obj_qs, model_manager):
"""
Object query based on a model class
:param current_instance: Instance of the model manager class this method would be called from
:param obj_qs: Initial object query
:param model_manager: The model manager class
:return: Object query based on the model type/class otherwise return none
:rtype: Object/record
"""
if obj_qs.exists():
if model_field_exists(obj_qs, 'content_type'):
for obj in obj_qs.iterator():
try:
qs = super(model_manager, current_instance).filter(content_type=obj.content_type) and obj_qs
return qs
except ObjectDoesNotExist:
return None
return obj_qs
def model_field_exists(instance, field_name):
"""
Check if field exists
:param instance: Instance of the model manager class this method would be called from
:param field_name: Field name to be checked
:return: True if field exists otherwise return false
:rtype: Boolean
"""
try:
instance.model._meta.get_field(field_name)
return True
except FieldDoesNotExist:
return False
def get_year_choices():
"""
Get years as model choices
:return: Years
"""
year_choice = []
for r in range(1950, (datetime.datetime.now().year + 1)):
year_choice.append((r, r))
return year_choice
def get_datetime_now():
"""
Get current year
    :return: Current year
"""
return datetime.datetime.now().year
| 34.419355 | 112 | 0.684161 |
e4193bf7c1b3cd811dde985083067c06d301bbfb
| 2,588 |
py
|
Python
|
deletion_test.py
|
tjake/cassandra-dtest
|
df49e4f16b2ed8b9c38f767fffd796ae3d9cc6f3
|
[
"Apache-2.0"
] | null | null | null |
deletion_test.py
|
tjake/cassandra-dtest
|
df49e4f16b2ed8b9c38f767fffd796ae3d9cc6f3
|
[
"Apache-2.0"
] | null | null | null |
deletion_test.py
|
tjake/cassandra-dtest
|
df49e4f16b2ed8b9c38f767fffd796ae3d9cc6f3
|
[
"Apache-2.0"
] | null | null | null |
from dtest import Tester
import os, sys, time
from ccmlib.cluster import Cluster
from tools import require, since
from jmxutils import make_mbean, JolokiaAgent
| 34.052632 | 91 | 0.632921 |