hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 11
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
251
| max_stars_repo_name
stringlengths 4
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
251
| max_issues_repo_name
stringlengths 4
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
251
| max_forks_repo_name
stringlengths 4
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.05M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.04M
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
53c79195c421ab20eafd11d18287a51c1a99fb79
| 779 |
py
|
Python
|
python_minecraft_tut_2021/weatherCraft.py
|
LeGamermc/ursina_tutorials
|
f0ad518be3a02cdb52f27c87f2f70817b4d0e8b0
|
[
"MIT"
] | 13 |
2021-09-01T01:38:13.000Z
|
2022-03-29T01:43:50.000Z
|
python_minecraft_tut_2021/weatherCraft.py
|
LeGamermc/ursina_tutorials
|
f0ad518be3a02cdb52f27c87f2f70817b4d0e8b0
|
[
"MIT"
] | 14 |
2021-08-01T05:00:22.000Z
|
2022-02-03T21:53:23.000Z
|
python_minecraft_tut_2021/weatherCraft.py
|
LeGamermc/ursina_tutorials
|
f0ad518be3a02cdb52f27c87f2f70817b4d0e8b0
|
[
"MIT"
] | 31 |
2021-08-09T04:08:11.000Z
|
2022-03-23T11:06:15.000Z
|
"""
Weather functions.
"""
from ursina import color, window, time
from nMap import nMap
| 22.911765 | 50 | 0.519897 |
53c796e3204469330950f66fd76505dd80903be6
| 8,086 |
py
|
Python
|
davenetgame/dispatch/dispatcher.py
|
davefancella/davenetgame
|
f16c36539a3898ab4a021e63feef7fe497e5bc69
|
[
"Apache-2.0"
] | null | null | null |
davenetgame/dispatch/dispatcher.py
|
davefancella/davenetgame
|
f16c36539a3898ab4a021e63feef7fe497e5bc69
|
[
"Apache-2.0"
] | null | null | null |
davenetgame/dispatch/dispatcher.py
|
davefancella/davenetgame
|
f16c36539a3898ab4a021e63feef7fe497e5bc69
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
'''
Copyright 2016 Dave Fancella
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import threading, time
from davenetgame.dispatch.base import DispatcherBase
from davenetgame.protocol import connection
## @file dispatcher
#
# This file contains the standard, generic EventDispatcher class. It's the one you use if
# the library doesn't support your preferred game engine, or if you'd rather manage the library
# independently of your game engine.
## This is the standard EventDispatcher.
## This is a special server-oriented EventDispatcher that provides for an interactive console
# on the server when run in a terminal. This is probably most useful for testing the library,
# though it's not unheard of for a server to run in a terminal and have a console.
## This class implements console commands. To create a new console command, simply make an instance of
# this class, giving all the keyword arguments in the constructor.
# @param 'command' : the name of the command, what the user types to use it.
# @param 'callback' : a function that will process the command when the user types it.
# @param 'helpshort' : short help text, usually one line of text, preferably not more than 50 characters.
# In output, it will be prepended with "Usage: "
# @param 'helplong' : long help text, can be as long as needed, as many lines as needed. Do not put
# line endings, however. Those will be added as needed. You may put line endings to
# signify paragraph breaks, if need be.
## This class makes the console input non-blocking.
| 34.703863 | 157 | 0.589661 |
53c80402ffddb5cb55023d530bbbc0ac778cca90
| 416 |
py
|
Python
|
account/migrations/0003_customuser_phone_number.py
|
zenofewords/thebrushstash
|
7d53bd5f22a2daa1011bb502bce56e735504dc84
|
[
"MIT"
] | null | null | null |
account/migrations/0003_customuser_phone_number.py
|
zenofewords/thebrushstash
|
7d53bd5f22a2daa1011bb502bce56e735504dc84
|
[
"MIT"
] | 18 |
2019-12-05T07:27:52.000Z
|
2022-02-12T20:50:22.000Z
|
account/migrations/0003_customuser_phone_number.py
|
zenofewords/thebrushstash
|
7d53bd5f22a2daa1011bb502bce56e735504dc84
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.7 on 2019-11-17 17:19
from django.db import migrations, models
| 21.894737 | 63 | 0.620192 |
53c8f59b4f5c675f0331d7886d8de3f13a17f272
| 322 |
py
|
Python
|
03_Estrutura_de_Repeticao/13_potenciacao.py
|
gabrieldcpadilha/ListaDeExercicios-PythonBrasil
|
a92d477468bde5eac8987a26ea79af2ffeb6ad81
|
[
"MIT"
] | null | null | null |
03_Estrutura_de_Repeticao/13_potenciacao.py
|
gabrieldcpadilha/ListaDeExercicios-PythonBrasil
|
a92d477468bde5eac8987a26ea79af2ffeb6ad81
|
[
"MIT"
] | 10 |
2020-08-19T04:31:52.000Z
|
2020-09-21T22:48:29.000Z
|
03_Estrutura_de_Repeticao/13_potenciacao.py
|
gabrieldcpadilha/ListaDeExercicios-PythonBrasil
|
a92d477468bde5eac8987a26ea79af2ffeb6ad81
|
[
"MIT"
] | null | null | null |
# Reads a base and a strictly positive exponent from stdin, then computes
# base ** expoente by repeated multiplication and prints the result.
base = int(input('Digite o valor da base: '))
expoente = 0
# Keep prompting until the user supplies a positive exponent.
while expoente <= 0:
    expoente = int(input('Digite o valor do expoente: '))
    if expoente <= 0:
        print('O expoente tem que ser positivo')
potencia = 1
# Multiply the base into the accumulator 'expoente' times.
for c in range(1, expoente + 1):
    potencia *= base
print(f'{base}^ {expoente} = {potencia}')
| 21.466667 | 57 | 0.624224 |
53cb133ef9cebb74671b9c48466b895d83fd6371
| 1,313 |
py
|
Python
|
accounting/accounting/doctype/journal_entry/journal_entry.py
|
noahjacob/Accounting
|
6be90c4f82867156532ca71b1faa9d017e3269af
|
[
"MIT"
] | 1 |
2021-04-05T06:22:16.000Z
|
2021-04-05T06:22:16.000Z
|
accounting/accounting/doctype/journal_entry/journal_entry.py
|
mohsinalimat/Accounting
|
6be90c4f82867156532ca71b1faa9d017e3269af
|
[
"MIT"
] | null | null | null |
accounting/accounting/doctype/journal_entry/journal_entry.py
|
mohsinalimat/Accounting
|
6be90c4f82867156532ca71b1faa9d017e3269af
|
[
"MIT"
] | 2 |
2021-04-05T06:22:17.000Z
|
2021-04-10T06:05:36.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Noah Jacob and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.utils import flt
from accounting.accounting.general_ledger import make_gl_entry, make_reverse_gl_entry
| 29.840909 | 113 | 0.760853 |
53cba4400da1d1c4d684c06ae9715e48948281c2
| 568 |
py
|
Python
|
polls/models.py
|
mmeooo/test_django
|
0364f43549d4082df7100d11c67dd42dc2a82b32
|
[
"Apache-2.0"
] | null | null | null |
polls/models.py
|
mmeooo/test_django
|
0364f43549d4082df7100d11c67dd42dc2a82b32
|
[
"Apache-2.0"
] | null | null | null |
polls/models.py
|
mmeooo/test_django
|
0364f43549d4082df7100d11c67dd42dc2a82b32
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
# Create your models here.
# :
# 2 ok
# link, string-> CharField, data-> DecimalField
# max_length= 100
| 33.411765 | 71 | 0.746479 |
53ccd38a42372cb4c8b6646892db6cc4fe7a6bd1
| 722 |
py
|
Python
|
ipcam/test_snap.py
|
jack139/HF
|
4810f4ee2faf9ab51c867e105addc139da2adfd1
|
[
"BSD-3-Clause"
] | 10 |
2019-04-07T20:13:23.000Z
|
2021-12-07T06:23:52.000Z
|
ipcam/test_snap.py
|
jack139/HF
|
4810f4ee2faf9ab51c867e105addc139da2adfd1
|
[
"BSD-3-Clause"
] | 1 |
2020-05-29T16:11:22.000Z
|
2020-05-29T16:11:22.000Z
|
ipcam/test_snap.py
|
jack139/HF
|
4810f4ee2faf9ab51c867e105addc139da2adfd1
|
[
"BSD-3-Clause"
] | 6 |
2017-10-20T10:53:33.000Z
|
2020-04-24T06:34:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys,os,time
# Require exactly one sub-command argument: "check" or "show".
if len(sys.argv)<2:
    print "usage: test_snap.py <check|show>"
    sys.exit(2)
kam_cmd=sys.argv[1]
# Root directory holding one sub-directory of snapshots per camera.
path='/var/data2/snap_store'
a=os.listdir(path)
a.remove('535e1a5c1ecffb2fa372fd7d') # this is a camera not used in HF system
if kam_cmd=='show' or kam_cmd=='check':
    # Snapshot buckets appear to be named int(epoch / 600), i.e. one
    # directory per 10-minute window — presumably; confirm against the
    # writer side. last_sub is the current window index.
    last_sub=int(time.time()/600)
    for i in a:
        sub='%s/%s' % (path, i)
        b=os.listdir(sub)
        # 'capture' is a non-bucket entry; drop it before sorting.
        if 'capture' in b:
            b.remove('capture')
        b.sort()
        # b[-1] is the newest bucket for this camera.
        sub2='%s/%s' % (sub, b[-1])
        c=os.listdir(sub2)
        # "show" prints every camera; "check" prints only cameras whose
        # newest bucket is more than 3 windows behind the current one.
        if kam_cmd=='show' or last_sub-int(b[-1])>3:
            print "%s - %d, %s - %d, (%d)" % (i, len(b), b[-1], len(c), last_sub-int(b[-1]))
else:
    print "usage: test_snap.py <check|show>"
    sys.exit(2)
| 21.878788 | 83 | 0.613573 |
53cd4bfd1a117d3dcaa2d01161d38a59434bcf2f
| 5,608 |
py
|
Python
|
sources/datasets/client_dataset_definitions/client_dataset.py
|
M4rukku/impact_of_non_iid_data_in_federated_learning
|
c818db03699c82e42217d56f8ddd4cc2081c8bb1
|
[
"MIT"
] | null | null | null |
sources/datasets/client_dataset_definitions/client_dataset.py
|
M4rukku/impact_of_non_iid_data_in_federated_learning
|
c818db03699c82e42217d56f8ddd4cc2081c8bb1
|
[
"MIT"
] | null | null | null |
sources/datasets/client_dataset_definitions/client_dataset.py
|
M4rukku/impact_of_non_iid_data_in_federated_learning
|
c818db03699c82e42217d56f8ddd4cc2081c8bb1
|
[
"MIT"
] | null | null | null |
import functools
import gc
from abc import ABC
from sources.datasets.client_dataset_definitions.client_dataset_loaders.client_dataset_loader import ClientDatasetLoader, DatasetComponents
from sources.datasets.client_dataset_definitions.client_dataset_processors.client_dataset_processor import ClientDatasetProcessor
from sources.utils.exception_definitions import OutsideOfContextError
| 38.675862 | 139 | 0.652461 |
53ce7501b9e972d2df63aa7b92834c10ac73f623
| 2,377 |
py
|
Python
|
src/rmt/kinematics.py
|
mfrigerio17/robot-model-tools
|
97e25d5c4d1386c503d37a70b57400022c5b7ca0
|
[
"BSD-3-Clause"
] | 2 |
2020-06-16T09:23:46.000Z
|
2021-01-20T09:11:43.000Z
|
src/rmt/kinematics.py
|
mfrigerio17/robot-model-tools
|
97e25d5c4d1386c503d37a70b57400022c5b7ca0
|
[
"BSD-3-Clause"
] | null | null | null |
src/rmt/kinematics.py
|
mfrigerio17/robot-model-tools
|
97e25d5c4d1386c503d37a70b57400022c5b7ca0
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import numpy
import kgprim.motions as motions
import kgprim.ct.frommotions as frommotions
import kgprim.ct.repr.mxrepr as mxrepr
import motiondsl.motiondsl as motdsl
logger = logging.getLogger(__name__)
| 34.955882 | 119 | 0.738746 |
53cfe05a29410444b4904c98e9ea7e4826833ee4
| 4,702 |
py
|
Python
|
awx/main/management/commands/run_dispatcher.py
|
atr0s/awx
|
388ef077c384f4c5296d4870d3b0cf0e6718db80
|
[
"Apache-2.0"
] | null | null | null |
awx/main/management/commands/run_dispatcher.py
|
atr0s/awx
|
388ef077c384f4c5296d4870d3b0cf0e6718db80
|
[
"Apache-2.0"
] | null | null | null |
awx/main/management/commands/run_dispatcher.py
|
atr0s/awx
|
388ef077c384f4c5296d4870d3b0cf0e6718db80
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import os
import logging
from multiprocessing import Process
from django.conf import settings
from django.core.cache import cache as django_cache
from django.core.management.base import BaseCommand
from django.db import connection as django_connection
from kombu import Connection, Exchange, Queue
from awx.main.dispatch import get_local_queuename, reaper
from awx.main.dispatch.control import Control
from awx.main.dispatch.pool import AutoscalePool
from awx.main.dispatch.worker import AWXConsumer, TaskWorker
logger = logging.getLogger('awx.main.dispatch')
| 37.616 | 96 | 0.576989 |
53d0271d7e3d9c0d0f41f088e5b38f2630dec774
| 5,318 |
py
|
Python
|
pcdet/utils/box_coder_utils.py
|
Nuri-benbarka/PCDet
|
8da66ead3bb1120db2fa919187948c8c134e85ae
|
[
"Apache-2.0"
] | 7 |
2020-11-28T03:38:51.000Z
|
2021-12-31T07:44:19.000Z
|
pcdet/utils/box_coder_utils.py
|
Nuri-benbarka/PCDet
|
8da66ead3bb1120db2fa919187948c8c134e85ae
|
[
"Apache-2.0"
] | null | null | null |
pcdet/utils/box_coder_utils.py
|
Nuri-benbarka/PCDet
|
8da66ead3bb1120db2fa919187948c8c134e85ae
|
[
"Apache-2.0"
] | 1 |
2021-04-01T15:54:21.000Z
|
2021-04-01T15:54:21.000Z
|
import numpy as np
import torch
from . import common_utils
# Module is import-only; there is nothing to do when run as a script.
if __name__ == '__main__':
    pass
| 35.691275 | 118 | 0.507334 |
53d12a0522be9c1f94c8076c489fd23a012f880f
| 15,175 |
py
|
Python
|
utils/utils.py
|
jainajinkya/deep_bingham
|
2ea85b3ea2af579eab36567091b88a1bbf4a627b
|
[
"MIT"
] | null | null | null |
utils/utils.py
|
jainajinkya/deep_bingham
|
2ea85b3ea2af579eab36567091b88a1bbf4a627b
|
[
"MIT"
] | null | null | null |
utils/utils.py
|
jainajinkya/deep_bingham
|
2ea85b3ea2af579eab36567091b88a1bbf4a627b
|
[
"MIT"
] | null | null | null |
""" Utilities for learning pipeline."""
from __future__ import print_function
import copy
import dill
import hashlib
import itertools
import third_party.deep_bingham.bingham_distribution as ms
import math
import numpy as np
import os
import scipy
import scipy.integrate as integrate
import scipy.special
import sys
import torch
from pathos.multiprocessing import ProcessingPool as Pool
from pathos.multiprocessing import cpu_count
def convert_euler_to_quaternion(roll, yaw, pitch):
    """Convert Euler angles given in degrees to a unit quaternion.

    The axis convention matches the original implementation: roll is the
    rotation about z, yaw about y, and pitch about x.

    Arguments:
        roll: Rotation about the z axis, in degrees.
        yaw: Rotation about the y axis, in degrees.
        pitch: Rotation about the x axis, in degrees.

    Returns:
        numpy.ndarray: Normalized quaternion in [w, x, y, z] order.
    """
    # Half angles in radians; the cos/sin pairs feed the product formula.
    cy, sy = math.cos(math.radians(roll) * 0.5), math.sin(math.radians(roll) * 0.5)
    cp, sp = math.cos(math.radians(yaw) * 0.5), math.sin(math.radians(yaw) * 0.5)
    cr, sr = math.cos(math.radians(pitch) * 0.5), math.sin(math.radians(pitch) * 0.5)

    quat = np.array([
        cy * cp * cr + sy * sp * sr,  # w
        cy * cp * sr - sy * sp * cr,  # x
        sy * cp * sr + cy * sp * cr,  # y
        sy * cp * cr - cy * sp * sr,  # z
    ])
    # Guard against accumulated rounding: return an exactly unit quaternion.
    return quat / np.linalg.norm(quat)
def radians(degree_tensor):
    """Convert a tensor of angles from degrees to radians.

    Arguments:
        degree_tensor (torch.Tensor): Angles in degree format.

    Returns:
        torch.Tensor: The same angles expressed in radians.
    """
    # Same operation order as the original (divide first, then scale by pi)
    # so the floating-point result is bit-identical.
    return degree_tensor / 180 * math.pi
def generate_coordinates(coords):
    """
    Return the Cartesian product coords x coords x coords as three parallel
    1-D tensors: (x[i], y[i], z[i]) enumerates every possible triple.

    NOTE(review): `.repeat(1, n)` here follows the torch.Tensor API (tile n
    times along dim 1). With a plain numpy array, numpy's signature
    repeat(repeats, axis) would be interpreted differently — presumably
    `coords` is actually a torch tensor despite the original docstring
    saying "numpy array"; confirm against callers.

    Parameters:
        coords: 1-D tensor of coordinate values (length n).

    Returns:
        x: first coordinate of each of the n**3 triples
        y: second coordinate of each of the n**3 triples
        z: third coordinate of each of the n**3 triples
    """
    # x varies slowest, z fastest: classic nested-loop enumeration flattened.
    x = coords.reshape(-1, 1).repeat(1, len(coords) * len(coords)).flatten()
    y = coords.reshape(-1, 1).repeat(1, len(coords)).flatten().repeat(len(coords))
    z = coords.reshape(-1, 1).flatten().repeat(len(coords)*len(coords))
    return x, y, z
def ensure_dir_exists(path):
    """Create directory *path* (including parents) if it does not exist.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the original
    check-then-create pattern, which could raise ``FileExistsError`` when
    another process created the directory between the ``exists`` test and
    the ``makedirs`` call (TOCTOU race).

    Arguments:
        path: Directory path to create. Safe to call when it already exists.
    """
    os.makedirs(path, exist_ok=True)
def load_lookup_table(path):
    """Deserialize a lookup table from a dill file.

    The returned tuple depends on which kind of table was serialized. For a
    Bingham table it is (table_type, options, res_tensor, coords); for a
    von Mises table it is (options, res_tensor), where

        options (dict): The options used to generate the lookup table.
        res_tensor (numpy.ndarray): The actual lookup table data.
        coords (numpy.ndarray): Coordinates at which the table was evaluated
            (Bingham case only).

    Arguments:
        path: Location of the dill-serialized lookup table.
    """
    assert os.path.exists(path), "Lookup table file not found."
    with open(path, "rb") as table_file:
        return dill.load(table_file)
def eaad_von_mises(kappas, integral_options=None):
    """ Expected Absolute Angular Deviation of a von Mises random vector.

    Arguments:
        kappas: Von Mises kappa parameters for roll, pitch, yaw.
        integral_options: Options to pass on to the scipy integrator for
            computing the eaad and the von Mises normalization constant.

    Returns:
        The EAAD: the triple integral of the absolute angular deviation
        divided by the closed-form normalization constant.
    """
    if integral_options is None:
        integral_options = {"epsrel": 1e-2, "epsabs": 1e-2}
    param_mu = np.array([0., 0., 0.])  # radians
    # NOTE(review): quat_mu is computed but never used in this function —
    # presumably integrand_aad (defined elsewhere in this module) gets the
    # mean orientation some other way; confirm before removing.
    quat_mu = convert_euler_to_quaternion(
        math.degrees(param_mu[0]), math.degrees(param_mu[1]),
        math.degrees(param_mu[2])
    )
    param_kappa = kappas
    # Closed-form normalization constant for the product of three von Mises
    # densities: 8*pi^3 * I0(k_roll) * I0(k_pitch) * I0(k_yaw).
    direct_norm_const = 8.0 * (np.pi ** 3) \
        * scipy.special.iv(0, param_kappa[0]) \
        * scipy.special.iv(0, param_kappa[1]) \
        * scipy.special.iv(0, param_kappa[2])
    # integrand_aad is defined elsewhere in this module; each angle is
    # integrated over the full [0, 2*pi) circle.
    eaad_int = integrate.tplquad(
        integrand_aad,
        0.0, 2.0 * np.pi,  # phi3
        lambda x: 0.0, lambda x: 2. * np.pi,  # phi2
        lambda x, y: 0.0, lambda x, y: 2. * np.pi,  # phi1
        **integral_options
    )
    return eaad_int[0]/direct_norm_const
def eaad_bingham(bingham_z, integral_options=None):
    """ Expected Absolute Angular Deviation of a Bingham random vector.

    Arguments:
        bingham_z: Bingham dispersion parameter in the format expected by the
            manstats BinghamDistribution class.
        integral_options: Options to pass on to the scipy integrator for
            computing the eaad and the bingham normalization constant.

    Returns:
        The EAAD: the deviation integral divided by the numerically computed
        Bingham normalization constant.
    """
    if integral_options is None:
        integral_options = {"epsrel": 1e-4, "epsabs": 1e-4}
    # Identity orientation matrix; the normalization constant is computed
    # numerically using the same integrator options as the EAAD integral.
    bd = ms.BinghamDistribution(
        np.eye(4), bingham_z,
        {"norm_const_mode": "numerical",
         "norm_const_options": integral_options}
    )
    # integrand is defined elsewhere in this module. Note phi1 and phi2 run
    # over [0, pi] here, unlike the all-[0, 2*pi] domain of the von Mises case.
    eaad_int = integrate.tplquad(
        integrand,
        0.0, 2.0 * np.pi,  # phi3
        lambda x: 0.0, lambda x: np.pi,  # phi2
        lambda x, y: 0.0, lambda x, y: np.pi,  # phi1
        **integral_options
    )
    return eaad_int[0] / bd.norm_const
def build_bd_lookup_table(table_type, options, path=None):
    """
    Builds a lookup table for interpolating the bingham normalization
    constant. If a lookup table with the given options already exists, it is
    loaded and returned instead of building a new one.

    Arguments:
        table_type: Type of lookup table used. May be 'uniform' or 'nonuniform'
        options: Dict cotaining type specific options.
            If type is "uniform" this dict must contain:
            "bounds" = Tuple (lower_bound, upper_bound) representing bounds.
            "num_points" = Number of points per dimension.
            If type is "nonuniform" this dict must contain a key "coords" which
            is a numpy arrays representing the coordinates at which the
            interpolation is evaluated.
        path: absolute path for the lookup table (optional). The default is to
            create a hash based on the options and to use this for constructing
            a file name and placing the file in the precomputed folder.

    Returns:
        The lookup table data (as produced by _compute_bd_lookup_table or
        loaded from the serialized file).
    """
    # The cache file name is a SHA-256 over the table type and the pickled
    # options, so different configurations never collide.
    hash_obj = hashlib.sha256()
    hash_obj.update(table_type.encode('utf-8'))
    hash_obj.update(dill.dumps(options))
    config_hash = hash_obj.hexdigest()
    if not path:
        path = os.path.dirname(__file__) \
            + "/../precomputed/lookup_{}.dill".format(config_hash)
    # Load existing table or create new one.
    if os.path.exists(path):
        with open(path, "rb") as dillfile:
            (serialized_type, serialized_options, res_table, coords) \
                = dill.load(dillfile)
            # Re-hash what was stored and verify it matches the requested
            # configuration (guards against stale or mismatched cache files).
            hash_obj = hashlib.sha256()
            hash_obj.update(serialized_type)
            hash_obj.update(dill.dumps(serialized_options))
            file_config_hash = hash_obj.hexdigest()
            assert file_config_hash == config_hash, \
                "Serialized lookup table does not match given type & options."
    elif table_type == "uniform":
        # Number of points per axis.
        (lbound, rbound) = options["bounds"]
        num_points = options["num_points"]
        assert num_points > 1, \
            "Grid must have more than one point per dimension."
        nc_options = {"epsrel": 1e-3, "epsabs": 1e-7}
        coords = np.linspace(lbound, rbound, num_points)
        res_table = _compute_bd_lookup_table(coords, nc_options)
        with open(path, "wb") as dillfile:
            dill.dump((table_type, options, res_table, coords), dillfile)
    elif table_type == "nonuniform":
        nc_options = {"epsrel": 1e-3, "epsabs": 1e-7}
        coords = options["coords"]
        res_table = _compute_bd_lookup_table(coords, nc_options)
        with open(path, "wb") as dillfile:
            dill.dump((table_type, options, res_table, coords), dillfile)
    else:
        sys.exit("Unknown lookup table type")
    return res_table
def build_vm_lookup_table(options, path=None):
    """
    Builds a lookup table for interpolating the von Mises normalization
    constant (the original docstring said "bingham", but this builds the
    von Mises table via _compute_vm_lookup_table). If a lookup table with
    the given options already exists, it is loaded and returned instead of
    building a new one.

    Arguments:
        options: Dict cotaining table options. It must contain a key "coords"
            which is a numpy arrays representing the coordinates at which the
            interpolation is evaluated.
        path: absolute path for the lookup table (optional). The default is to
            create a hash based on the options and to use this for constructing
            a file name and placing the file in the precomputed folder.

    Returns:
        The lookup table data (as produced by _compute_vm_lookup_table or
        loaded from the serialized file).
    """
    # Cache file name is a SHA-256 over the pickled options.
    hash_obj = hashlib.sha256()
    hash_obj.update(dill.dumps(options))
    config_hash = hash_obj.hexdigest()
    if not path:
        path = os.path.dirname(__file__) \
            + "/../precomputed/lookup_{}.dill".format(config_hash)
    # Load existing table or create new one.
    if os.path.exists(path):
        with open(path, "rb") as dillfile:
            (serialized_options, res_table) \
                = dill.load(dillfile)
            # Verify the stored options hash to the requested configuration.
            hash_obj = hashlib.sha256()
            hash_obj.update(dill.dumps(serialized_options))
            file_config_hash = hash_obj.hexdigest()
            assert file_config_hash == config_hash, \
                "Serialized lookup table does not match given type & options."
    else:
        coords = options["coords"]
        res_table = _compute_vm_lookup_table(coords)
        with open(path, "wb") as dillfile:
            dill.dump((options, res_table), dillfile)
    return res_table
| 34.805046 | 83 | 0.623394 |
53d21a61b1f0af656cef94761b86e69e5114d1b2
| 8,108 |
py
|
Python
|
cli_ui.py
|
obatsis/Distributed-NTUA
|
0bf39163b64aaefb2576be01337e0ec6e026ce6d
|
[
"MIT"
] | null | null | null |
cli_ui.py
|
obatsis/Distributed-NTUA
|
0bf39163b64aaefb2576be01337e0ec6e026ce6d
|
[
"MIT"
] | null | null | null |
cli_ui.py
|
obatsis/Distributed-NTUA
|
0bf39163b64aaefb2576be01337e0ec6e026ce6d
|
[
"MIT"
] | null | null | null |
import requests
import os
from PyInquirer import style_from_dict, Token, prompt
import sys
import utils.config as config
import utils.ends as ends
from utils.colorfy import *
from auto.testing import test_trans
import time
import json
# PyInquirer prompt color scheme used by the CLI.
style = style_from_dict({
    Token.QuestionMark: '#E91E63 bold',
    Token.Selected: '#673AB7 bold',
    Token.Instruction: '#0bf416',
    Token.Answer: '#2196f3 bold',
    Token.Question: '#0bf416 bold',
})
if __name__ == '__main__':
    # Expect: cli_ui.py -p <port>
    if len(sys.argv) < 3:
        print("!! you must tell me the port. Ex. -p 5000 !!")
        exit(0)
    # NOTE(review): if the first argument is not "-p"/"-P", my_port is never
    # bound and the client(...) call below raises NameError — confirm whether
    # other flags are ever expected here.
    if sys.argv[1] in ("-p", "-P"):
        my_port = sys.argv[2]
    # Resolve this host's IPv4 address on the configured interface by
    # shelling out to `ip addr` and extracting the inet field.
    my_ip = os.popen('ip addr show ' + config.NETIFACE + ' | grep "\<inet\>" | awk \'{ print $2 }\' | awk -F "/" \'{ print $1 }\'').read().strip()
    client(my_ip, my_port)
| 34.21097 | 170 | 0.604465 |
53d38a232396aeecc14c7708fa90954da15a7129
| 21,306 |
py
|
Python
|
Contents/scripts/siweighteditor/weight.py
|
jdrese/SIWeightEditor
|
0529c1a366b955f4373acd2e2f08f63b7909ff82
|
[
"MIT"
] | 1 |
2018-12-12T15:39:13.000Z
|
2018-12-12T15:39:13.000Z
|
Contents/scripts/siweighteditor/weight.py
|
jdrese/SIWeightEditor
|
0529c1a366b955f4373acd2e2f08f63b7909ff82
|
[
"MIT"
] | null | null | null |
Contents/scripts/siweighteditor/weight.py
|
jdrese/SIWeightEditor
|
0529c1a366b955f4373acd2e2f08f63b7909ff82
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from maya import mel
from maya import cmds
from . import lang
from . import common
import os
import json
import re
def transfer_weight(skinMesh, transferedMesh, transferWeight=True, returnInfluences=False, logTransfer=True):
    '''
    Copy the skin binding (and optionally the weights) of one skinned mesh
    onto one or more target meshes.

    Arguments:
        skinMesh: source mesh; if a list is given only the first item is used.
        transferedMesh: target mesh or list of target meshes.
        transferWeight: when True, copy the weight values with copySkinWeights.
        returnInfluences: when True, return the source influence list instead
            of a plain success flag.
        logTransfer: when True, print a log line for each bind/weight transfer.

    Returns:
        False when the source has no skinCluster; otherwise the source
        influence list (if returnInfluences) or True.
    '''
    massege01 = lang.Lang(
        en=': It does not perform the transfer of weight because it is not a skin mesh.',
        ja=u': '
    ).output()
    massege02 = lang.Lang(
        en='Transfer the weight:',
        ja=u':'
    ).output()
    massege03 = lang.Lang(
        en='Transfer bind influences:',
        ja=u':'
    ).output()
    if isinstance(skinMesh, list):  # if a list was passed, use its first item
        skinMesh = skinMesh[0]
    # Find the source skinCluster in the mesh's history.
    srcSkinCluster = cmds.ls(cmds.listHistory(skinMesh), type='skinCluster')
    # srcSkinCluster = cmds.listConnections(skinMesh+'.inMesh', s=True, d=False)
    if not srcSkinCluster:
        if logTransfer:
            print skinMesh + massege01
        return False  # abort: source is not a skinned mesh
    # Read the bind settings off the source skinCluster so a new cluster on
    # the target can be created with identical options.
    srcSkinCluster = srcSkinCluster[0]
    skinningMethod = cmds.getAttr(srcSkinCluster + ' .skm')
    dropoffRate = cmds.getAttr(srcSkinCluster + ' .dr')
    maintainMaxInfluences = cmds.getAttr(srcSkinCluster + ' .mmi')
    maxInfluences = cmds.getAttr(srcSkinCluster + ' .mi')
    bindMethod = cmds.getAttr(srcSkinCluster + ' .bm')
    normalizeWeights = cmds.getAttr(srcSkinCluster + ' .nw')
    influences = cmds.skinCluster(srcSkinCluster, q=True, inf=True)  # query influence joints
    # Normalize the target argument to a list.
    if not isinstance(transferedMesh, list):
        temp = transferedMesh
        transferedMesh = []
        transferedMesh.append(temp)
    for dst in transferedMesh:
        # Park the target's children under a temporary dummy parent so the
        # (re)bind does not disturb the hierarchy.
        dummy = common.TemporaryReparent().main(mode='create')
        common.TemporaryReparent().main(dst,dummyParent=dummy, mode='cut')
        shapes = cmds.listRelatives(dst, s=True, pa=True, type='mesh')
        if not shapes:  # target has no mesh shape
            continue  # skip this target
        # Look for an existing skinCluster on the target.
        dstSkinCluster = cmds.ls(cmds.listHistory(shapes[0]), type='skinCluster')
        if not dstSkinCluster:
            # No cluster yet: bind the target with the same influences and
            # the bind options copied from the source.
            dstSkinCluster = cmds.skinCluster(
                dst,
                influences,
                omi=maintainMaxInfluences,
                mi=maxInfluences,
                dr=dropoffRate,
                sm=skinningMethod,
                nw=normalizeWeights,
                tsb=True,
            )
            if logTransfer:
                print massege03 + '[' + skinMesh + '] >>> [' + dst + ']'
        dstSkinCluster = dstSkinCluster[0]
        if transferWeight:
            cmds.copySkinWeights(
                ss=srcSkinCluster,
                ds=dstSkinCluster,
                surfaceAssociation='closestPoint',
                influenceAssociation=['name', 'closestJoint', 'oneToOne'],
                normalize=True,
                noMirror=True
            )
            if logTransfer:
                print massege02 + '[' + skinMesh + '] >>> [' + dst + ']'
        # Restore the parked children to the target.
        common.TemporaryReparent().main(dst,dummyParent=dummy, mode='parent')
        # Remove the temporary dummy parent.
        common.TemporaryReparent().main(dummyParent=dummy, mode='delete')
    if returnInfluences:
        return influences
    else:
        return True
def symmetry_weight(srcNode=None, dstNode=None, symWeight=True):
    '''
    Mirror skin weights from a source node onto a destination node.

    Arguments:
        srcNode: source (skinned) node; returns immediately when None.
        dstNode: destination node; label setup still runs when None, but no
            weights are mirrored.
        symWeight: when False, only set up joint labels without mirroring.
    '''
    if srcNode is None:
        return
    srcShapes = cmds.listRelatives(srcNode, s=True, pa=True, type='mesh')
    if srcShapes:
        srcSkinCluster = cmds.ls(cmds.listHistory(srcNode), type='skinCluster')
        # Only proceed when the source actually has a skinCluster.
        if srcSkinCluster:
            # Label every influence joint so copySkinWeights can match
            # left/right influences by label.
            skinJointAll = cmds.skinCluster(srcSkinCluster, q=True, inf=True)  # query influences
            for skinJoint in skinJointAll:
                # Set side/type labels; keep the label display hidden.
                joint_label(skinJoint, visibility=False)
            if symWeight is False or dstNode is None:
                return
            # Ensure the destination is bound with the same influences
            # (weights themselves are copied by the mirror call below).
            transfer_weight(srcNode, dstNode, transferWeight=False, returnInfluences=True)
            dstShapes = cmds.listRelatives(dstNode, s=True, pa=True, type='mesh')
            dstSkinCluster = cmds.listConnections(dstShapes[0] + '.inMesh', s=True, d=False)
            # Mirror across the YZ plane, matching influences by label.
            cmds.copySkinWeights(ss=srcSkinCluster[0], ds=dstSkinCluster[0],
                                 mirrorMode='YZ', surfaceAssociation='closestComponent',
                                 influenceAssociation='label', normalize=True)
def joint_label(object, visibility=False):
    '''
    Assign side/type/otherType labels to joint nodes so Maya can match
    mirrored influences by label.

    Arguments:
        object: a joint or list of joints; non-joint nodes are skipped.
        visibility: value for the joint's drawLabel attribute.
    '''
    # Load the left/right naming rules (prefix, substring, suffix patterns).
    left_list_list, right_list_list = load_joint_label_rules()
    # Normalize the argument to a list.
    if not isinstance(object, list):
        temp = object
        object = []
        object.append(temp)
    for skinJoint in object:
        objTypeName = cmds.objectType(skinJoint)
        if objTypeName == 'joint':
            split_name = skinJoint.split('|')[-1]
            # Determine the side from the name: 0 = center, 1 = left,
            # 2 = right (index into [left_rules, right_rules] + 1).
            side = 0
            side_name = ''
            for i, (l_list, r_list) in enumerate(zip(left_list_list, right_list_list)):
                for j, lr_list in enumerate([l_list, r_list]):
                    for k, lr in enumerate(lr_list):
                        if i == 0:
                            # Rule set 0: prefix match.
                            if re.match(lr, split_name):
                                side = j + 1
                        if i == 1:
                            # Rule set 1: substring match anywhere in the name.
                            if re.search(lr, split_name):
                                side = j + 1
                        if i == 2:
                            # Rule set 2: suffix match (both strings reversed).
                            if re.match(lr[::-1], split_name[::-1]):
                                side = j + 1
                        if side:  # first matching rule wins
                            side_name = lr
                            break
                    if side:
                        break
                if side:
                    break
            #print 'joint setting :', split_name, side, side_name
            # Side label: 0 center, 1 left, 2 right.
            cmds.setAttr(skinJoint + '.side', side)
            # Type 18 = "Other": the actual name goes into otherType.
            cmds.setAttr(skinJoint + '.type', 18)
            new_joint_name = split_name.replace(side_name.replace('.', ''), '')
            # Label value is the joint name with the side token stripped, so
            # mirrored joints share the same label.
            cmds.setAttr(skinJoint + '.otherType', new_joint_name, type='string')
            # Show/hide the label in the viewport.
            cmds.setAttr(skinJoint + '.drawLabel', visibility)
        else:
            print(str(skinJoint) + ' : ' + str(objTypeName) + ' Skip Command')
| 43.129555 | 120 | 0.555102 |
53d3daf836c3d211bfbd295aeb46edb04453a89a
| 1,350 |
py
|
Python
|
pyConTextNLP/__init__.py
|
Blulab-Utah/pyConTextPipeline
|
d4060f89d54f4db56914832033f8ce589ee3c181
|
[
"Apache-2.0"
] | 1 |
2021-04-30T11:18:32.000Z
|
2021-04-30T11:18:32.000Z
|
pyConTextNLP/__init__.py
|
Blulab-Utah/pyConTextPipeline
|
d4060f89d54f4db56914832033f8ce589ee3c181
|
[
"Apache-2.0"
] | null | null | null |
pyConTextNLP/__init__.py
|
Blulab-Utah/pyConTextPipeline
|
d4060f89d54f4db56914832033f8ce589ee3c181
|
[
"Apache-2.0"
] | 1 |
2020-06-28T01:51:56.000Z
|
2020-06-28T01:51:56.000Z
|
#Copyright 2010 Brian E. Chapman
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""This is an alternative implementation of the pyConText package where I make
use of graphs to indicate relationships between targets and modifiers. Nodes of
thegraphs are the targets and modifiers identified in the text; edges of the
graphs are relationships between the targets. This provides for much simpler
code than what exists in the other version of pyConText where each object has a
dictionary of __modifies and __modifiedby that must be kept in sync with each
other.
Also it is hoped that the use of a directional graph could ultimately simplify
our itemData structures as we could chain together items"""
import os
# Read __version__ from version.py by exec-ing it into a scratch namespace,
# so the version is available without importing the (heavier) package modules.
version = {}
with open(os.path.join(os.path.dirname(__file__),"version.py")) as f0:
    exec(f0.read(), version)
__version__ = version['__version__']
| 43.548387 | 79 | 0.786667 |
53d42695123c2326facf4f279256b1c384089fd3
| 78,742 |
py
|
Python
|
pypeit/metadata.py
|
rcooke-ast/PYPIT
|
0cb9c4cb422736b855065a35aefc2bdba6d51dd0
|
[
"BSD-3-Clause"
] | null | null | null |
pypeit/metadata.py
|
rcooke-ast/PYPIT
|
0cb9c4cb422736b855065a35aefc2bdba6d51dd0
|
[
"BSD-3-Clause"
] | null | null | null |
pypeit/metadata.py
|
rcooke-ast/PYPIT
|
0cb9c4cb422736b855065a35aefc2bdba6d51dd0
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Provides a class that handles the fits metadata required by PypeIt.
.. include common links, assuming primary doc root is up one directory
.. include:: ../include/links.rst
"""
import os
import io
import string
from copy import deepcopy
import datetime
from IPython import embed
import numpy as np
import yaml
from astropy import table, coordinates, time, units
from pypeit import msgs
from pypeit import utils
from pypeit.core import framematch
from pypeit.core import flux_calib
from pypeit.core import parse
from pypeit.core import meta
from pypeit.io import dict_to_lines
from pypeit.par import PypeItPar
from pypeit.par.util import make_pypeit_file
from pypeit.bitmask import BitMask
# TODO: Turn this into a DataContainer
# Initially tried to subclass this from astropy.table.Table, but that
# proved too difficult.
# TODO: Is there a reason why this is not an attribute of
# PypeItMetaData?
def row_match_config(row, config, spectrograph):
    """
    Query whether a row from the fitstbl matches the input configuration.

    Args:
        row (astropy.table.Row): From fitstbl
        config (dict): Defines the configuration
        spectrograph (pypeit.spectrographs.spectrograph.Spectrograph):
            Used to grab the rtol value for float meta (e.g. dispangle)

    Returns:
        bool: True if the row matches the input configuration
    """
    checks = []
    for key, cfg_value in config.items():
        if isinstance(cfg_value, float):
            # Floating configs (e.g. grating angle) match within the
            # spectrograph-defined relative tolerance; a missing row value
            # never matches.
            row_value = row[key]
            if row_value is None:
                ok = False
            else:
                ok = np.abs(cfg_value - row_value) / cfg_value < spectrograph.meta[key]['rtol']
            checks.append(ok)
        else:
            # np.all allows for array-valued entries in the table
            # (e.g. binning).
            checks.append(np.all(cfg_value == row[key]))
    return np.all(checks)
| 42.817836 | 122 | 0.575411 |
53d54a4c34c0a67e36d2d017230ceb288acd1564
| 2,341 |
py
|
Python
|
aql/aql/main/aql_builtin_tools.py
|
menify/sandbox
|
32166c71044f0d5b414335b2b6559adc571f568c
|
[
"MIT"
] | null | null | null |
aql/aql/main/aql_builtin_tools.py
|
menify/sandbox
|
32166c71044f0d5b414335b2b6559adc571f568c
|
[
"MIT"
] | null | null | null |
aql/aql/main/aql_builtin_tools.py
|
menify/sandbox
|
32166c71044f0d5b414335b2b6559adc571f568c
|
[
"MIT"
] | null | null | null |
import os.path
import shutil
import errno
from aql.nodes import Builder, FileBuilder
from .aql_tools import Tool
__all__ = ( "ExecuteCommand",
"InstallBuilder",
"BuiltinTool",
)
"""
Unique Value - name + type
value
node
node = ExecuteCommand('gcc --help -v')
tools.cpp.cxx
node = ExecuteCommand( tools.cpp.cxx, '--help -v' )
node = ExecuteMethod( target = my_function )
dir_node = CopyFiles( prog_node, target = dir_name )
dir_node = CopyFilesAs( prog_node, target = dir_name )
dir_node = MoveFiles( prog_node, )
dir_node = MoveFilesAs( prog_node )
dir_node = RemoveFiles( prog_node )
node = FindFiles( dir_node )
dir_node = FileDir( prog_node )
"""
#//===========================================================================//
#//===========================================================================//
#//===========================================================================//
| 22.509615 | 80 | 0.529688 |
53d57360a984bc0c7e7afecf352b5a5635dc9a06
| 3,303 |
py
|
Python
|
cms/test_utils/project/placeholderapp/models.py
|
stefanw/django-cms
|
048ec9e7a529549d51f4805fdfbcd50ea1e624b0
|
[
"BSD-3-Clause"
] | null | null | null |
cms/test_utils/project/placeholderapp/models.py
|
stefanw/django-cms
|
048ec9e7a529549d51f4805fdfbcd50ea1e624b0
|
[
"BSD-3-Clause"
] | null | null | null |
cms/test_utils/project/placeholderapp/models.py
|
stefanw/django-cms
|
048ec9e7a529549d51f4805fdfbcd50ea1e624b0
|
[
"BSD-3-Clause"
] | null | null | null |
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from cms.models.fields import PlaceholderField
from cms.utils import get_language_from_request
from cms.utils.urlutils import admin_reverse
from hvad.models import TranslatableModel, TranslatedFields
| 33.363636 | 113 | 0.737511 |
53d609582a8fdb847888342336e2fc62ce309ea0
| 159 |
py
|
Python
|
150-Challenges/Challenges 80 - 87/Challenge 84.py
|
DGrifferty/Python
|
d725301664db2cbcfd5c4f5974745b4d81c8e28a
|
[
"Apache-2.0"
] | null | null | null |
150-Challenges/Challenges 80 - 87/Challenge 84.py
|
DGrifferty/Python
|
d725301664db2cbcfd5c4f5974745b4d81c8e28a
|
[
"Apache-2.0"
] | null | null | null |
150-Challenges/Challenges 80 - 87/Challenge 84.py
|
DGrifferty/Python
|
d725301664db2cbcfd5c4f5974745b4d81c8e28a
|
[
"Apache-2.0"
] | null | null | null |
# 084
# Ask the user to type in their postcode. Display the first two
# letters in uppercase.
postcode = input('Enter your postcode: ')
# First two characters of the postcode, normalised to uppercase.
print(postcode[:2].upper())
| 22.714286 | 62 | 0.716981 |
53d70d3013eebf509bd463bbe169adf9205bf22b
| 4,367 |
py
|
Python
|
api_youtube.py
|
OnoArnaldo/PythonApiYoutube
|
8507eac234cd3d05a223db3beebd10412505bcf8
|
[
"MIT"
] | 2 |
2019-11-15T16:46:36.000Z
|
2020-11-30T07:34:26.000Z
|
api_youtube.py
|
OnoArnaldo/PythonApiYoutube
|
8507eac234cd3d05a223db3beebd10412505bcf8
|
[
"MIT"
] | null | null | null |
api_youtube.py
|
OnoArnaldo/PythonApiYoutube
|
8507eac234cd3d05a223db3beebd10412505bcf8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import sys
import json
import urllib2
import codecs
BASE_DIR = os.path.dirname(__file__)
BASE_URL = 'https://www.googleapis.com/youtube/v3/'
API_CHANNELS = 'channels'
API_PLAYLIST = 'playlistItems'
API_KEY = 'YOUR KEY'
CHANNELS = [
'videosimprovaveis',
'nerdologia',
'Kurzgesagt',
'1veritasium',
'minutephysics',
'xadrezverbal',
'estevaoslow',
'Vsauce',
'braincraftvideo',
'CienciaTodoDia',
]
if __name__ == '__main__':
    # Crude CLI: flags are matched anywhere in argv, and both steps may run
    # in a single invocation ('-channel' then '-playlist').
    args = sys.argv[1:]
    if '-channel' in args:
        # Fetch channel data for the hard-coded CHANNELS list.  ApiChannel
        # is defined elsewhere in this file; presumably its run() persists
        # results to ApiChannel.FILE_NAME -- TODO confirm.
        channel = ApiChannel(CHANNELS)
        channel.run()
    if '-playlist' in args:
        # Reload the channel data written by the '-channel' step, then
        # process each channel's playlist.
        channels = ApiPlayList.import_channels(ApiChannel.FILE_NAME)
        play_list = ApiPlayList(channels)
        play_list.run()
| 24.672316 | 90 | 0.587589 |
53d750a045a189f59e633e7a1ce562b90e7d821b
| 2,744 |
py
|
Python
|
python_and_ebpf/train.py
|
be4r/ssh-miner-detection
|
47003db1d9f72ae44d5a27e92d0109d5111bec35
|
[
"MIT"
] | null | null | null |
python_and_ebpf/train.py
|
be4r/ssh-miner-detection
|
47003db1d9f72ae44d5a27e92d0109d5111bec35
|
[
"MIT"
] | null | null | null |
python_and_ebpf/train.py
|
be4r/ssh-miner-detection
|
47003db1d9f72ae44d5a27e92d0109d5111bec35
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from sklearn.tree import DecisionTreeClassifier
import pickle
import numpy as np
no = [b'runc:[2:INIT]', b'containerssh-ag', b'apt',b'dpkg']
| 24.283186 | 95 | 0.622085 |
53d8b7928beadd81971824eb5f4c9a1dab184d41
| 1,318 |
py
|
Python
|
data/parse_hipp_data.py
|
slinderman/pyhsmm-spiketrains
|
462d8d2c59bd2e7c39d20d624bd8b289a31baaa2
|
[
"MIT"
] | 10 |
2016-04-23T00:23:20.000Z
|
2022-01-05T19:28:08.000Z
|
data/parse_hipp_data.py
|
slinderman/pyhsmm-spiketrains
|
462d8d2c59bd2e7c39d20d624bd8b289a31baaa2
|
[
"MIT"
] | 1 |
2017-06-24T06:37:12.000Z
|
2017-07-07T17:19:59.000Z
|
data/parse_hipp_data.py
|
slinderman/pyhsmm-spiketrains
|
462d8d2c59bd2e7c39d20d624bd8b289a31baaa2
|
[
"MIT"
] | 9 |
2016-03-29T21:37:46.000Z
|
2022-01-05T19:28:11.000Z
|
import os
import numpy as np
from scipy.io import loadmat
data = loadmat("data/hipp_2dtrack_a/smJun03p2.dat")
N = 49
data = reshape(data, 3, length(data)/3);
data = data';
size(data) % 43799-by-3
fclose(fid);
% sampling time
Ts = 0.0333;
duration = size(data,1) * Ts; % in second
Tmax = data(end, 3);
Tmin = data(1,3);
time_edges = [Tmin: 0.25: Tmax]; % 250 ms per bin
% interpolated rat's position in time bins
Rat_pos = interp1(data(:, 3), [data(:, 1), data(:, 2)], time_edges');
vel = abs(diff(Rat_pos, 1, 1 )); % row difference
vel = [vel(1, :); vel];
% 250 ms
rat_vel = 4 * sqrt(vel(:, 1).^2 + vel(:, 2).^2); % unit: cm/s
vel_ind = find(rat_vel >= 10); % RUN velocity threshold
% using RUN only
T = length(vel_ind);
% using Run + pause periods
T = length(time_edges);
AllSpikeData = zeros(C,T);
for i=1:C
str = ['Cell_num' num2str(i)];
fid = fopen(str, 'r');
cell_data = fscanf(fid, '%f');
cell_data = reshape(cell_data, 3, length(cell_data)/3)';
spike_time = cell_data(:, 3);
spike_pos = cell_data(:, 1:2);
[spike_time_count, bin] = histc(spike_time, time_edges); % column vector
% if analyzing the RUN period only uncomment this
% spike_time_count = spike_time_count(vel_ind);
AllSpikeData(i, :) = spike_time_count';
fclose(fid);
end
| 22.338983 | 78 | 0.634294 |
53d94f243224facafe883070b86bd959182c98e6
| 9,455 |
py
|
Python
|
repokid/tests/test_roledata.py
|
tomdev/repokid
|
e1a4839290bafccfaa304d87bbdeae85b9dc80aa
|
[
"Apache-2.0"
] | null | null | null |
repokid/tests/test_roledata.py
|
tomdev/repokid
|
e1a4839290bafccfaa304d87bbdeae85b9dc80aa
|
[
"Apache-2.0"
] | null | null | null |
repokid/tests/test_roledata.py
|
tomdev/repokid
|
e1a4839290bafccfaa304d87bbdeae85b9dc80aa
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mock import patch
import repokid.utils.roledata
from repokid.role import Role
from repokid.tests.test_repokid_cli import ROLE_POLICIES, ROLES
# Test fixture keyed by role ARN -- presumably mimics an Aardvark
# access-advisor payload (verify against repokid's Aardvark client).
# Each entry lists per-service last-authenticated times in epoch
# milliseconds (int(time.time()) * 1000); 0 means never used.  The
# 'unused_ec2' role deliberately carries an unused ec2 entry so repo
# logic has something to flag.
AARDVARK_DATA = {
    "arn:aws:iam::123456789012:role/all_services_used": [
        {"lastAuthenticated": int(time.time()) * 1000,
         "serviceNamespace": "iam"},
        {"lastAuthenticated": int(time.time()) * 1000,
         "serviceNamespace": "s3"}],
    "arn:aws:iam::123456789012:role/unused_ec2": [
        {"lastAuthenticated": int(time.time()) * 1000,
         "serviceNamespace": "iam"},
        {"lastAuthenticated": 0,
         "serviceNamespace": "ec2"}],
    "arn:aws:iam::123456789012:role/young_role": [
        {"lastAuthenticated": int(time.time()) * 1000,
         "serviceNamespace": "iam"},
        {"lastAuthenticated": int(time.time()) * 1000,
         "serviceNamespace": "s3"}]
}
| 51.950549 | 120 | 0.639662 |
53da2e6911920cb3cc789891eed24c27f4a325c6
| 1,838 |
py
|
Python
|
DL_Scripts/image_recognition.py
|
Matnay/KPIT_Deep_Learning
|
14f3815fc2829db9bede86c31f23e721f6423f79
|
[
"MIT"
] | 1 |
2020-05-01T15:28:12.000Z
|
2020-05-01T15:28:12.000Z
|
DL_Scripts/image_recognition.py
|
Matnay/KPIT_Deep_Learning
|
14f3815fc2829db9bede86c31f23e721f6423f79
|
[
"MIT"
] | null | null | null |
DL_Scripts/image_recognition.py
|
Matnay/KPIT_Deep_Learning
|
14f3815fc2829db9bede86c31f23e721f6423f79
|
[
"MIT"
] | null | null | null |
import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import String
from cv_bridge import CvBridge
import cv2
import numpy as np
import tensorflow as tf
import classify_image
if __name__ == '__main__':
classify_image.setup_args()
rospy.init_node('rostensorflow')
tensor = RosTensorFlow()
tensor.main()
| 36.039216 | 94 | 0.661589 |
53dd0a97f61bddb70bdbb1861eb823497caf7e52
| 21,202 |
py
|
Python
|
plugins/grouputils.py
|
aviskumar/speedo
|
758e8ac1fdeeb0b72c3a57742032ca5c79f0b2fa
|
[
"BSD-3-Clause"
] | null | null | null |
plugins/grouputils.py
|
aviskumar/speedo
|
758e8ac1fdeeb0b72c3a57742032ca5c79f0b2fa
|
[
"BSD-3-Clause"
] | null | null | null |
plugins/grouputils.py
|
aviskumar/speedo
|
758e8ac1fdeeb0b72c3a57742032ca5c79f0b2fa
|
[
"BSD-3-Clause"
] | 3 |
2021-10-12T08:17:01.000Z
|
2021-12-21T01:17:54.000Z
|
# Copyright (C) 2020-2021 by TeamSpeedo@Github, < https://github.com/TeamSpeedo >.
#
# This file is part of < https://github.com/TeamSpeedo/FridayUserBot > project,
# and is released under the "GNU v3.0 License Agreement".
# Please see < https://github.com/TeamSpeedo/blob/master/LICENSE >
#
# All rights reserved.
import asyncio
import os
import time
from asyncio import sleep
from pyrogram.types import ChatPermissions
import pyrogram
from main_start.core.decorators import speedo_on_cmd
from main_start.helper_func.basic_helpers import (
edit_or_reply,
edit_or_send_as_file,
get_text,
get_user,
is_admin_or_owner,
)
from main_start.helper_func.logger_s import LogIt
from main_start.helper_func.plugin_helpers import (
convert_to_image,
convert_vid_to_vidnote,
generate_meme,
)
| 33.076443 | 146 | 0.621215 |
53dd16873458e07dbdbf665e77a30bc20865dfcb
| 16,809 |
py
|
Python
|
carberretta/bot/cogs/feeds.py
|
Nereg/Carberretta
|
01e25bc8ece4c310ab541304e8809dfdd3eec3b8
|
[
"BSD-3-Clause"
] | null | null | null |
carberretta/bot/cogs/feeds.py
|
Nereg/Carberretta
|
01e25bc8ece4c310ab541304e8809dfdd3eec3b8
|
[
"BSD-3-Clause"
] | null | null | null |
carberretta/bot/cogs/feeds.py
|
Nereg/Carberretta
|
01e25bc8ece4c310ab541304e8809dfdd3eec3b8
|
[
"BSD-3-Clause"
] | null | null | null |
"""
FEEDS
Handles YouTube and Twitch feed notifications.
"""
import datetime as dt
import discord
import feedparser
from apscheduler.triggers.cron import CronTrigger
from discord.ext import commands
from carberretta import Config
from carberretta.utils import DEFAULT_EMBED_COLOUR, chron
# Accent colours for notification embeds: 0x9146FF for live streams
# (presumably Twitch's brand purple) and a blue for VOD uploads.
LIVE_EMBED_COLOUR = 0x9146FF
VOD_EMBED_COLOUR = 0x3498DB
| 44.586207 | 166 | 0.50467 |
53dd795653b27c0823e1d06e1e8c37e9cd9ead3e
| 5,676 |
py
|
Python
|
gdb/proxy.py
|
abaire/gdb_sniffer
|
f330193c65a39ce6abb01f25737ca967a0af9629
|
[
"Unlicense"
] | 1 |
2021-12-22T04:04:22.000Z
|
2021-12-22T04:04:22.000Z
|
gdb/proxy.py
|
abaire/gdb_sniffer
|
f330193c65a39ce6abb01f25737ca967a0af9629
|
[
"Unlicense"
] | null | null | null |
gdb/proxy.py
|
abaire/gdb_sniffer
|
f330193c65a39ce6abb01f25737ca967a0af9629
|
[
"Unlicense"
] | null | null | null |
"""Provides a GDB logging proxy.
See https://sourceware.org/gdb/onlinedocs/gdb/Remote-Protocol.html
See https://www.embecosm.com/appnotes/ean4/embecosm-howto-rsp-server-ean4-issue-2.html
"""
from __future__ import annotations
import logging
import socket
from typing import Optional
from typing import Tuple
from .packet import GDBPacket
from net import ip_transport
logger = logging.getLogger(__name__)
| 35.698113 | 112 | 0.617512 |
53ddde78f62a83aa118f0171be55b4c481a15868
| 1,373 |
py
|
Python
|
pylayers/em/openems/test/Rect_Waveguide.py
|
usmanwardag/pylayers
|
2e8a9bdc993b2aacc92610a9c7edf875c6c7b24a
|
[
"MIT"
] | 143 |
2015-01-09T07:50:20.000Z
|
2022-03-02T11:26:53.000Z
|
pylayers/em/openems/test/Rect_Waveguide.py
|
usmanwardag/pylayers
|
2e8a9bdc993b2aacc92610a9c7edf875c6c7b24a
|
[
"MIT"
] | 148 |
2015-01-13T04:19:34.000Z
|
2022-03-11T23:48:25.000Z
|
pylayers/em/openems/test/Rect_Waveguide.py
|
usmanwardag/pylayers
|
2e8a9bdc993b2aacc92610a9c7edf875c6c7b24a
|
[
"MIT"
] | 95 |
2015-05-01T13:22:42.000Z
|
2022-03-15T11:22:28.000Z
|
from openems.openems import *
# A simple simulation
#
# FDTD Simulation Setting
#
F = FDTD()
F.add(Exc(typ='Sinus',f0=100000))
F.add(BoundaryCond(['PMC','PMC','PEC','PEC','MUR','MUR']))
#
# CSX (Geometry setting)
#
C = CSX()
# The Box is added as a property
C.add(Excitation('excitation'),p=Box(P1=[-10,-10,0],P2=[10,10,0],Pr=0))
C.add(DumpBox('Et'),p=Box(P1=[-10,0,-10],P2=[10,0,30],Pr=0))
C.add(RectilinearGrid(np.arange(-10,11,1),np.arange(-10,11,1),np.arange(-10,11,1)))
C.add(Polyhedron())
S = OpenEMS(F,C)
S.save(filename='RectWaveguide.xml')
#gnd = Matter('gnd')
#sphere = Matter('sphere')
#patch = Matter('patch')
#substrate = Matter('substrate',typ='Ma',Epsilon="3.38",Kappa="0.00046")
#cdgsht = Matter('copper',typ='Cs',conductivity="56e6",thickness="40e-6")
#b1 = Box(P1=[0,0,0],P2=[100,100,200],Pr=0)
#b2 = Box(P1=[0,0,0],P2=[10,20,30],Pr=10)
#b4 = Box(P1=[-10,0,-10],P2=[10,0,30],Pr=0)
#s1 = Sphere(P=[0,0,0],R=100,Pr=50)
#dump = DumpBox()
#C.add(gnd)
#C.add(patch)
#C.add(substrate)
#C.add(sphere)
#C.add(cdgsht)
#C.add(exc)
#C.add(dump)
#C.set('gnd',b1)
#C.set('gnd',b2)
#C.set('sphere',s1)
#C.set('copper',b1)
#C.set('copper',b2)
#C.set('Et',b4)
#C.save(filename='structure.xml')
##C.AddBox(prop='ConductingSheet',name='copper',P1=[0,-50,200],P2=[1000,50,200],Pri=10)
##C.AddCylinder(prop='Metal',name='cyl0',P1=[0,0,0],P2=[0,0,100],Rad=50,Pri=10)
#
| 25.90566 | 87 | 0.632921 |
53debe5489e3f53b73538719925c989ad4ce399d
| 381 |
py
|
Python
|
DataPreprocessing/_segment_Y.py
|
vd1371/CBSA
|
f2b3f03c91ccd9ec02c2331f43573d7d6e72fd47
|
[
"MIT"
] | null | null | null |
DataPreprocessing/_segment_Y.py
|
vd1371/CBSA
|
f2b3f03c91ccd9ec02c2331f43573d7d6e72fd47
|
[
"MIT"
] | null | null | null |
DataPreprocessing/_segment_Y.py
|
vd1371/CBSA
|
f2b3f03c91ccd9ec02c2331f43573d7d6e72fd47
|
[
"MIT"
] | null | null | null |
import numpy as np
| 19.05 | 50 | 0.677165 |
53df3216d619040fc2551d1e35eda4fe2e177604
| 3,868 |
py
|
Python
|
WifiEnigma/BattleAI/question.py
|
Puzzlebox-IMT/Puzzlebox
|
6b80e22a4aee3228140692bd6352de18b2f6a96d
|
[
"MIT"
] | null | null | null |
WifiEnigma/BattleAI/question.py
|
Puzzlebox-IMT/Puzzlebox
|
6b80e22a4aee3228140692bd6352de18b2f6a96d
|
[
"MIT"
] | null | null | null |
WifiEnigma/BattleAI/question.py
|
Puzzlebox-IMT/Puzzlebox
|
6b80e22a4aee3228140692bd6352de18b2f6a96d
|
[
"MIT"
] | null | null | null |
import mysql.connector
import random
from voice import synthetize_voice, delete_wav
if (__name__ == '__main__'):
    # Manual smoke test: generate a question for argument 1 and present it.
    # questionAI and tell_question are defined elsewhere in this file;
    # given the voice imports above, tell_question presumably speaks the
    # question aloud -- TODO confirm.
    result = questionAI(1)
    tell_question(result)
| 31.447154 | 140 | 0.520941 |
53e02e91fc0737f80d21208f1511392c2bcd37d1
| 875 |
py
|
Python
|
toy-amr/flux_functions.py
|
IanHawke/toy-amr
|
1f616791993ccd83cc6034616c08e09fa4ba310d
|
[
"MIT"
] | 5 |
2019-05-27T18:13:45.000Z
|
2021-01-06T09:42:28.000Z
|
toy-amr/flux_functions.py
|
IanHawke/toy-amr
|
1f616791993ccd83cc6034616c08e09fa4ba310d
|
[
"MIT"
] | 1 |
2019-10-21T13:34:48.000Z
|
2019-12-11T22:11:17.000Z
|
toy-amr/flux_functions.py
|
IanHawke/toy-amr
|
1f616791993ccd83cc6034616c08e09fa4ba310d
|
[
"MIT"
] | 2 |
2019-05-08T18:00:36.000Z
|
2021-05-27T16:57:57.000Z
|
import numpy
| 39.772727 | 79 | 0.609143 |
53e0390b65014122e4de16c06f08712946e2a007
| 2,084 |
py
|
Python
|
pi/auth.py
|
vmagamedov/pi
|
6ee98af69b757d96aa4eddc32513309e0fe05d1d
|
[
"BSD-3-Clause"
] | 7 |
2016-06-24T04:49:48.000Z
|
2020-06-29T17:34:12.000Z
|
pi/auth.py
|
vmagamedov/pi
|
6ee98af69b757d96aa4eddc32513309e0fe05d1d
|
[
"BSD-3-Clause"
] | 11 |
2016-06-19T13:16:59.000Z
|
2019-11-02T13:14:19.000Z
|
pi/auth.py
|
vmagamedov/pi
|
6ee98af69b757d96aa4eddc32513309e0fe05d1d
|
[
"BSD-3-Clause"
] | null | null | null |
import re
import json
import base64
import codecs
import os.path
import asyncio
import subprocess
_PREFIX = 'docker-credential-'
| 25.108434 | 72 | 0.644914 |
53e05b14f47fe11d4c2e4b89d1492b45ec46b072
| 5,199 |
py
|
Python
|
etl/transform.py
|
ACWI-SOGW/ngwmn_monitoring_locations_etl
|
e9ebfebbc5fa349a58669fb1d9944786f26729c3
|
[
"CC0-1.0"
] | 1 |
2020-10-07T14:44:30.000Z
|
2020-10-07T14:44:30.000Z
|
etl/transform.py
|
ACWI-SOGW/ngwmn_monitoring_locations_etl
|
e9ebfebbc5fa349a58669fb1d9944786f26729c3
|
[
"CC0-1.0"
] | 7 |
2020-10-14T19:13:10.000Z
|
2021-10-06T20:04:38.000Z
|
etl/transform.py
|
ACWI-SOGW/ngwmn_monitoring_locations_etl
|
e9ebfebbc5fa349a58669fb1d9944786f26729c3
|
[
"CC0-1.0"
] | 1 |
2020-10-02T14:43:18.000Z
|
2020-10-02T14:43:18.000Z
|
"""
Transform the data into a form that
works with the WELL_REGISTRY_STG table.
"""
import re
# Lookup tables translating the free-text codes from the API into the
# integer ids used by the WELL_REGISTRY_STG table.  mapping_factory is
# defined elsewhere in this module; it presumably wraps each dict in a
# lookup function -- TODO confirm its behavior for unknown codes.
WELL_TYPES = {
    'surveillance': 1,
    'trend': 2,
    'special': 3,
}
map_well_type = mapping_factory(WELL_TYPES)
WELL_PURPOSE = {
    'dedicated monitoring/observation': 1,
    'other': 2
}
map_well_purpose = mapping_factory(WELL_PURPOSE)
QW_WELL_CHARS = {
    'background': 1,
    'suspected/anticipated changes': 2,
    'known changes': 3
}
map_qw_well_chars = mapping_factory(QW_WELL_CHARS)
# Water-level characteristics add an explicit 'unknown' code (999) that
# the water-quality table above does not have.
WL_WELL_CHARS = {
    'background': 1,
    'suspected/anticipated changes': 2,
    'known changes': 3,
    'unknown': 999
}
map_wl_well_chars = mapping_factory(WL_WELL_CHARS)
def transform_mon_loc_data(ml_data):
    """
    Map the fields from the API JSON response to
    the fields in the WELL_REGISTRY_STG table with
    appropriate foreign key values.
    """
    def _nested(outer_key, inner_key):
        # Defensive two-level lookup: the API may return a null or
        # incomplete parent object for these reference fields.
        try:
            return ml_data[outer_key][inner_key]
        except (AttributeError, KeyError, TypeError):
            return None

    rec = {}
    # Agency reference block.
    rec['AGENCY_CD'] = ml_data['agency']['agency_cd']
    rec['AGENCY_NM'] = ml_data['agency']['agency_nm']
    rec['AGENCY_MED'] = ml_data['agency']['agency_med']
    # Site identity and location.
    rec['SITE_NO'] = ml_data['site_no']
    rec['SITE_NAME'] = ml_data['site_name']
    rec['DEC_LAT_VA'] = ml_data['dec_lat_va']
    rec['DEC_LONG_VA'] = ml_data['dec_long_va']
    rec['HORZ_DATUM'] = ml_data['horizontal_datum']
    rec['ALT_VA'] = ml_data['alt_va']
    rec['ALT_DATUM_CD'] = ml_data['altitude_datum']
    # National aquifer: both fields fall back to None together when the
    # parent object is missing or malformed (matches the staging table's
    # all-or-nothing expectation for this pair).
    try:
        nat_aqfr_cd = ml_data['nat_aqfr']['nat_aqfr_cd']
        nat_aqfr_desc = ml_data['nat_aqfr']['nat_aqfr_desc']
    except (AttributeError, KeyError, TypeError):
        nat_aqfr_cd = None
        nat_aqfr_desc = None
    rec['NAT_AQUIFER_CD'] = nat_aqfr_cd
    rec['NAT_AQFR_DESC'] = nat_aqfr_desc
    rec['LOCAL_AQUIFER_NAME'] = ml_data['local_aquifer_name']
    rec['AQFR_CHAR'] = ml_data['aqfr_type']
    # Water-quality (QW) network fields.
    rec['QW_SN_FLAG'] = to_flag(ml_data['qw_sn_flag'])
    rec['QW_BASELINE_FLAG'] = to_flag(ml_data['qw_baseline_flag'])
    rec['QW_WELL_CHARS'] = map_qw_well_chars(ml_data['qw_well_chars'])
    rec['QW_WELL_PURPOSE'] = map_well_purpose(ml_data['qw_well_purpose'])
    rec['QW_SYS_NAME'] = ml_data['qw_network_name']
    # Water-level (WL) network fields.
    rec['WL_SN_FLAG'] = to_flag(ml_data['wl_sn_flag'])
    rec['WL_BASELINE_FLAG'] = to_flag(ml_data['wl_baseline_flag'])
    rec['WL_WELL_CHARS'] = map_wl_well_chars(ml_data['wl_well_chars'])
    rec['WL_WELL_PURPOSE'] = map_well_purpose(ml_data['wl_well_purpose'])
    rec['WL_SYS_NAME'] = ml_data['wl_network_name']
    # Provider columns are not supplied by the API response.
    rec['DATA_PROVIDER'] = None
    rec['DISPLAY_FLAG'] = to_flag(ml_data['display_flag'])
    rec['WL_DATA_PROVIDER'] = None
    rec['QW_DATA_PROVIDER'] = None
    rec['LITH_DATA_PROVIDER'] = None
    rec['CONST_DATA_PROVIDER'] = None
    rec['WELL_DEPTH'] = ml_data['well_depth']
    rec['LINK'] = ml_data['link']
    # Audit metadata.
    rec['INSERT_DATE'] = ml_data['insert_date']
    rec['UPDATE_DATE'] = ml_data['update_date']
    rec['WL_WELL_PURPOSE_NOTES'] = ml_data['wl_well_purpose_notes']
    rec['QW_WELL_PURPOSE_NOTES'] = ml_data['qw_well_purpose_notes']
    rec['INSERT_USER_ID'] = ml_data['insert_user']
    rec['UPDATE_USER_ID'] = ml_data['update_user']
    rec['WL_WELL_TYPE'] = map_well_type(ml_data['wl_well_type'])
    rec['QW_WELL_TYPE'] = map_well_type(ml_data['qw_well_type'])
    rec['LOCAL_AQUIFER_CD'] = None
    rec['REVIEW_FLAG'] = None
    # Geographic reference codes: tolerate missing/null parent objects.
    rec['STATE_CD'] = _nested('state', 'state_cd')
    rec['COUNTY_CD'] = _nested('county', 'county_cd')
    rec['COUNTRY_CD'] = _nested('country', 'country_cd')
    # Unit objects may be null; pull the unit id only when present.
    well_depth_units = ml_data['well_depth_units']
    rec['WELL_DEPTH_UNITS'] = well_depth_units['unit_id'] if well_depth_units else None
    altitude_units = ml_data['altitude_units']
    rec['ALT_UNITS'] = altitude_units['unit_id'] if altitude_units else None
    rec['SITE_TYPE'] = ml_data['site_type']
    rec['HORZ_METHOD'] = ml_data['horz_method']
    rec['HORZ_ACY'] = ml_data['horz_acy']
    rec['ALT_METHOD'] = ml_data['alt_method']
    rec['ALT_ACY'] = ml_data['alt_acy']
    return rec
| 38.227941 | 117 | 0.695903 |
53e0d34e58ad9e0686dc6ee3e5a7f6fc0076f469
| 55 |
py
|
Python
|
django_reporter_pro/config/model_configs.py
|
shamilison/django-reporter-pro
|
0c6f60bbae939d318e7aafaec83613d2768a4f63
|
[
"Apache-2.0"
] | null | null | null |
django_reporter_pro/config/model_configs.py
|
shamilison/django-reporter-pro
|
0c6f60bbae939d318e7aafaec83613d2768a4f63
|
[
"Apache-2.0"
] | null | null | null |
django_reporter_pro/config/model_configs.py
|
shamilison/django-reporter-pro
|
0c6f60bbae939d318e7aafaec83613d2768a4f63
|
[
"Apache-2.0"
] | null | null | null |
# Created by shamilsakib at 04/10/20
BASE_MODEL = None
| 18.333333 | 36 | 0.763636 |
53e10c53f31c7e396a4573a421ae3212e9a11856
| 1,543 |
py
|
Python
|
DPSparkImplementations/paf_kernels.py
|
TEAlab/DPSpark
|
4d53ee13b03e2e12119c28fe2b2241ad20231eac
|
[
"MIT"
] | null | null | null |
DPSparkImplementations/paf_kernels.py
|
TEAlab/DPSpark
|
4d53ee13b03e2e12119c28fe2b2241ad20231eac
|
[
"MIT"
] | null | null | null |
DPSparkImplementations/paf_kernels.py
|
TEAlab/DPSpark
|
4d53ee13b03e2e12119c28fe2b2241ad20231eac
|
[
"MIT"
] | 1 |
2020-12-30T22:12:55.000Z
|
2020-12-30T22:12:55.000Z
|
__author__ = "Zafar Ahmad, Mohammad Mahdi Javanmard"
__copyright__ = "Copyright (c) 2019 Tealab@SBU"
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Zafar Ahmad"
__email__ = "[email protected]"
__status__ = "Development"
import numpy as np
import numba as nb
'''
Iterative kernels
'''
| 35.068182 | 104 | 0.610499 |
53e339cc8fb766eb00e75883c4d6064e436e942f
| 1,343 |
py
|
Python
|
terrakg/rates.py
|
terrapain/terrakg
|
90c52ca3b227d2daabd604255e793ac5f536c246
|
[
"Apache-2.0"
] | null | null | null |
terrakg/rates.py
|
terrapain/terrakg
|
90c52ca3b227d2daabd604255e793ac5f536c246
|
[
"Apache-2.0"
] | null | null | null |
terrakg/rates.py
|
terrapain/terrakg
|
90c52ca3b227d2daabd604255e793ac5f536c246
|
[
"Apache-2.0"
] | null | null | null |
from terra_sdk.exceptions import LCDResponseError
from terrakg import logger
# Logging
from terrakg.client import ClientContainer
logger = logger.get_logger(__name__)
| 30.522727 | 117 | 0.568876 |
53e44f41ef2d0962b6580e25176980ba9b2fe713
| 2,868 |
py
|
Python
|
src/tracking_module.py
|
HonzaKlicpera/Effective-footage-processing-Blender-add-on
|
f3faae3fc56a3ef8f2eabba9af8be718e57f4d35
|
[
"MIT"
] | 1 |
2020-06-09T11:23:44.000Z
|
2020-06-09T11:23:44.000Z
|
src/tracking_module.py
|
HonzaKlicpera/Effective-footage-processing-Blender
|
f3faae3fc56a3ef8f2eabba9af8be718e57f4d35
|
[
"MIT"
] | null | null | null |
src/tracking_module.py
|
HonzaKlicpera/Effective-footage-processing-Blender
|
f3faae3fc56a3ef8f2eabba9af8be718e57f4d35
|
[
"MIT"
] | null | null | null |
import bpy
import os, glob
from pathlib import Path
from enum import Enum
from abc import ABC, abstractmethod
import csv
from . import keying_module
#----------------------------------------
# PROPERTIES
#----------------------------------------
classes = (
TrackingExportDataOp,
TrackingPanel,
TrackingSceneProps
)
| 30.189474 | 87 | 0.644003 |
53e4b90b1159d838a8edfa7ab52a953ffb4eca72
| 437 |
py
|
Python
|
nodes/2.x/python/View.ViewTemplate.py
|
andydandy74/ClockworkForDynamo
|
bd4ac2c13956a02352a458d01096a35b7258d9f2
|
[
"MIT"
] | 147 |
2016-02-24T16:37:03.000Z
|
2022-02-18T12:10:34.000Z
|
nodes/2.x/python/View.ViewTemplate.py
|
johnpierson/ClockworkForDynamo
|
953d3f56b75e99561978925756e527357f9978dd
|
[
"MIT"
] | 269 |
2016-02-25T14:04:14.000Z
|
2022-03-26T07:30:53.000Z
|
nodes/2.x/python/View.ViewTemplate.py
|
johnpierson/ClockworkForDynamo
|
953d3f56b75e99561978925756e527357f9978dd
|
[
"MIT"
] | 89 |
2016-03-16T18:21:56.000Z
|
2022-02-03T14:34:30.000Z
|
import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
views = UnwrapElement(IN[0])
if isinstance(IN[0], list): OUT = [GetViewTemplate(x) for x in views]
else: OUT = GetViewTemplate(views)
| 29.133333 | 69 | 0.757437 |
53e73c9f153e27f98b4ee8cc325ad02d4ef90185
| 8,267 |
py
|
Python
|
infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
|
bohdana-kuzmenko/incubator-dlab
|
d052709450e7916860c7dd191708d5524cf44c1e
|
[
"Apache-2.0"
] | null | null | null |
infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
|
bohdana-kuzmenko/incubator-dlab
|
d052709450e7916860c7dd191708d5524cf44c1e
|
[
"Apache-2.0"
] | null | null | null |
infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
|
bohdana-kuzmenko/incubator-dlab
|
d052709450e7916860c7dd191708d5524cf44c1e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# *****************************************************************************
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# ******************************************************************************
import json
import time
from fabric.api import *
from dlab.fab import *
from dlab.meta_lib import *
from dlab.actions_lib import *
import sys
import os
import uuid
import logging
from Crypto.PublicKey import RSA
if __name__ == "__main__":
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.INFO,
filename=local_log_filepath)
try:
os.environ['exploratory_name']
except:
os.environ['exploratory_name'] = ''
if os.path.exists('/response/.dataproc_creating_{}'.format(os.environ['exploratory_name'])):
time.sleep(30)
print('Generating infrastructure names and tags')
dataproc_conf = dict()
try:
dataproc_conf['exploratory_name'] = (os.environ['exploratory_name']).lower().replace('_', '-')
except:
dataproc_conf['exploratory_name'] = ''
try:
dataproc_conf['computational_name'] = (os.environ['computational_name']).lower().replace('_', '-')
except:
dataproc_conf['computational_name'] = ''
dataproc_conf['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
dataproc_conf['edge_user_name'] = (os.environ['edge_user_name']).lower().replace('_', '-')
dataproc_conf['key_name'] = os.environ['conf_key_name']
dataproc_conf['key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
dataproc_conf['region'] = os.environ['gcp_region']
dataproc_conf['zone'] = os.environ['gcp_zone']
dataproc_conf['subnet'] = '{0}-{1}-subnet'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['cluster_name'] = '{0}-{1}-des-{2}-{3}'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'],
dataproc_conf['exploratory_name'], dataproc_conf['computational_name'])
dataproc_conf['cluster_tag'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['bucket_name'] = '{}-{}-bucket'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['release_label'] = os.environ['dataproc_version']
dataproc_conf['cluster_labels'] = {
os.environ['notebook_instance_name']: "not-configured",
"name": dataproc_conf['cluster_name'],
"sbn": dataproc_conf['service_base_name'],
"user": dataproc_conf['edge_user_name'],
"notebook_name": os.environ['notebook_instance_name'],
"product": "dlab",
"computational_name": dataproc_conf['computational_name']
}
dataproc_conf['dataproc_service_account_name'] = '{0}-{1}-ps'.format(dataproc_conf['service_base_name'],
dataproc_conf['edge_user_name'])
service_account_email = "{}@{}.iam.gserviceaccount.com".format(dataproc_conf['dataproc_service_account_name'],
os.environ['gcp_project_id'])
dataproc_conf['edge_instance_hostname'] = '{0}-{1}-edge'.format(dataproc_conf['service_base_name'], dataproc_conf['edge_user_name'])
dataproc_conf['dlab_ssh_user'] = os.environ['conf_os_user']
edge_status = GCPMeta().get_instance_status(dataproc_conf['edge_instance_hostname'])
if edge_status != 'RUNNING':
logging.info('ERROR: Edge node is unavailable! Aborting...')
print('ERROR: Edge node is unavailable! Aborting...')
ssn_hostname = GCPMeta().get_private_ip_address(dataproc_conf['service_base_name'] + '-ssn')
put_resource_status('edge', 'Unavailable', os.environ['ssn_dlab_path'], os.environ['conf_os_user'], ssn_hostname)
append_result("Edge node is unavailable")
sys.exit(1)
print("Will create exploratory environment with edge node as access point as following: ".format(json.dumps(dataproc_conf, sort_keys=True, indent=4, separators=(',', ': '))))
logging.info(json.dumps(dataproc_conf))
local('touch /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']))
local("echo Waiting for changes to propagate; sleep 10")
dataproc_cluster = json.loads(open('/root/templates/dataengine-service_cluster.json').read().decode('utf-8-sig'))
dataproc_cluster['projectId'] = os.environ['gcp_project_id']
dataproc_cluster['clusterName'] = dataproc_conf['cluster_name']
dataproc_cluster['labels'] = dataproc_conf['cluster_labels']
dataproc_cluster['config']['configBucket'] = dataproc_conf['bucket_name']
dataproc_cluster['config']['gceClusterConfig']['serviceAccount'] = service_account_email
dataproc_cluster['config']['gceClusterConfig']['zoneUri'] = dataproc_conf['zone']
dataproc_cluster['config']['gceClusterConfig']['subnetworkUri'] = dataproc_conf['subnet']
dataproc_cluster['config']['masterConfig']['machineTypeUri'] = os.environ['dataproc_master_instance_type']
dataproc_cluster['config']['workerConfig']['machineTypeUri'] = os.environ['dataproc_slave_instance_type']
dataproc_cluster['config']['masterConfig']['numInstances'] = int(os.environ['dataproc_master_count'])
dataproc_cluster['config']['workerConfig']['numInstances'] = int(os.environ['dataproc_slave_count'])
if int(os.environ['dataproc_preemptible_count']) != 0:
dataproc_cluster['config']['secondaryWorkerConfig']['numInstances'] = int(os.environ['dataproc_preemptible_count'])
else:
del dataproc_cluster['config']['secondaryWorkerConfig']
dataproc_cluster['config']['softwareConfig']['imageVersion'] = dataproc_conf['release_label']
ssh_user_pubkey = open(os.environ['conf_key_dir'] + os.environ['edge_user_name'] + '.pub').read()
key = RSA.importKey(open(dataproc_conf['key_path'], 'rb').read())
ssh_admin_pubkey = key.publickey().exportKey("OpenSSH")
dataproc_cluster['config']['gceClusterConfig']['metadata']['ssh-keys'] = '{0}:{1}\n{0}:{2}'.format(dataproc_conf['dlab_ssh_user'], ssh_user_pubkey, ssh_admin_pubkey)
dataproc_cluster['config']['gceClusterConfig']['tags'][0] = dataproc_conf['cluster_tag']
try:
logging.info('[Creating Dataproc Cluster]')
print('[Creating Dataproc Cluster]')
params = "--region {0} --bucket {1} --params '{2}'".format(dataproc_conf['region'], dataproc_conf['bucket_name'], json.dumps(dataproc_cluster))
try:
local("~/scripts/{}.py {}".format('dataengine-service_create', params))
except:
traceback.print_exc()
raise Exception
keyfile_name = "/root/keys/{}.pem".format(dataproc_conf['key_name'])
local('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']))
except Exception as err:
print('Error: {0}'.format(err))
append_result("Failed to create Dataproc Cluster.", str(err))
local('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']))
sys.exit(1)
| 57.013793 | 178 | 0.670134 |
53e7f5b9bbd28821250ea584ab34945cec2c0582
| 931 |
py
|
Python
|
02.py
|
mattias-lundell/aoc2021
|
32bd41446d963c5788d4614106405be65de81bcd
|
[
"MIT"
] | null | null | null |
02.py
|
mattias-lundell/aoc2021
|
32bd41446d963c5788d4614106405be65de81bcd
|
[
"MIT"
] | null | null | null |
02.py
|
mattias-lundell/aoc2021
|
32bd41446d963c5788d4614106405be65de81bcd
|
[
"MIT"
] | null | null | null |
test = """forward 5
down 5
forward 8
up 3
down 8
forward 2
"""
if __name__ == '__main__':
part1(test.splitlines())
part1(open('in02.txt').readlines())
part2(test.splitlines())
part2(open('in02.txt').readlines())
| 19.395833 | 39 | 0.493018 |
53e86b46c3285488d7ebc41a01e6a577e706cb66
| 693 |
py
|
Python
|
associations/migrations/0001_initial.py
|
ollc-code/django-back
|
205f3adc61f9e62c88dfcc170999cef495cebed7
|
[
"MIT"
] | null | null | null |
associations/migrations/0001_initial.py
|
ollc-code/django-back
|
205f3adc61f9e62c88dfcc170999cef495cebed7
|
[
"MIT"
] | null | null | null |
associations/migrations/0001_initial.py
|
ollc-code/django-back
|
205f3adc61f9e62c88dfcc170999cef495cebed7
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.3 on 2020-11-09 08:56
from django.db import migrations, models
| 27.72 | 114 | 0.580087 |
53e96f34f945ecef4aebd95bbb66a14049ee97c2
| 4,631 |
py
|
Python
|
tests/pds/test_times.py
|
seignovert/pyvims
|
a70b5b9b8bc5c37fa43b7db4d15407f312a31849
|
[
"BSD-3-Clause"
] | 4 |
2019-09-16T15:50:22.000Z
|
2021-04-08T15:32:48.000Z
|
tests/pds/test_times.py
|
seignovert/pyvims
|
a70b5b9b8bc5c37fa43b7db4d15407f312a31849
|
[
"BSD-3-Clause"
] | 3 |
2018-05-04T09:28:24.000Z
|
2018-12-03T09:00:31.000Z
|
tests/pds/test_times.py
|
seignovert/pyvims
|
a70b5b9b8bc5c37fa43b7db4d15407f312a31849
|
[
"BSD-3-Clause"
] | 1 |
2020-10-12T15:14:17.000Z
|
2020-10-12T15:14:17.000Z
|
"""Test PDS times modules."""
from datetime import datetime as dt
from pyvims.pds.times import (cassini2utc, cassini_time, dt_date, dt_doy, dt_iso,
dyear, pds_folder, pds_time, utc2cassini)
from pytest import approx, raises
def test_dt_iso():
"""Test parsing ISO time pattern."""
assert str(dt_iso('2005-02-14T18:02:29.123')) == '2005-02-14 18:02:29.123000+00:00'
assert str(dt_iso('2005-02-14 18:02:29')) == '2005-02-14 18:02:29+00:00'
assert str(dt_iso('2005-02-14:18:02')) == '2005-02-14 18:02:00+00:00'
assert str(dt_iso('2005-02-14')) == '2005-02-14 00:00:00+00:00'
times = dt_iso('from 2005-02-14T18:02:29 to 2005-02-14T18:03')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 18:02:29+00:00'
assert str(times[1]) == '2005-02-14 18:03:00+00:00'
with raises(ValueError):
_ = dt_iso('2005-045')
def test_dt_doy():
"""Test parsing DOY time pattern."""
assert str(dt_doy('2005-045T18:02:29.123')) == '2005-02-14 18:02:29.123000+00:00'
assert str(dt_doy('2005-045 18:02:29')) == '2005-02-14 18:02:29+00:00'
assert str(dt_doy('2005-045:18:02')) == '2005-02-14 18:02:00+00:00'
assert str(dt_doy('2005-045')) == '2005-02-14 00:00:00+00:00'
times = dt_doy('from 2005-045T18:02:29 to 2005-045T18:03')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 18:02:29+00:00'
assert str(times[1]) == '2005-02-14 18:03:00+00:00'
with raises(ValueError):
_ = dt_doy('2005-02-14')
def test_dt_date():
"""Test date pattern."""
assert str(dt_date('Feb 14, 2005')) == '2005-02-14 00:00:00+00:00'
assert str(dt_date('Febr 14, 2005')) == '2005-02-14 00:00:00+00:00'
assert str(dt_date('Feb 14, 2005', eod=True)) == '2005-02-14 23:59:59+00:00'
assert str(dt_date('to Feb 14, 2005')) == '2005-02-14 23:59:59+00:00'
times = dt_date('from Feb 14, 2005 through March 12, 2006')
assert len(times) == 2
assert str(times[0]) == '2005-02-14 00:00:00+00:00'
assert str(times[1]) == '2006-03-12 23:59:59+00:00'
with raises(ValueError):
_ = dt_date('2005-02-14')
def test_pds_time():
"""Test PDS time parsing."""
assert str(pds_time('May 17, 2007')) == '2007-05-17 00:00:00+00:00'
assert str(pds_time('2010-274T00:00:00')) == '2010-10-01 00:00:00+00:00'
assert str(pds_time('2011-10-01T00:02:04.244')) == '2011-10-01 00:02:04.244000+00:00'
t0, t1 = pds_time(' May 17, 2007 through Jun 30, 2007')
assert str(t0) == '2007-05-17 00:00:00+00:00'
assert str(t1) == '2007-06-30 23:59:59+00:00'
t0, t1 = pds_time(' 2010-274T00:00:00 through 2010-365T23:59:59')
assert str(t0) == '2010-10-01 00:00:00+00:00'
assert str(t1) == '2010-12-31 23:59:59+00:00'
t0, t1 = pds_time(' 2011-10-01T00:02:04.244 through 2011-12-31T12:28:45.128')
assert str(t0) == '2011-10-01 00:02:04.244000+00:00'
assert str(t1) == '2011-12-31 12:28:45.128000+00:00'
t0, t1 = pds_time('2005015T175855_2005016T184233/')
assert str(t0) == '2005-01-15 17:58:55+00:00'
assert str(t1) == '2005-01-16 18:42:33+00:00'
with raises(ValueError):
_ = pds_time('No data available')
def test_cassini_time():
"""Test Cassini time parsing."""
assert cassini_time('v1487096932_1.qub') == 1487096932.0
assert cassini_time(1483230358.172) == 1483230358.172
with raises(ValueError):
_ = cassini_time('v123_1')
with raises(ValueError):
_ = cassini_time(123)
def test_cassini2utc():
"""Test Cassini time to UTC converter."""
assert str(cassini2utc('v1487096932_1')) == '2005-02-14 18:02:29'
assert str(cassini2utc(1483230358.172)) == '2005-01-01 00:00:00'
def test_utc2cassini():
"""Test UTC to Cassini time converter."""
assert utc2cassini('2005-02-14T18:02:29') == approx(1487096932.068, abs=1e-3)
times = utc2cassini('May 17, 2007 through Jun 30, 2007')
assert len(times) == 2
assert times[0] == approx(1558053238.602, abs=1e-3)
assert times[1] == approx(1561941262.879, abs=1e-3)
def test_pds_folder():
"""Test convert PDS folder as string."""
assert pds_folder('2005015T175855') == '2005-015T17:58:55'
assert pds_folder('2005015T175855_2005016T184233/') == \
'2005-015T17:58:55 2005-016T18:42:33'
def test_dyear():
"""Test decimal year."""
assert dyear('2005-01-01') == 2005.0
assert dyear('2005-12-31') == 2005.9973
assert dyear('2004-12-31') == 2004.9973
assert dyear(dt(2005, 1, 1)) == 2005.0
assert dyear(dt(2005, 12, 31)) == 2005.9973
assert dyear(dt(2004, 12, 31)) == 2004.9973
| 34.559701 | 89 | 0.628374 |
53e9f02f64051ff304c3ebef251b469302530c2e
| 626 |
py
|
Python
|
e/mail-relay/web/apps/mail/migrations/0109_auto_20171130_1047.py
|
zhouli121018/nodejsgm
|
0ccbc8acf61badc812f684dd39253d55c99f08eb
|
[
"MIT"
] | null | null | null |
e/mail-relay/web/apps/mail/migrations/0109_auto_20171130_1047.py
|
zhouli121018/nodejsgm
|
0ccbc8acf61badc812f684dd39253d55c99f08eb
|
[
"MIT"
] | 18 |
2020-06-05T18:17:40.000Z
|
2022-03-11T23:25:21.000Z
|
e/mail-relay/web/apps/mail/migrations/0109_auto_20171130_1047.py
|
zhouli121018/nodejsgm
|
0ccbc8acf61badc812f684dd39253d55c99f08eb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
| 27.217391 | 113 | 0.635783 |
53ea00fc5aec5aef16f52f772300f59c029df625
| 11,168 |
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_test/_data/sanity/code-smell/runtime-metadata.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1 |
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_test/_data/sanity/code-smell/runtime-metadata.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12 |
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_test/_data/sanity/code-smell/runtime-metadata.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Schema validation of ansible-core's ansible_builtin_runtime.yml and collection's meta/runtime.yml"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import re
import sys
from distutils.version import StrictVersion, LooseVersion
from functools import partial
import yaml
from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA
from voluptuous import Required, Schema, Invalid
from voluptuous.humanize import humanize_error
from ansible.module_utils.six import string_types
from ansible.utils.version import SemanticVersion
def isodate(value, check_deprecation_date=False, is_tombstone=False):
"""Validate a datetime.date or ISO 8601 date string."""
# datetime.date objects come from YAML dates, these are ok
if isinstance(value, datetime.date):
removal_date = value
else:
# make sure we have a string
msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date'
if not isinstance(value, string_types):
raise Invalid(msg)
# From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions,
# we have to do things manually.
if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value):
raise Invalid(msg)
try:
removal_date = datetime.datetime.strptime(value, '%Y-%m-%d').date()
except ValueError:
raise Invalid(msg)
# Make sure date is correct
today = datetime.date.today()
if is_tombstone:
# For a tombstone, the removal date must be in the past
if today < removal_date:
raise Invalid(
'The tombstone removal_date (%s) must not be after today (%s)' % (removal_date, today))
else:
# For a deprecation, the removal date must be in the future. Only test this if
# check_deprecation_date is truish, to avoid checks to suddenly start to fail.
if check_deprecation_date and today > removal_date:
raise Invalid(
'The deprecation removal_date (%s) must be after today (%s)' % (removal_date, today))
return value
def removal_version(value, is_ansible, current_version=None, is_tombstone=False):
"""Validate a removal version string."""
msg = (
'Removal version must be a string' if is_ansible else
'Removal version must be a semantic version (https://semver.org/)'
)
if not isinstance(value, string_types):
raise Invalid(msg)
try:
if is_ansible:
version = StrictVersion()
version.parse(value)
version = LooseVersion(value) # We're storing Ansible's version as a LooseVersion
else:
version = SemanticVersion()
version.parse(value)
if version.major != 0 and (version.minor != 0 or version.patch != 0):
raise Invalid('removal_version (%r) must be a major release, not a minor or patch release '
'(see specification at https://semver.org/)' % (value, ))
if current_version is not None:
if is_tombstone:
# For a tombstone, the removal version must not be in the future
if version > current_version:
raise Invalid('The tombstone removal_version (%r) must not be after the '
'current version (%s)' % (value, current_version))
else:
# For a deprecation, the removal version must be in the future
if version <= current_version:
raise Invalid('The deprecation removal_version (%r) must be after the '
'current version (%s)' % (value, current_version))
except ValueError:
raise Invalid(msg)
return value
def any_value(value):
"""Accepts anything."""
return value
def get_ansible_version():
"""Return current ansible-core version"""
from ansible.release import __version__
return LooseVersion('.'.join(__version__.split('.')[:3]))
def get_collection_version():
"""Return current collection version, or None if it is not available"""
import importlib.util
collection_detail_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
'collection_detail.py')
collection_detail_spec = importlib.util.spec_from_file_location('collection_detail', collection_detail_path)
collection_detail = importlib.util.module_from_spec(collection_detail_spec)
sys.modules['collection_detail'] = collection_detail
collection_detail_spec.loader.exec_module(collection_detail)
# noinspection PyBroadException
try:
result = collection_detail.read_manifest_json('.') or collection_detail.read_galaxy_yml('.')
return SemanticVersion(result['version'])
except Exception: # pylint: disable=broad-except
# We do not care why it fails, in case we cannot get the version
# just return None to indicate "we don't know".
return None
def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
"""Validate explicit runtime metadata file"""
try:
with open(path, 'r') as f_path:
routing = yaml.safe_load(f_path)
except yaml.error.MarkedYAMLError as ex:
print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line +
1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex))))
return
except Exception as ex: # pylint: disable=broad-except
print('%s:%d:%d: YAML load failed: %s' %
(path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
return
if is_ansible:
current_version = get_ansible_version()
else:
current_version = get_collection_version()
# Updates to schema MUST also be reflected in the documentation
# ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html
# plugin_routing schema
avoid_additional_data = Schema(
Any(
{
Required('removal_version'): any_value,
'warning_text': any_value,
},
{
Required('removal_date'): any_value,
'warning_text': any_value,
}
),
extra=PREVENT_EXTRA
)
deprecation_schema = All(
# The first schema validates the input, and the second makes sure no extra keys are specified
Schema(
{
'removal_version': partial(removal_version, is_ansible=is_ansible,
current_version=current_version),
'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates),
'warning_text': Any(*string_types),
}
),
avoid_additional_data
)
tombstoning_schema = All(
# The first schema validates the input, and the second makes sure no extra keys are specified
Schema(
{
'removal_version': partial(removal_version, is_ansible=is_ansible,
current_version=current_version, is_tombstone=True),
'removal_date': partial(isodate, is_tombstone=True),
'warning_text': Any(*string_types),
}
),
avoid_additional_data
)
plugin_routing_schema = Any(
Schema({
('deprecation'): Any(deprecation_schema),
('tombstone'): Any(tombstoning_schema),
('redirect'): Any(*string_types),
}, extra=PREVENT_EXTRA),
)
list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema}
for str_type in string_types]
plugin_schema = Schema({
('action'): Any(None, *list_dict_plugin_routing_schema),
('become'): Any(None, *list_dict_plugin_routing_schema),
('cache'): Any(None, *list_dict_plugin_routing_schema),
('callback'): Any(None, *list_dict_plugin_routing_schema),
('cliconf'): Any(None, *list_dict_plugin_routing_schema),
('connection'): Any(None, *list_dict_plugin_routing_schema),
('doc_fragments'): Any(None, *list_dict_plugin_routing_schema),
('filter'): Any(None, *list_dict_plugin_routing_schema),
('httpapi'): Any(None, *list_dict_plugin_routing_schema),
('inventory'): Any(None, *list_dict_plugin_routing_schema),
('lookup'): Any(None, *list_dict_plugin_routing_schema),
('module_utils'): Any(None, *list_dict_plugin_routing_schema),
('modules'): Any(None, *list_dict_plugin_routing_schema),
('netconf'): Any(None, *list_dict_plugin_routing_schema),
('shell'): Any(None, *list_dict_plugin_routing_schema),
('strategy'): Any(None, *list_dict_plugin_routing_schema),
('terminal'): Any(None, *list_dict_plugin_routing_schema),
('test'): Any(None, *list_dict_plugin_routing_schema),
('vars'): Any(None, *list_dict_plugin_routing_schema),
}, extra=PREVENT_EXTRA)
# import_redirection schema
import_redirection_schema = Any(
Schema({
('redirect'): Any(*string_types),
# import_redirect doesn't currently support deprecation
}, extra=PREVENT_EXTRA)
)
list_dict_import_redirection_schema = [{str_type: import_redirection_schema}
for str_type in string_types]
# top level schema
schema = Schema({
# All of these are optional
('plugin_routing'): Any(plugin_schema),
('import_redirection'): Any(None, *list_dict_import_redirection_schema),
# requires_ansible: In the future we should validate this with SpecifierSet
('requires_ansible'): Any(*string_types),
('action_groups'): dict,
}, extra=PREVENT_EXTRA)
# Ensure schema is valid
try:
schema(routing)
except MultipleInvalid as ex:
for error in ex.errors:
# No way to get line/column numbers
print('%s:%d:%d: %s' % (path, 0, 0, humanize_error(routing, error)))
def main():
"""Validate runtime metadata"""
paths = sys.argv[1:] or sys.stdin.read().splitlines()
collection_legacy_file = 'meta/routing.yml'
collection_runtime_file = 'meta/runtime.yml'
# This is currently disabled, because if it is enabled this test can start failing
# at a random date. For this to be properly activated, we (a) need to be able to return
# codes for this test, and (b) make this error optional.
check_deprecation_dates = False
for path in paths:
if path == collection_legacy_file:
print('%s:%d:%d: %s' % (path, 0, 0, ("Should be called '%s'" % collection_runtime_file)))
continue
validate_metadata_file(
path,
is_ansible=path not in (collection_legacy_file, collection_runtime_file),
check_deprecation_dates=check_deprecation_dates)
if __name__ == '__main__':
main()
| 39.885714 | 112 | 0.632969 |
53eb2f5275fa111e5a11e8a6b19fe5db87a5dc8d
| 2,160 |
py
|
Python
|
catkin_ws/src/o2ac_flexbe/o2ac_flexbe_states/src/o2ac_flexbe_states/align_bearing_holes.py
|
mitdo/o2ac-ur
|
74c82a54a693bf6a3fc995ff63e7c91ac1fda6fd
|
[
"MIT"
] | 32 |
2021-09-02T12:29:47.000Z
|
2022-03-30T21:44:10.000Z
|
catkin_ws/src/o2ac_flexbe/o2ac_flexbe_states/src/o2ac_flexbe_states/align_bearing_holes.py
|
kroglice/o2ac-ur
|
f684f21fd280a22ec061dc5d503801f6fefb2422
|
[
"MIT"
] | 4 |
2021-09-22T00:51:14.000Z
|
2022-01-30T11:54:19.000Z
|
catkin_ws/src/o2ac_flexbe/o2ac_flexbe_states/src/o2ac_flexbe_states/align_bearing_holes.py
|
kroglice/o2ac-ur
|
f684f21fd280a22ec061dc5d503801f6fefb2422
|
[
"MIT"
] | 7 |
2021-11-02T12:26:09.000Z
|
2022-02-01T01:45:22.000Z
|
#!/usr/bin/env python
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyActionClient
# example import of required action
from o2ac_msgs.msg import AlignBearingHolesAction, AlignBearingHolesGoal
| 30.422535 | 72 | 0.600463 |
53eb9134fe73eaf59759bdec6bb46f044d4317f1
| 6,710 |
py
|
Python
|
find_unicode_control.py
|
sebastian-philipp/find-unicode-control
|
170730aff64d17a4d9c57b0284d862c932e1565c
|
[
"BSD-3-Clause"
] | null | null | null |
find_unicode_control.py
|
sebastian-philipp/find-unicode-control
|
170730aff64d17a4d9c57b0284d862c932e1565c
|
[
"BSD-3-Clause"
] | null | null | null |
find_unicode_control.py
|
sebastian-philipp/find-unicode-control
|
170730aff64d17a4d9c57b0284d862c932e1565c
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
"""Find unicode control characters in source files
By default the script takes one or more files or directories and looks for
unicode control characters in all text files. To narrow down the files, provide
a config file with the -c command line, defining a scan_exclude list, which
should be a list of regular expressions matching paths to exclude from the scan.
There is a second mode enabled with -p which when set to 'all', prints all
control characters and when set to 'bidi', prints only the 9 bidirectional
control characters.
"""
import sys, os, argparse, re, unicodedata, magic
import importlib
from stat import *
scan_exclude = [r'\.git/', r'\.hg/', r'\.desktop$', r'ChangeLog$', r'NEWS$',
r'\.ppd$', r'\.txt$', r'\.directory$']
scan_exclude_mime = [r'text/x-po$', r'text/x-tex$', r'text/x-troff$',
r'text/html$']
verbose_mode = False
# Print to stderr in verbose mode.
# Decode a single latin1 line.
# Make a text string from a file, attempting to decode from latin1 if necessary.
# Other non-utf-8 locales are not supported at the moment.
# Look for disallowed characters in the text. We reduce all characters into a
# set to speed up analysis. FIXME: Add a slow mode to get line numbers in files
# that have these disallowed chars.
# Get file text and feed into analyze_text.
# Actual implementation of the recursive descent into directories.
# Recursively analyze files in the directory.
# All control characters. We omit the ascii control characters.
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Look for Unicode control characters")
parser.add_argument('path', metavar='path', nargs='+',
help='Sources to analyze')
parser.add_argument('-p', '--nonprint', required=False,
type=str, choices=['all', 'bidi'],
help='Look for either all non-printable unicode characters or bidirectional control characters.')
parser.add_argument('-v', '--verbose', required=False, action='store_true',
help='Verbose mode.')
parser.add_argument('-d', '--detailed', required=False, action='store_true',
help='Print line numbers where characters occur.')
parser.add_argument('-t', '--notests', required=False,
action='store_true', help='Exclude tests (basically test.* as a component of path).')
parser.add_argument('-c', '--config', required=False, type=str,
help='Configuration file to read settings from.')
args = parser.parse_args()
verbose_mode = args.verbose
detailed_mode = args.detailed
if not args.nonprint:
# Formatting control characters in the unicode space. This includes the
# bidi control characters.
disallowed = set(chr(c) for c in range(sys.maxunicode) if \
unicodedata.category(chr(c)) == 'Cf')
msg = 'unicode control characters'
elif args.nonprint == 'all':
# All control characters.
disallowed = set(chr(c) for c in range(sys.maxunicode) if \
nonprint_unicode(chr(c)))
msg = 'disallowed characters'
else:
# Only bidi control characters.
disallowed = set([
chr(0x202a), chr(0x202b), chr(0x202c), chr(0x202d), chr(0x202e),
chr(0x2066), chr(0x2067), chr(0x2068), chr(0x2069)])
msg = 'bidirectional control characters'
if args.config:
spec = importlib.util.spec_from_file_location("settings", args.config)
settings = importlib.util.module_from_spec(spec)
spec.loader.exec_module(settings)
if hasattr(settings, 'scan_exclude'):
scan_exclude = scan_exclude + settings.scan_exclude
if hasattr(settings, 'scan_exclude_mime'):
scan_exclude_mime = scan_exclude_mime + settings.scan_exclude_mime
if args.notests:
scan_exclude = scan_exclude + [r'/test[^/]+/']
analyze_paths(args.path, disallowed, msg)
| 35.882353 | 109 | 0.634426 |
53ebe27af2c0c28dac914d098023620cb50fc322
| 1,529 |
py
|
Python
|
igibson/object_states/aabb.py
|
mamadbiabon/iGibson
|
d416a470240eb7ad86e04fee475ae4bd67263a7c
|
[
"MIT"
] | 360 |
2020-04-02T11:12:09.000Z
|
2022-03-24T21:46:58.000Z
|
igibson/object_states/aabb.py
|
mamadbiabon/iGibson
|
d416a470240eb7ad86e04fee475ae4bd67263a7c
|
[
"MIT"
] | 169 |
2020-04-07T21:01:05.000Z
|
2022-03-31T10:07:39.000Z
|
igibson/object_states/aabb.py
|
mamadbiabon/iGibson
|
d416a470240eb7ad86e04fee475ae4bd67263a7c
|
[
"MIT"
] | 94 |
2020-04-09T23:22:17.000Z
|
2022-03-17T21:49:03.000Z
|
import numpy as np
from igibson.external.pybullet_tools.utils import aabb_union, get_aabb, get_all_links
from igibson.object_states.object_state_base import CachingEnabledObjectState
| 36.404762 | 109 | 0.699804 |
53ed119c9b07bf3b0dd5b8ddf0cc3d573400eed1
| 34,187 |
py
|
Python
|
vsphere/tests/test_vsphere.py
|
fujigon/integrations-core
|
256b1c138fd1bf1c71db63698737e813cfda00f8
|
[
"BSD-3-Clause"
] | null | null | null |
vsphere/tests/test_vsphere.py
|
fujigon/integrations-core
|
256b1c138fd1bf1c71db63698737e813cfda00f8
|
[
"BSD-3-Clause"
] | null | null | null |
vsphere/tests/test_vsphere.py
|
fujigon/integrations-core
|
256b1c138fd1bf1c71db63698737e813cfda00f8
|
[
"BSD-3-Clause"
] | 1 |
2019-12-23T13:35:17.000Z
|
2019-12-23T13:35:17.000Z
|
# (C) Datadog, Inc. 2010-2017
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
from __future__ import unicode_literals
import time
from datetime import datetime
import mock
import pytest
from mock import MagicMock
from pyVmomi import vim
from datadog_checks.vsphere import VSphereCheck
from datadog_checks.vsphere.cache_config import CacheConfig
from datadog_checks.vsphere.common import SOURCE_TYPE
from datadog_checks.vsphere.errors import BadConfigError, ConnectionError
from datadog_checks.vsphere.vsphere import (
REFRESH_METRICS_METADATA_INTERVAL,
REFRESH_MORLIST_INTERVAL,
RESOURCE_TYPE_METRICS,
SHORT_ROLLUP,
)
from .utils import MockedMOR, assertMOR, disable_thread_pool, get_mocked_server
SERVICE_CHECK_TAGS = ["vcenter_server:vsphere_mock", "vcenter_host:None", "foo:bar"]
def test__is_excluded():
"""
* Exclude hosts/vms not compliant with the user's `*_include` configuration.
* Exclude "non-labeled" virtual machines when the user configuration instructs to.
"""
# Sample(s)
include_regexes = {'host_include': "f[o]+", 'vm_include': "f[o]+"}
# OK
included_host = MockedMOR(spec="HostSystem", name="foo")
included_vm = MockedMOR(spec="VirtualMachine", name="foo")
assert not VSphereCheck._is_excluded(included_host, {"name": included_host.name}, include_regexes, None)
assert not VSphereCheck._is_excluded(included_vm, {"name": included_vm.name}, include_regexes, None)
# Not OK!
excluded_host = MockedMOR(spec="HostSystem", name="bar")
excluded_vm = MockedMOR(spec="VirtualMachine", name="bar")
assert VSphereCheck._is_excluded(excluded_host, {"name": excluded_host.name}, include_regexes, None)
assert VSphereCheck._is_excluded(excluded_vm, {"name": excluded_vm.name}, include_regexes, None)
# Sample(s)
include_regexes = None
include_only_marked = True
# OK
included_vm = MockedMOR(spec="VirtualMachine", name="foo", label=True)
assert not VSphereCheck._is_excluded(
included_vm, {"customValue": included_vm.customValue}, include_regexes, include_only_marked
)
# Not OK
included_vm = MockedMOR(spec="VirtualMachine", name="foo")
assert VSphereCheck._is_excluded(included_vm, {"customValue": []}, include_regexes, include_only_marked)
def test_vms_in_filtered_host_are_filtered(vsphere, instance):
    """Test that all vms belonging to a filtered host are also filtered"""
    server_instance = vsphere._get_server_instance(instance)
    filtered_host = MockedMOR(spec="HostSystem")
    filtered_vm = MockedMOR(spec="VirtualMachine")
    non_filtered_host = MockedMOR(spec="HostSystem")
    non_filtered_vm = MockedMOR(spec="VirtualMachine")
    # Two host/VM pairs; each VM reports the host it runs on via "runtime.host".
    mocked_mors_attrs = {
        filtered_host: {"name": "filtered_host_number_1", "parent": None},
        filtered_vm: {
            "name": "this_vm_is_filtered",
            "runtime.powerState": vim.VirtualMachinePowerState.poweredOn,
            "runtime.host": filtered_host,
        },
        non_filtered_host: {"name": "non_filtered_host_number_1", "parent": None},
        non_filtered_vm: {
            "name": "this_vm_is_not_filtered",
            "runtime.powerState": vim.VirtualMachinePowerState.poweredOn,
            "runtime.host": non_filtered_host,
        },
    }
    # Negative lookahead: keep only hosts whose name does NOT start with "filtered_".
    regex = {'host_include': '^(?!filtered_.+)'}
    with mock.patch("datadog_checks.vsphere.VSphereCheck._collect_mors_and_attributes", return_value=mocked_mors_attrs):
        obj_list = vsphere._get_all_objs(server_instance, regex, False, [])
        # Only the non-filtered host and the VM running on it survive.
        assert len(obj_list[vim.VirtualMachine]) == 1
        assert len(obj_list[vim.HostSystem]) == 1
        assert {
            "mor_type": "vm",
            "mor": non_filtered_vm,
            "hostname": "this_vm_is_not_filtered",
            "tags": ["vsphere_host:non_filtered_host_number_1", "vsphere_type:vm"],
        } == obj_list[vim.VirtualMachine][0]
        assert {
            "mor_type": "host",
            "mor": non_filtered_host,
            "hostname": "non_filtered_host_number_1",
            "tags": ["vsphere_type:host"],
        } == obj_list[vim.HostSystem][0]
def test__get_all_objs(vsphere, instance):
    """
    Test that we don't raise KeyError if the property collector failed to collect some attributes
    and that we handle the case were there are missing attributes
    """
    server_instance = vsphere._get_server_instance(instance)
    vm_no_parent = MockedMOR(spec="VirtualMachine")
    vm_no_powerstate = MockedMOR(spec="VirtualMachine")
    vm_host_parent = MockedMOR(spec="VirtualMachine")
    mocked_host = MockedMOR(spec="HostSystem")
    mocked_datastore = MockedMOR(spec="Datastore")
    mocked_datacenter = MockedMOR(spec="Datacenter")
    mocked_cluster = MockedMOR(spec="ClusterComputeResource")
    # Each entry deliberately omits one or more attributes (name, parent,
    # runtime.powerState...) to exercise the missing-attribute code paths.
    mocked_mors_attrs = {
        vm_no_parent: {"name": "vm_no_parent", "runtime.powerState": vim.VirtualMachinePowerState.poweredOn},
        vm_no_powerstate: {"name": "vm_no_powerstate"},
        vm_host_parent: {"parent": mocked_host, "runtime.powerState": vim.VirtualMachinePowerState.poweredOn},
        mocked_host: {"name": "mocked_host", "parent": None},
        mocked_datastore: {},
        mocked_cluster: {"name": "cluster"},
        mocked_datacenter: {"parent": MockedMOR(spec="Folder", name="unknown folder"), "name": "datacenter"},
    }
    with mock.patch("datadog_checks.vsphere.VSphereCheck._collect_mors_and_attributes", return_value=mocked_mors_attrs):
        obj_list = vsphere._get_all_objs(server_instance, None, False, [])
        # vm_no_powerstate (no "runtime.powerState" attribute) is not collected.
        assert len(obj_list[vim.VirtualMachine]) == 2
        # A VM with no parent gets the "unknown" host tag.
        assert {
            "mor_type": "vm",
            "mor": vm_no_parent,
            "hostname": "vm_no_parent",
            "tags": ["vsphere_host:unknown", "vsphere_type:vm"],
        } in obj_list[vim.VirtualMachine]
        # A VM with no "name" attribute gets an "unknown" hostname.
        assert {
            "mor_type": "vm",
            "mor": vm_host_parent,
            "hostname": "unknown",
            "tags": ["vsphere_host:mocked_host", "vsphere_host:unknown", "vsphere_type:vm"],
        } in obj_list[vim.VirtualMachine]
        assert len(obj_list[vim.HostSystem]) == 1
        assert {
            "mor_type": "host",
            "mor": mocked_host,
            "hostname": "mocked_host",
            "tags": ["vsphere_type:host"],
        } in obj_list[vim.HostSystem]
        # A datastore with no attributes at all still yields an entry.
        assert len(obj_list[vim.Datastore]) == 1
        assert {
            "mor_type": "datastore",
            "mor": mocked_datastore,
            "hostname": None,
            "tags": ["vsphere_datastore:unknown", "vsphere_type:datastore"],
        } in obj_list[vim.Datastore]
        assert len(obj_list[vim.Datacenter]) == 1
        assert {
            "mor_type": "datacenter",
            "mor": mocked_datacenter,
            "hostname": None,
            "tags": ["vsphere_folder:unknown", "vsphere_datacenter:datacenter", "vsphere_type:datacenter"],
        } in obj_list[vim.Datacenter]
        assert len(obj_list[vim.ClusterComputeResource]) == 1
        assert {
            "mor_type": "cluster",
            "mor": mocked_cluster,
            "hostname": None,
            "tags": ["vsphere_cluster:cluster", "vsphere_type:cluster"],
        } in obj_list[vim.ClusterComputeResource]
def test__collect_mors_and_attributes(vsphere, instance):
    """
    Test that we check for errors when collecting properties with property collector
    """
    server_instance = vsphere._get_server_instance(instance)
    with mock.patch("datadog_checks.vsphere.vsphere.vmodl"):
        obj = MagicMock(missingSet=None, obj="obj")
        result = MagicMock(token=None, objects=[obj])
        server_instance.content.propertyCollector.RetrievePropertiesEx.return_value = result
        log = MagicMock()
        vsphere.log = log
        # No missing properties: collection succeeds and nothing is logged.
        mor_attrs = vsphere._collect_mors_and_attributes(server_instance)
        log.error.assert_not_called()
        assert len(mor_attrs) == 1
        # A property reported in missingSet is logged as an error, but the
        # object itself is still returned.
        obj.missingSet = [MagicMock(path="prop", fault="fault")]
        mor_attrs = vsphere._collect_mors_and_attributes(server_instance)
        log.error.assert_called_once_with('Unable to retrieve property %s for object %s: %s', 'prop', 'obj', 'fault')
        assert len(mor_attrs) == 1
def test__cache_morlist_raw(vsphere, instance):
    """
    Explore the vCenter infrastructure to discover hosts, virtual machines.

    Input topology:
    ```
    rootFolder
    - datacenter1
      - compute_resource1
        - host1          # Filtered out
        - host2
    - folder1
      - datacenter2
        - compute_resource2
          - host3
            - vm1        # Not labeled
            - vm2        # Filtered out
            - vm3        # Powered off
            - vm4
    ```
    """
    # Samples
    with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
        # host1 does not match "host[2-9]", vm2 does not match "vm[^2]", and
        # include_only_marked additionally drops the unlabeled vm1.
        instance["host_include_only_regex"] = "host[2-9]"
        instance["vm_include_only_regex"] = "vm[^2]"
        instance["include_only_marked"] = True

        # Discover hosts and virtual machines
        vsphere._cache_morlist_raw(instance)

        # Assertions: 1 labeled+monitored VM + 2 hosts + 2 datacenters + 2 clusters + 1 datastore.
        assertMOR(vsphere, instance, count=8)

        # ...on hosts
        assertMOR(vsphere, instance, spec="host", count=2)
        tags = [
            "vcenter_server:vsphere_mock",
            "vsphere_folder:rootFolder",
            "vsphere_datacenter:datacenter1",
            "vsphere_compute:compute_resource1",
            "vsphere_cluster:compute_resource1",
            "vsphere_type:host",
        ]
        assertMOR(vsphere, instance, name="host2", spec="host", tags=tags)
        tags = [
            "vcenter_server:vsphere_mock",
            "vsphere_folder:rootFolder",
            "vsphere_folder:folder1",
            "vsphere_datacenter:datacenter2",
            "vsphere_compute:compute_resource2",
            "vsphere_cluster:compute_resource2",
            "vsphere_type:host",
        ]
        assertMOR(vsphere, instance, name="host3", spec="host", tags=tags)

        # ...on VMs: only vm4 survives the filters above.
        assertMOR(vsphere, instance, spec="vm", count=1)
        tags = [
            "vcenter_server:vsphere_mock",
            "vsphere_folder:folder1",
            "vsphere_datacenter:datacenter2",
            "vsphere_compute:compute_resource2",
            "vsphere_cluster:compute_resource2",
            "vsphere_host:host3",
            "vsphere_type:vm",
        ]
        assertMOR(vsphere, instance, name="vm4", spec="vm", subset=True, tags=tags)
def test_collect_realtime_only(vsphere, instance):
    """
    Test the collect_realtime_only parameter acts as expected
    """
    vsphere._process_mor_objects_queue_async = MagicMock()

    def run_discovery_and_processing():
        # vmodl is patched out so no real vSphere API is touched.
        with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
            vsphere._cache_morlist_raw(instance)
            vsphere._process_mor_objects_queue(instance)

    instance["collect_realtime_only"] = False
    run_discovery_and_processing()
    # Called once to process the 2 datacenters, then 2 clusters, then the datastore
    assert vsphere._process_mor_objects_queue_async.call_count == 3

    instance["collect_realtime_only"] = True
    vsphere._process_mor_objects_queue_async.reset_mock()
    run_discovery_and_processing()
    # With realtime-only collection the async path is never taken.
    assert vsphere._process_mor_objects_queue_async.call_count == 0
def test_check(vsphere, instance):
    """
    Test the check() method

    Runs a full check against the mocked vCenter topology and verifies the
    external tags submitted for every host and VM.
    """
    with mock.patch('datadog_checks.vsphere.vsphere.vmodl'):
        with mock.patch.object(vsphere, 'set_external_tags') as set_external_tags:
            vsphere.check(instance)
            set_external_tags.assert_called_once()
            # set_external_tags receives a list of (hostname, tags) pairs.
            all_the_tags = dict(set_external_tags.call_args[0][0])

            # NOTE(review): 'vsphere_host:host3' appears twice in each VM tag
            # list below; this mirrors the check's current output — confirm
            # whether the duplication is intended.
            assert all_the_tags['vm4'][SOURCE_TYPE] == [
                'vcenter_server:vsphere_mock',
                'vsphere_folder:rootFolder',
                'vsphere_folder:folder1',
                'vsphere_datacenter:datacenter2',
                'vsphere_cluster:compute_resource2',
                'vsphere_compute:compute_resource2',
                'vsphere_host:host3',
                'vsphere_host:host3',
                'vsphere_type:vm',
            ]
            assert all_the_tags['host1'][SOURCE_TYPE] == [
                'vcenter_server:vsphere_mock',
                'vsphere_folder:rootFolder',
                'vsphere_datacenter:datacenter1',
                'vsphere_cluster:compute_resource1',
                'vsphere_compute:compute_resource1',
                'vsphere_type:host',
            ]
            assert all_the_tags['host3'][SOURCE_TYPE] == [
                'vcenter_server:vsphere_mock',
                'vsphere_folder:rootFolder',
                'vsphere_folder:folder1',
                'vsphere_datacenter:datacenter2',
                'vsphere_cluster:compute_resource2',
                'vsphere_compute:compute_resource2',
                'vsphere_type:host',
            ]
            assert all_the_tags['vm2'][SOURCE_TYPE] == [
                'vcenter_server:vsphere_mock',
                'vsphere_folder:rootFolder',
                'vsphere_folder:folder1',
                'vsphere_datacenter:datacenter2',
                'vsphere_cluster:compute_resource2',
                'vsphere_compute:compute_resource2',
                'vsphere_host:host3',
                'vsphere_host:host3',
                'vsphere_type:vm',
            ]
            assert all_the_tags['vm1'][SOURCE_TYPE] == [
                'vcenter_server:vsphere_mock',
                'vsphere_folder:rootFolder',
                'vsphere_folder:folder1',
                'vsphere_datacenter:datacenter2',
                'vsphere_cluster:compute_resource2',
                'vsphere_compute:compute_resource2',
                'vsphere_host:host3',
                'vsphere_host:host3',
                'vsphere_type:vm',
            ]
            assert all_the_tags['host2'][SOURCE_TYPE] == [
                'vcenter_server:vsphere_mock',
                'vsphere_folder:rootFolder',
                'vsphere_datacenter:datacenter1',
                'vsphere_cluster:compute_resource1',
                'vsphere_compute:compute_resource1',
                'vsphere_type:host',
            ]
| 42.363073 | 120 | 0.678796 |
53f022c5295afcf5069c62aac2f57d65cf97e719
| 2,147 |
py
|
Python
|
data_steward/constants/validation/email_notification.py
|
jp3477/curation
|
41f98d57c8273d9963ad6d466a237c99b63c74be
|
[
"MIT"
] | 1 |
2021-04-05T18:06:25.000Z
|
2021-04-05T18:06:25.000Z
|
data_steward/constants/validation/email_notification.py
|
jp3477/curation
|
41f98d57c8273d9963ad6d466a237c99b63c74be
|
[
"MIT"
] | null | null | null |
data_steward/constants/validation/email_notification.py
|
jp3477/curation
|
41f98d57c8273d9963ad6d466a237c99b63c74be
|
[
"MIT"
] | null | null | null |
MANDRILL_API_KEY = 'MANDRILL_API_KEY'
UNSET_MANDRILL_API_KEY_MSG = f"Mandrill API key not set in environment variable {MANDRILL_API_KEY}"
CONTACT_LIST_QUERY = """
SELECT *
FROM `{{project}}.{{dataset}}.{{contact_table}}`
"""
EHR_OPERATIONS = 'EHR Ops'
EHR_OPS_ZENDESK = '[email protected]'
DATA_CURATION_LISTSERV = '[email protected]'
NO_REPLY_ADDRESS = '[email protected]'
NO_DATA_STEWARD = 'no data steward'
# HPO contact list table columns
SITE_NAME = 'site_name'
HPO_ID = 'hpo_id'
SITE_POINT_OF_CONTACT = 'site_point_of_contact'
# Mandrill API constants
MAIL_TO = 'mail_to'
EHR_OPS_SITE_URL = 'https://sites.google.com/view/ehrupload'
# Email content
EMAIL_BODY = """
<p style="font-size:115%;">Hi {{ site_name }},</p>
<p style="font-size:115%;">Your submission <b>{{ folder }}</b>
{% if submission_error %}was NOT successfully loaded on {{ timestamp }}.<br>
{% else %}was successfully loaded on {{ timestamp }}.<br>
{% endif %}
Please review the <code>results.html</code> submission report attached to this email{% if submission_error %}<br>
and resolve the errors before making a new submission{% endif %}.<br>
If any of your files have not been successfully uploaded, please run the
<a href="https://github.com/all-of-us/aou-ehr-file-check">local file check</a> before making your submission.<br>
To view the full set of curation reports, please visit the submission folder in your
GCS bucket <a href="{{ submission_folder_url }}">here</a>.<br>
For more information on the reports and how to download them, please refer to our
<a href="{{ ehr_ops_site_url }}">EHR Ops website</a>.</p>
<p style="font-size:115%;">You are receiving this email because you are listed as a point of contact
for HPO Site <em>{{ site_name }}</em>.<br>
If you have additional questions or wish to no longer receive these emails, please reply/send an
email to <a href="mailto:{{ eo_zendesk }}">{{ eo_zendesk }}</a>.</p>
<p style="font-size:115%;">EHR Ops team, DRC<br>
<em>All of Us</em> Research Program<br>
<img src="cid:{{ aou_logo }}"/></p>
"""
AOU_LOGO = 'aou_logo'
AOU_LOGO_PNG = 'all-of-us-logo.png'
| 39.036364 | 116 | 0.726129 |
53f15f1ad7b41be043cf58489197157314abeded
| 2,110 |
py
|
Python
|
clip/clip.py
|
keshav11/clip
|
f426dee5c3a6885ddeba20d450d85fc71951c5ca
|
[
"MIT"
] | 1 |
2018-03-27T05:13:43.000Z
|
2018-03-27T05:13:43.000Z
|
clip/clip.py
|
keshav11/clip
|
f426dee5c3a6885ddeba20d450d85fc71951c5ca
|
[
"MIT"
] | 1 |
2018-03-27T14:57:05.000Z
|
2018-03-27T14:57:05.000Z
|
clip/clip.py
|
keshav11/clip
|
f426dee5c3a6885ddeba20d450d85fc71951c5ca
|
[
"MIT"
] | null | null | null |
import os
import argparse
from pathlib import Path
CLIP_FILE = os.path.join(Path.home(), '.clip')
TEMP_FILE = '.TEMP_FILE'
if __name__ == '__main__':
main()
| 26.708861 | 95 | 0.555924 |
53f16f379316b618805c2343722f2905bbfec891
| 2,383 |
py
|
Python
|
tests/unit/test_nsga2.py
|
learsi1911/GAMA_pygmo_v4
|
459807db352dd1c9f9c1e0e322f8c1e9b5abbca0
|
[
"Apache-2.0"
] | 49 |
2018-10-22T06:05:29.000Z
|
2021-09-07T20:12:36.000Z
|
tests/unit/test_nsga2.py
|
learsi1911/GAMA_pygmo_v4
|
459807db352dd1c9f9c1e0e322f8c1e9b5abbca0
|
[
"Apache-2.0"
] | 102 |
2018-10-02T12:00:47.000Z
|
2021-02-24T14:35:30.000Z
|
tests/unit/test_nsga2.py
|
learsi1911/GAMA_pygmo_v4
|
459807db352dd1c9f9c1e0e322f8c1e9b5abbca0
|
[
"Apache-2.0"
] | 11 |
2021-06-04T11:56:19.000Z
|
2022-03-21T20:21:15.000Z
|
from typing import List, Tuple
from gama.genetic_programming.nsga2 import (
NSGAMeta,
fast_non_dominated_sort,
crowding_distance_assignment,
)
def _tuples_to_NSGAMeta(tuples: List[Tuple]) -> List[NSGAMeta]:
    """ Converts a list of tuples to NSGAMeta objects. """
    # Can't declare it directly in a loop as it does not create a new scope.
    # NOTE(review): `fetch_value` is defined elsewhere in this module;
    # presumably it builds a metric accessor bound to tuple index i — confirm
    # against the full file.
    metrics = [fetch_value(i) for i in range(len(tuples[0]))]
    return [NSGAMeta(t, metrics) for t in tuples]
| 33.56338 | 84 | 0.698699 |
53f1e3a9ae5af85a04a5bf0c18896233f3416fe3
| 2,738 |
py
|
Python
|
stac_ingest/utils/tds.py
|
crim-ca/stac-ingest
|
e4cc2a66fee4b86ec238f139135d78215ec91ea4
|
[
"Apache-2.0"
] | null | null | null |
stac_ingest/utils/tds.py
|
crim-ca/stac-ingest
|
e4cc2a66fee4b86ec238f139135d78215ec91ea4
|
[
"Apache-2.0"
] | null | null | null |
stac_ingest/utils/tds.py
|
crim-ca/stac-ingest
|
e4cc2a66fee4b86ec238f139135d78215ec91ea4
|
[
"Apache-2.0"
] | null | null | null |
# File taken from https://github.com/Ouranosinc/pavics-vdb/blob/master/catalog/tds.py
"""Utility function to parse metadata from a THREDDS Data Server catalog."""
def attrs_from_ds(ds):
    """Extract attributes from TDS Dataset."""
    # Parse the NcML endpoint advertised by the dataset, then keep the full
    # set of service endpoints alongside the parsed attributes.
    metadata = attrs_from_ncml(ds.access_urls["NCML"])
    metadata["__services__"] = ds.access_urls
    return metadata
def attrs_from_ncml(url):
    """Extract attributes from NcML file.

    Parameters
    ----------
    url : str
        Link to NcML service of THREDDS server for a dataset.

    Returns
    -------
    dict
        Global attribute values keyed by facet names, with variable attributes in `__variable__` nested dict, and
        additional specialized attributes in `__group__` nested dict.
    """
    # Imported lazily so the module can be used without lxml/requests installed.
    import lxml.etree
    import requests

    parser = lxml.etree.XMLParser(encoding='UTF-8')

    ns = {"ncml": "http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2"}

    # Parse XML content - UTF-8 encoded documents need to be read as bytes
    # NOTE(review): requests.get has no timeout — a stalled THREDDS server
    # will hang this call indefinitely.
    xml = requests.get(url).content
    doc = lxml.etree.fromstring(xml, parser=parser)
    nc = doc.xpath("/ncml:netcdf", namespaces=ns)[0]

    # Extract global attributes
    out = _attrib_to_dict(nc.xpath("ncml:attribute", namespaces=ns))

    # Extract group attributes
    gr = {}
    for group in nc.xpath("ncml:group", namespaces=ns):
        gr[group.attrib["name"]] = _attrib_to_dict(group.xpath("ncml:attribute", namespaces=ns))

    # Extract variable attributes (coordinate-axis variables are skipped)
    va = {}
    for variable in nc.xpath("ncml:variable", namespaces=ns):
        if '_CoordinateAxisType' in variable.xpath("ncml:attribute/@name", namespaces=ns):
            continue
        va[variable.attrib["name"]] = _attrib_to_dict(variable.xpath("ncml:attribute", namespaces=ns))

    out["__group__"] = gr
    out["__variable__"] = va

    return out
def _attrib_to_dict(elems):
"""Convert element attributes to dictionary.
Ignore attributes with names starting with _
"""
hidden_prefix = "_"
out = {}
for e in elems:
a = e.attrib
if a["name"].startswith(hidden_prefix):
continue
out[a["name"]] = a["value"]
return out
| 29.44086 | 111 | 0.648283 |
53f27d7f999c3ddce62ec7074bca13f18a96eb7b
| 4,484 |
py
|
Python
|
tact/util.py
|
brunel-physics/mva_scikit
|
b0182da89efa466461aaf2cff4387c821df1758b
|
[
"BSD-3-Clause"
] | null | null | null |
tact/util.py
|
brunel-physics/mva_scikit
|
b0182da89efa466461aaf2cff4387c821df1758b
|
[
"BSD-3-Clause"
] | null | null | null |
tact/util.py
|
brunel-physics/mva_scikit
|
b0182da89efa466461aaf2cff4387c821df1758b
|
[
"BSD-3-Clause"
] | 2 |
2020-05-18T19:52:32.000Z
|
2022-01-24T10:07:35.000Z
|
# -*- coding: utf-8 -*-
"""
Module containing miscellaneous utility functions.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import collections
import collections.abc
import itertools

import numpy as np
def deep_update(d1, d2):
    """
    Adds key-value pairs in d2 to d1. Conflicts are resolved in favour of d2.

    Recurses into all values in d2 which belong to the
    collections.abc.Mapping abstract base class.

    Parameters
    ----------
    d1 : collections.abc.Mapping
        Base dictionary
    d2 : collections.abc.Mapping
        Dictionary with updated values

    Returns
    -------
    d1 : collections.abc.Mapping
        Updated dictionary (modified in place and also returned)
    """
    # d2.items() (not the Python 2-only .iteritems()) keeps this working on
    # Python 3; collections.abc.Mapping is required on Python 3.10+, where
    # the collections.Mapping alias was removed.
    for k, v in d2.items():
        if isinstance(v, collections.abc.Mapping):
            d1[k] = deep_update(d1.get(k, {}), v)
        else:
            d1[k] = v
    return d1
def nodes(tree):
    """
    Return a list of values at every node of a tree, in pre-order.

    Parameters
    ----------
    tree : BinaryTree
        Tree whose nodes expose ``val``, ``left`` and ``right`` attributes.
        A child without a ``val`` attribute is treated as a leaf payload and
        appended as-is.

    Returns
    -------
    collected : list
        List of values at tree nodes.
    """
    collected = []

    def _walk(node):
        collected.append(node.val)
        for child in (node.left, node.right):
            if hasattr(child, "val"):
                _walk(child)
            else:
                # Not a subtree: record the raw child value.
                collected.append(child)

    _walk(tree)
    return collected
def corrcoef(x, y=None, rowvar=True, fweights=None, aweights=None):
    """Return Pearson product-moment correlation coefficients.

    Mirrors the numpy implementation, without the deprecated ``bias`` and
    ``ddof`` keyword arguments, and with ``fweights``/``aweights`` forwarded
    to ``np.cov``.

    Parameters
    ----------
    x : array_like
        A 1-D or 2-D array containing multiple variables and observations.
        Each row of `x` represents a variable, and each column a single
        observation of all those variables. Also see `rowvar` below.
    y : array_like, optional
        An additional set of variables and observations. `y` has the same
        shape as `x`.
    rowvar : bool, optional
        If `rowvar` is True (default), then each row represents a
        variable, with observations in the columns. Otherwise, the
        relationship is transposed: each column represents a variable,
        while the rows contain observations.
    fweights : array_like, int, optional
        1-D array of integer frequency weights; the number of times each
        observation vector should be repeated.
    aweights : array_like, optional
        1-D array of observation vector weights. These relative weights are
        typically large for observations considered "important" and smaller
        for observations considered less "important".

    Returns
    -------
    ndarray
        The correlation coefficient matrix of the variables.
    """
    cov = np.cov(x, y, rowvar, fweights=fweights, aweights=aweights)
    try:
        variances = np.diag(cov)
    except ValueError:
        # Scalar covariance: nan for degenerate values (nan, inf, 0), 1 otherwise.
        return cov / cov
    stddev = np.sqrt(variances.real)
    # Normalise by the outer product of the standard deviations.
    cov = cov / np.outer(stddev, stddev)
    # Clip real and imaginary parts to [-1, 1]; floating-point rounding can
    # push magnitudes slightly outside the valid range.
    np.clip(cov.real, -1, 1, out=cov.real)
    if np.iscomplexobj(cov):
        np.clip(cov.imag, -1, 1, out=cov.imag)
    return cov
| 26.222222 | 79 | 0.61686 |
53f2926766ffb4a7606e6a1c06800d6ce10ac775
| 3,893 |
py
|
Python
|
src/stochastic_tour.py
|
DavidNKraemer/ams553-final-project
|
fc23fe5f126a8bd9ea593c0b339883ec71820a05
|
[
"MIT"
] | null | null | null |
src/stochastic_tour.py
|
DavidNKraemer/ams553-final-project
|
fc23fe5f126a8bd9ea593c0b339883ec71820a05
|
[
"MIT"
] | null | null | null |
src/stochastic_tour.py
|
DavidNKraemer/ams553-final-project
|
fc23fe5f126a8bd9ea593c0b339883ec71820a05
|
[
"MIT"
] | null | null | null |
import numpy as np
import random
from collections import namedtuple
# Lightweight records for the stochastic tour model.
# NOTE(review): field semantics inferred from names only — confirm against
# the rest of the module (speed/probability for a drone, location for a site).
Drone = namedtuple('Drone', 'speed probability')
Site = namedtuple('Site', 'location')
| 25.781457 | 75 | 0.5813 |
53f4891624f4d3bc5f0cf1971fce25d204c1cf18
| 1,325 |
py
|
Python
|
orbit/actions/conditional_action_test.py
|
mcasanova1445/models
|
37be0fdb4abccca633bb3199a4e6f3f71cd174d9
|
[
"Apache-2.0"
] | 1 |
2020-09-14T10:46:07.000Z
|
2020-09-14T10:46:07.000Z
|
orbit/actions/conditional_action_test.py
|
mdsaifhaider/models
|
7214e17eb425963ec3d0295be215d5d26deaeb32
|
[
"Apache-2.0"
] | 8 |
2020-05-19T00:52:30.000Z
|
2020-06-04T23:57:20.000Z
|
orbit/actions/conditional_action_test.py
|
mdsaifhaider/models
|
7214e17eb425963ec3d0295be215d5d26deaeb32
|
[
"Apache-2.0"
] | 2 |
2021-10-07T04:47:04.000Z
|
2021-12-18T04:18:19.000Z
|
# Copyright 2022 The Orbit Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for orbit.actions.conditional_action."""
from orbit import actions
import tensorflow as tf
if __name__ == '__main__':
tf.test.main()
| 33.125 | 79 | 0.739623 |
53f4cffa9d98d6fc50ab66c96fe1f4f487091562
| 880 |
py
|
Python
|
Customizations/Tagging/show_tags.task.py
|
phnomcobra/valarie-content
|
b1f6242605badd2b0b2e53c4320f5d963b5e0b21
|
[
"MIT"
] | null | null | null |
Customizations/Tagging/show_tags.task.py
|
phnomcobra/valarie-content
|
b1f6242605badd2b0b2e53c4320f5d963b5e0b21
|
[
"MIT"
] | null | null | null |
Customizations/Tagging/show_tags.task.py
|
phnomcobra/valarie-content
|
b1f6242605badd2b0b2e53c4320f5d963b5e0b21
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
################################################################################
# DOCUMENTS
#
# Justin Dierking
# [email protected]
# 614 692 2050
#
# 04/22/2018 Original Construction
################################################################################
import traceback
import json
| 25.882353 | 80 | 0.465909 |
53f8fdaf42e35a017e458aac366d4271e4baa22e
| 1,932 |
py
|
Python
|
examples/python/masked_hist.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
examples/python/masked_hist.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
examples/python/masked_hist.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
import vigra
import numpy
import pylab
from seglib import cgp2d
from seglib.preprocessing import norm01
import seglib.edge_detectors.pixel as edp
import seglib.region_descriptors.pixel as rdp
from seglib.preprocessing import norm01
from seglib.histogram import jointHistogram,histogram
from seglib.region_descriptors.pixel.sift import denseSift
# change me to your path
img = "img/text.jpg"
img = numpy.squeeze(vigra.readImage(img))#[0:75,0:75,:]
binCount = 30
sigma = 1.5
histImg = numpy.zeros(img.shape[0:2]+(binCount*3,))
imgBig = None
sizes = [3,4,5,8,10,15,20,25,40,100]
scalings = [5,10,15]
for size in sizes:
for scaling in scalings:
size = int (size)
scaling = float(scaling)
print size,scaling
labels ,nseg= vigra.analysis.slicSuperpixels(vigra.colors.transform_RGB2Lab(img),scaling,size)
labels = vigra.analysis.labelImage(labels).astype(numpy.uint64)
cgp,tgrid = cgp2d.cgpFromLabels(labels)
if imgBig is None:
imgBig=vigra.sampling.resize(img,cgp.shape)
#cgp2d.visualize(imgBig,cgp=cgp)
print "accumulate cell "
hist = cgp.accumulateCellHistogram(cellType=2,image=img,binCount=binCount,sigma=sigma)
hist = hist.reshape([cgp.numCells(2),-1])
for c in range(histImg.shape[2]):
histImg[:,:,c] += (size)*cgp.featureToImage(cellType=2,features=hist[:,c],ignoreInactive=False,useTopologicalShape=False)
histImg=numpy.require(histImg,dtype=numpy.float32)
histImg=vigra.taggedView(histImg, 'xyc')
histImg = vigra.gaussianSmoothing(histImg,sigma=1.0)
#for c in range(histImg.shape[2]):
# #print c
# pylab.imshow( numpy.swapaxes( norm01(histImg[:,:,c]) ,0,1) )
# pylab.show()
#
# print "hist",hist.shape
imgdt = rdp.deepDetexturize(srcImg=img,img=histImg,nIteration=10,
nCluster=10,reductionAlg='pca',nldEdgeThreshold=10.0,nldScale=10.0,distance=None)#'cityblock')
| 27.6 | 133 | 0.70911 |
53fa17d1fb343f99d7928294d83a0d41844594ce
| 748 |
py
|
Python
|
backup/models.py
|
helwete/simple-backup
|
c7dd1a08d398f5b4005c187e274e192b2e024f30
|
[
"MIT"
] | null | null | null |
backup/models.py
|
helwete/simple-backup
|
c7dd1a08d398f5b4005c187e274e192b2e024f30
|
[
"MIT"
] | null | null | null |
backup/models.py
|
helwete/simple-backup
|
c7dd1a08d398f5b4005c187e274e192b2e024f30
|
[
"MIT"
] | null | null | null |
from datetime import date
from django.conf import settings
from django.db import models
# Create your models here.
| 35.619048 | 94 | 0.743316 |
53fa743e6670e6a8830a736afc87f494f4f511b4
| 2,713 |
py
|
Python
|
Kmeans Cluster/Kmeans_Compare.py
|
Jojoxiao/Machine-Learning-for-Beginner-by-Python3
|
71b91c9cba5803bd78d4d31be6dabb1d3989e968
|
[
"MIT"
] | 397 |
2018-05-28T02:07:32.000Z
|
2022-03-30T09:53:37.000Z
|
Kmeans Cluster/Kmeans_Compare.py
|
976634681/Machine-Learning-for-Beginner-by-Python3
|
d9effcbb1b390dc608a0f4c0a28f0ad03892047a
|
[
"MIT"
] | 4 |
2019-01-14T16:41:02.000Z
|
2021-03-11T13:23:06.000Z
|
Kmeans Cluster/Kmeans_Compare.py
|
976634681/Machine-Learning-for-Beginner-by-Python3
|
d9effcbb1b390dc608a0f4c0a28f0ad03892047a
|
[
"MIT"
] | 235 |
2018-06-28T05:31:40.000Z
|
2022-03-11T03:20:07.000Z
|
#-*- codingutf-8 -*-
# &Author AnFany
#
import Kmeans_AnFany as K_Af # AnFany
import Kmeans_Sklearn as K_Sk # Sklearn
import matplotlib.pyplot as plt
from pylab import mpl #
mpl.rcParams['font.sans-serif'] = ['FangSong'] #
mpl.rcParams['axes.unicode_minus'] = False
import numpy as np
# sklearn
from sklearn.datasets import make_blobs
X, Y = make_blobs(n_samples=600, centers=6, n_features=2)
#
#
# AnFany
kresult = K_Af.op_kmeans(X, countcen=6)
# Sklearn
sk = K_Sk.KMeans(init='k-means++', n_clusters=6, n_init=10)
train = sk.fit(X)
result = sk.predict(X)
skru = K_Sk.trans(result)
#
#
#
#
plt.subplot(2, 2, 1)
fig_scatter(X, Y)
plt.subplot(2, 2, 2)
sca(X, kresult[0], kresult[2])
plt.subplot(2, 2, 3)
sca(X, train.cluster_centers_, skru, titl='Sklearn ')
plt.subplot(2, 2, 4)
plt.axis('off')
plt.text(0.3, 0.6, 'AnFany %.5f'%Cost(X, kresult[2]))
plt.text(0.3, 0.3, 'Sklearn %.5f'%Cost(X, skru))
plt.show()
| 25.59434 | 123 | 0.573535 |
53faaa8c310593f3046382b5d7e3fa8922d7e1b7
| 5,544 |
py
|
Python
|
control_panel.py
|
Stayermax/5dof-bartender-robot
|
dd04303afd2c252e6f7105e33ba35b01f3915194
|
[
"MIT"
] | null | null | null |
control_panel.py
|
Stayermax/5dof-bartender-robot
|
dd04303afd2c252e6f7105e33ba35b01f3915194
|
[
"MIT"
] | null | null | null |
control_panel.py
|
Stayermax/5dof-bartender-robot
|
dd04303afd2c252e6f7105e33ba35b01f3915194
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Control panel file
"""
import pddl_solver as pddl
import ik
import rospy
from get_object_position import get_object_position
import time
from constants import *
from spawn_models import reset_model_position, reset_all, spawn_model, spawn_all_models
from delete_models import delete_all, delete_model
if __name__ == '__main__':
control_panel()
| 40.173913 | 170 | 0.530483 |
53fac3e7275b1080c646a6ed12952be14a9e25f1
| 1,427 |
py
|
Python
|
Enigma/Enigma.py
|
archanpatkar/Enigma
|
dbbc1fda99bf451a0284f051c724ed43915dfe2a
|
[
"MIT"
] | 3 |
2019-06-25T06:46:50.000Z
|
2021-07-27T14:14:32.000Z
|
Enigma/Enigma.py
|
archanpatkar/Enigma
|
dbbc1fda99bf451a0284f051c724ed43915dfe2a
|
[
"MIT"
] | null | null | null |
Enigma/Enigma.py
|
archanpatkar/Enigma
|
dbbc1fda99bf451a0284f051c724ed43915dfe2a
|
[
"MIT"
] | 1 |
2021-07-27T14:20:30.000Z
|
2021-07-27T14:20:30.000Z
|
from Enigma.Rotor import Rotor
from Enigma.Reflector import Reflector
from Enigma.Plugboard import Plugboard
| 32.431818 | 143 | 0.5459 |
53fad9cdfe9f1c4fdba68eaa168284de33fce059
| 647 |
py
|
Python
|
var/spack/repos/builtin/packages/exiv2/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 9 |
2018-04-18T07:51:40.000Z
|
2021-09-10T03:56:57.000Z
|
var/spack/repos/builtin/packages/exiv2/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 907 |
2018-04-18T11:17:57.000Z
|
2022-03-31T13:20:25.000Z
|
var/spack/repos/builtin/packages/exiv2/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 29 |
2018-11-05T16:14:23.000Z
|
2022-02-03T16:07:09.000Z
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
| 30.809524 | 96 | 0.710974 |
53fb4aef0b525310a37b5aa5c278d91c9afe8fd1
| 2,711 |
py
|
Python
|
magicauth/send_token.py
|
JMIdeaMaker/django-magicauth
|
ffca3423c46f8f3d7e49eaf374b33265d4730587
|
[
"MIT"
] | null | null | null |
magicauth/send_token.py
|
JMIdeaMaker/django-magicauth
|
ffca3423c46f8f3d7e49eaf374b33265d4730587
|
[
"MIT"
] | null | null | null |
magicauth/send_token.py
|
JMIdeaMaker/django-magicauth
|
ffca3423c46f8f3d7e49eaf374b33265d4730587
|
[
"MIT"
] | null | null | null |
import math
from django.contrib.auth import get_user_model
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import send_mail
from django.template import loader
from magicauth import settings as magicauth_settings
from django.conf import settings as django_settings
from magicauth.models import MagicToken
import sendgrid
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail
sg = sendgrid.SendGridAPIClient(django_settings.SENDGRID_API_KEY)
| 36.146667 | 98 | 0.69384 |
53fbcfdc398532d49a5138646d1108fbc979d12a
| 2,148 |
py
|
Python
|
qcdb/util/paths.py
|
loriab/qccddb
|
d9e156ef8b313ac0633211fc6b841f84a3ddde24
|
[
"BSD-3-Clause"
] | 8 |
2019-03-28T11:54:59.000Z
|
2022-03-19T03:31:37.000Z
|
qcdb/util/paths.py
|
loriab/qccddb
|
d9e156ef8b313ac0633211fc6b841f84a3ddde24
|
[
"BSD-3-Clause"
] | 39 |
2018-10-31T23:02:18.000Z
|
2021-12-12T22:11:37.000Z
|
qcdb/util/paths.py
|
loriab/qccddb
|
d9e156ef8b313ac0633211fc6b841f84a3ddde24
|
[
"BSD-3-Clause"
] | 9 |
2018-03-12T20:51:50.000Z
|
2022-02-28T15:18:34.000Z
|
import os
import sys
## {{{ http://code.activestate.com/recipes/52224/ (r1)
def search_file(filename, search_path):
    """Given an os.pathsep divided `search_path`, find first occurrence of
    `filename`. Returns full path to file if found or None if unfound.
    """
    for directory in search_path.split(os.pathsep):
        candidate = os.path.join(directory, filename)
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    return None
def import_ignorecase(module, lenv=None):
    """Try to import *module* under every lettercase permutation.

    `lenv` is a list (not str) of additional sys.path entries to search
    while importing. Returns the imported module object, or None if no
    casing of the name could be imported.
    """
    extra_paths = lenv if lenv is not None else []
    found = None
    with add_path(extra_paths):
        for candidate in all_casings(module):
            try:
                found = __import__(candidate)
                break
            except ImportError:
                continue
    return found
| 26.85 | 74 | 0.603352 |
53fbd095d48c73b6a23ec7ef2c3b6688ff51dfc5
| 2,380 |
py
|
Python
|
tests/models/DCN_test.py
|
JiangBowen-master/DeepCTR
|
291ffb0ff3b8322f64bd839f963d5c7a70e6b358
|
[
"Apache-2.0"
] | 1 |
2021-09-20T14:12:35.000Z
|
2021-09-20T14:12:35.000Z
|
tests/models/DCN_test.py
|
JiangBowen-master/DeepCTR
|
291ffb0ff3b8322f64bd839f963d5c7a70e6b358
|
[
"Apache-2.0"
] | 1 |
2022-02-10T06:29:19.000Z
|
2022-02-10T06:29:19.000Z
|
tests/models/DCN_test.py
|
JiangBowen-master/DeepCTR
|
291ffb0ff3b8322f64bd839f963d5c7a70e6b358
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import tensorflow as tf
from deepctr.estimator import DCNEstimator
from deepctr.models import DCN
from ..utils import check_model, get_test_data, SAMPLE_SIZE, get_test_data_estimator, check_estimator, \
Estimator_TEST_TF1
# def test_DCN_invalid(embedding_size=8, cross_num=0, hidden_size=()):
# feature_dim_dict = {'sparse': [SparseFeat('sparse_1', 2), SparseFeat('sparse_2', 5), SparseFeat('sparse_3', 10)],
# 'dense': [SparseFeat('dense_1', 1), SparseFeat('dense_1', 1), SparseFeat('dense_1', 1)]}
# with pytest.raises(ValueError):
# _ = DCN(None, embedding_size=embedding_size, cross_num=cross_num, dnn_hidden_units=hidden_size, dnn_dropout=0.5)
if __name__ == "__main__":
    # No standalone behavior; the test cases above are collected and run by pytest.
    pass
| 42.5 | 122 | 0.654622 |
53fbe12da973d06be5b6afaae786b7644d276650
| 1,309 |
py
|
Python
|
workflows/post_process_run/fv3post/gsutil.py
|
jacnugent/fv3net
|
84958651bdd17784fdab98f87ad0d65414c03368
|
[
"MIT"
] | 5 |
2021-03-20T22:42:40.000Z
|
2021-06-30T18:39:36.000Z
|
workflows/post_process_run/fv3post/gsutil.py
|
jacnugent/fv3net
|
84958651bdd17784fdab98f87ad0d65414c03368
|
[
"MIT"
] | 195 |
2021-09-16T05:47:18.000Z
|
2022-03-31T22:03:15.000Z
|
workflows/post_process_run/fv3post/gsutil.py
|
ai2cm/fv3net
|
e62038aee0a97d6207e66baabd8938467838cf51
|
[
"MIT"
] | 1 |
2021-06-16T22:04:24.000Z
|
2021-06-16T22:04:24.000Z
|
import os
import subprocess
import backoff
| 25.666667 | 85 | 0.654698 |
53fc42709c54959b0375cdc103e3419eb44ee072
| 3,012 |
py
|
Python
|
deploy_tix/__main__.py
|
rpappalax/deploy-tix
|
a53c7fa7898b9f0c2f530c8abd8bab322a2eb7bc
|
[
"MIT"
] | null | null | null |
deploy_tix/__main__.py
|
rpappalax/deploy-tix
|
a53c7fa7898b9f0c2f530c8abd8bab322a2eb7bc
|
[
"MIT"
] | 20 |
2015-02-24T08:56:47.000Z
|
2018-07-25T16:35:30.000Z
|
deploy_tix/__main__.py
|
rpappalax/deploy-tix
|
a53c7fa7898b9f0c2f530c8abd8bab322a2eb7bc
|
[
"MIT"
] | 3 |
2015-04-01T21:39:50.000Z
|
2020-09-10T19:40:43.000Z
|
import argparse
from deploy_tix.bugzilla_rest_client import BugzillaRESTClient
from deploy_tix.release_notes import ReleaseNotes
from output_helper import OutputHelper
| 30.12 | 87 | 0.625166 |
53fce9990550dc9cdc1a65b09b6de93156132380
| 2,583 |
py
|
Python
|
site-packages/visual/examples/drape.py
|
lebarsfa/vpython-wx
|
38df062e5532b79f632f4f2a1abae86754c264a9
|
[
"BSL-1.0"
] | 68 |
2015-01-17T05:41:58.000Z
|
2021-04-24T08:35:24.000Z
|
site-packages/visual/examples/drape.py
|
lebarsfa/vpython-wx
|
38df062e5532b79f632f4f2a1abae86754c264a9
|
[
"BSL-1.0"
] | 16 |
2015-01-02T19:36:06.000Z
|
2018-09-09T21:01:25.000Z
|
site-packages/visual/examples/drape.py
|
lebarsfa/vpython-wx
|
38df062e5532b79f632f4f2a1abae86754c264a9
|
[
"BSL-1.0"
] | 37 |
2015-02-04T04:23:00.000Z
|
2020-06-07T03:24:41.000Z
|
from visual import *
# Startup instructions (runtime string, reproduced verbatim).
print("""
Click to place spheres under falling string.
Right button drag or Ctrl-drag to rotate view.
Middle button drag or Alt-drag to zoom in or out.
On a two-button mouse, middle is left + right.
""")
# David Scherer
scene.title = "Drape"
# Simulation parameters: the "string" is a chain of point masses joined by springs.
restlength = 0.02          # rest length of each spring segment
m = 0.010 * restlength     # mass per point (scales with segment length)
g = 9.8                    # gravitational acceleration
dt = 0.002                 # integration time step
k = 3                      # spring constant
damp = (1-0)**dt           # per-step velocity damping factor; (1-0)**dt == 1, i.e. no damping
nspheres = 3               # number of obstacle spheres placed at startup
floor = 0                  # if truthy, clamp string points at y = -1
# Create the stringy thing:
band = curve( x = arange(-1,1,restlength),
              y = 1,
              radius = 0.02
)
band.p = band.pos * 0      # per-point momentum array, initialized to zero
scene.range = 1.5
scene.autoscale = 0
# Let the user position obstacles:
spheres = []
for i in range(nspheres):
    s = sphere( pos = scene.mouse.getclick().pos, #(i*0.6 - 0.7,0.5 + i*0.1,0),
                radius = 0.25,
                color = (abs(sin(i)),cos(i)**2,(i%10)/10.0) )
    spheres.append( s )
# Main animation loop (runs until the window is closed).
while True:
    rate(1.0 / dt)         # cap the loop rate so the simulation runs in real time
    # Additional clicks add more obstacle spheres on the fly.
    if scene.mouse.clicked:
        i = len(spheres)
        s = sphere( pos = scene.mouse.getclick().pos,
                    radius = 0.25,
                    color = (abs(sin(i)),cos(i)**2,(i%10)/10.0) )
        spheres.append( s )
    if floor:
        # Clamp points that fell below y = -1: zero vertical momentum, pin position.
        below = less(band.pos[:,1],-1)
        band.p[:,1] = where( below, 0, band.p[:,1] )
        band.pos[:,1] = where( below, -1, band.pos[:,1] )
    # need a more physical way to make 'damped springs' than this!
    band.p = band.p * damp
    #band.p[0] = 0 # nail down left endpoint
    #band.p[-1] = 0 # nail down right endpoint
    # Advance positions from momenta (symplectic Euler style update).
    band.pos = band.pos + band.p/m*dt
    #gravity
    band.p[:,1] = band.p[:,1] - m * g * dt
    # force[n] is the force on point n from point n+1 (to the right):
    length = (band.pos[1:] - band.pos[:-1])
    dist = sqrt(sum(length*length,-1))
    force = k * ( dist - restlength )
    force = length/dist[:,newaxis] * force[:,newaxis]
    # Apply equal and opposite spring forces to the two ends of each segment.
    band.p[:-1] = band.p[:-1] + force*dt
    band.p[1:] = band.p[1:] - force*dt
    # color based on "stretch": blue -> white -> red
    c = clip( dist/restlength * 0.5, 0, 2 )
    # blue (compressed) -> white (relaxed) -> red (tension)
    band.red[1:] = where( less(c,1), c, 1 )
    band.green[1:] = where( less(c,1), c, 2-c )
    band.blue[1:] = where( less(c,1), 1, 2-c )
    # Collide the string with each obstacle: project penetrating points onto
    # the sphere surface and remove the momentum component pointing inward.
    for s in spheres:
        dist = mag( band.pos - s.pos )[:,newaxis]
        inside = less( dist, s.radius )
        if sometrue(inside):
            R = ( band.pos - s.pos ) / dist
            surface = s.pos + (s.radius)*R
            band.pos = surface*inside + band.pos*(1-inside)
            pdotR = sum(asarray(band.p)*asarray(R),-1)
            band.p = band.p - R*pdotR[:,newaxis]*inside
| 27.189474 | 81 | 0.542005 |
53fd39f8be55af2124122647f83ca83013ed5b72
| 8,921 |
py
|
Python
|
sdc/utilities/sdc_typing_utils.py
|
dlee992/sdc
|
1ebf55c00ef38dfbd401a70b3945e352a5a38b87
|
[
"BSD-2-Clause"
] | 540 |
2017-06-19T16:29:24.000Z
|
2019-05-21T09:30:07.000Z
|
sdc/utilities/sdc_typing_utils.py
|
dlee992/sdc
|
1ebf55c00ef38dfbd401a70b3945e352a5a38b87
|
[
"BSD-2-Clause"
] | 389 |
2019-10-30T18:56:46.000Z
|
2022-03-09T08:21:36.000Z
|
sdc/utilities/sdc_typing_utils.py
|
dlee992/sdc
|
1ebf55c00ef38dfbd401a70b3945e352a5a38b87
|
[
"BSD-2-Clause"
] | 36 |
2017-06-19T16:29:15.000Z
|
2019-04-26T09:22:39.000Z
|
# *****************************************************************************
# Copyright (c) 2020, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
"""
| This file contains SDC utility functions related to typing compilation phase
"""
import numpy
import numba
import sdc
from numba import types
from numba.core.errors import TypingError
from numba.np import numpy_support
from sdc.datatypes.indexes import *
from sdc.str_arr_type import string_array_type, StringArrayType
from sdc.datatypes.categorical.types import Categorical
# Index types from the legacy implementation (plain arrays used directly as indexes).
sdc_old_index_types = (types.Array, StringArrayType, )
# All Numba types that may represent a pandas index in SDC (new index classes
# plus the legacy array-based ones above).
sdc_pandas_index_types = (
    EmptyIndexType,
    PositionalIndexType,
    RangeIndexType,
    Int64IndexType,
    MultiIndexType,
) + sdc_old_index_types
# Index types that behave like a range of positions (presumably no backing
# data array is stored for these -- confirm against the index implementations).
sdc_indexes_range_like = (
    PositionalIndexType,
    RangeIndexType,
)
# TO-DO: support caching of data allocated for range indexes at request for .values
sdc_indexes_wo_values_cache = (
    EmptyIndexType,
    PositionalIndexType,
    RangeIndexType,
)
# Numba types allowed as the backing array of a DataFrame column.
sdc_pandas_df_column_types = (
    types.Array,
    StringArrayType,
    Categorical,
)
def kwsparams2list(params):
    """Convert a dict of keyword parameters to a list of 'key=value' strings."""
    rendered = []
    for name, value in params.items():
        rendered.append(f'{name}={value}')
    return rendered
def sigparams2list(param_names, defaults):
    """Render parameter names as signature fragments.

    Names present in `defaults` become 'name=default'; the rest are
    rendered as the bare name. Order of `param_names` is preserved.
    """
    rendered = []
    for name in param_names:
        if name in defaults:
            rendered.append(f'{name}={defaults[name]}')
        else:
            rendered.append(f'{name}')
    return rendered
def has_literal_value(var, value):
    """Used during typing to check that *var* is a Numba literal equal to *value*.

    Parameters
    ----------
    var : Numba type
        Type being inspected; anything that is not a ``types.Literal``
        yields False.
    value : object
        Concrete Python value to compare against (None and bools are
        compared by identity).

    Returns
    -------
    bool
    """
    if not isinstance(var, types.Literal):
        return False
    if value is None:
        return isinstance(var, types.NoneType) or var.literal_value is value
    elif isinstance(value, bool):
        # BUGFIX: was isinstance(value, type(bool)), i.e. isinstance(value, type),
        # which is never True for a bool value, so booleans silently fell through
        # to the '==' branch (where literal 1 would match True). Booleans are
        # compared by identity as intended.
        return var.literal_value is value
    else:
        return var.literal_value == value
def has_python_value(var, value):
    """Used during typing to check that *var* resolved as a plain Python value
    equal to *value*.

    Returns True only when *var* is an instance of the same class as *value*
    and compares equal (identity comparison for None and for bools).
    """
    if not isinstance(var, type(value)):
        return False
    if value is None or isinstance(value, bool):
        # BUGFIX: was isinstance(value, type(bool)) == isinstance(value, type),
        # which is always False for actual bools; identity comparison is the
        # intended semantics for None and True/False. (Behavior is unchanged in
        # practice because the isinstance guard above already filters by class.)
        return var is value
    else:
        return var == value
def is_default(var, value):
    """Return True if *var* represents *value* as a default argument: either a
    Numba literal holding that value, a resolved Python value, or an omitted
    argument (``types.Omitted``)."""
    return has_literal_value(var, value) or has_python_value(var, value) or isinstance(var, types.Omitted)
def check_is_numeric_array(type_var):
    """Used during typing to check that type_var is a numeric numpy array."""
    return check_is_array_of_dtype(type_var, types.Number)
def check_index_is_numeric(ty_series):
    """Used during typing to check that the series type has a numeric index dtype."""
    return isinstance(ty_series.index.dtype, types.Number)
def check_types_comparable(ty_left, ty_right):
    """Used during typing to check that the specified types can be compared."""
    # Compare element dtypes when either side is a container-like type.
    ty_left = getattr(ty_left, 'dtype', ty_left)
    ty_right = getattr(ty_right, 'dtype', ty_right)
    # Both sides must fall into the same supported category.
    # (Add the rest of supported types here.)
    for category in (types.Number, types.UnicodeType, types.Boolean):
        if isinstance(ty_left, category):
            return isinstance(ty_right, category)
    if isinstance(ty_left, (types.Tuple, types.UniTuple)):
        # FIXME: just for now to unblock compilation
        return ty_left == ty_right
    return False
def check_arrays_comparable(ty_left, ty_right):
    """Used during typing to check that underlying arrays of specified types can be compared"""
    # Arrays are comparable when both are string arrays or both are numeric arrays.
    return ((ty_left == string_array_type and ty_right == string_array_type)
            or (check_is_numeric_array(ty_left) and check_is_numeric_array(ty_right)))
def check_is_array_of_dtype(type_var, dtype):
    """Used during typing to check that type_var is a numpy array with a dtype of the given class."""
    return isinstance(type_var, types.Array) and isinstance(type_var.dtype, dtype)
def find_common_dtype_from_numpy_dtypes(array_types, scalar_types):
    """Used to find common numba dtype for a sequences of numba dtypes each representing some numpy dtype"""
    # Round-trip through numpy: numba dtype -> numpy dtype -> common type -> numba dtype.
    np_array_dtypes = [numpy_support.as_dtype(dtype) for dtype in array_types]
    np_scalar_dtypes = [numpy_support.as_dtype(dtype) for dtype in scalar_types]
    # NOTE(review): numpy.find_common_type is deprecated since NumPy 1.25;
    # consider numpy.result_type / numpy.promote_types when upgrading NumPy.
    np_common_dtype = numpy.find_common_type(np_array_dtypes, np_scalar_dtypes)
    numba_common_dtype = numpy_support.from_dtype(np_common_dtype)
    return numba_common_dtype
def find_index_common_dtype(left, right):
    """Find the common dtype for the indexes of two series.

    Returns a pair ``(dtypes_match, common_dtype)`` where ``dtypes_match``
    tells whether both indexes already share the same dtype; when they do
    not, ``common_dtype`` is the promoted dtype of the two.
    """
    lhs_dtype = left.dtype
    rhs_dtype = right.dtype
    dtypes_match = lhs_dtype == rhs_dtype
    if dtypes_match:
        common_dtype = lhs_dtype
    else:
        common_dtype = find_common_dtype_from_numpy_dtypes([lhs_dtype, rhs_dtype], [])
    return dtypes_match, common_dtype
def gen_impl_generator(codegen, impl_name):
    """Generate generator of an implementation"""
    # NOTE(review): `_df_impl_generator` is not defined anywhere in this file
    # as shown -- the inner generator function appears to have been lost
    # (likely during extraction), so calling this would raise NameError.
    # Confirm against the original source before relying on this function.
    return _df_impl_generator
def check_signed_integer(ty):
    """Return True if *ty* is a signed Numba integer type."""
    return isinstance(ty, types.Integer) and ty.signed
def _check_dtype_param_type(dtype):
    """ Returns True if dtype is a valid type for the dtype parameter and False otherwise.
        Used in RangeIndex ctor and other methods that take a dtype parameter. """
    valid_dtype_types = (types.NoneType, types.Omitted, types.UnicodeType, types.NumberClass)
    return isinstance(dtype, valid_dtype_types) or dtype is None
| 34.311538 | 109 | 0.690954 |
53fde8ce197812a38b7631459a915158d4d2d39f
| 1,074 |
py
|
Python
|
Hackerrank/Contests/Project Euler/euler010.py
|
PROxZIMA/Competitive-Coding
|
ba6b365ea130b6fcaa15c5537b530ed363bab793
|
[
"MIT"
] | 1 |
2021-01-10T13:29:21.000Z
|
2021-01-10T13:29:21.000Z
|
Hackerrank/Contests/Project Euler/euler010.py
|
PROxZIMA/Competitive-Coding
|
ba6b365ea130b6fcaa15c5537b530ed363bab793
|
[
"MIT"
] | null | null | null |
Hackerrank/Contests/Project Euler/euler010.py
|
PROxZIMA/Competitive-Coding
|
ba6b365ea130b6fcaa15c5537b530ed363bab793
|
[
"MIT"
] | null | null | null |
from math import sqrt  # NOTE(review): imported but unused in the code shown below
# Naive method: Loop through N and check if every number is prime or not. If prime add to sum. Time complexity is O(n). Time of execution ~ 8sec for n = 1000000
# NOTE(review): `prime` is not defined anywhere in this file as shown; this
# first variant appears to be leftover from an earlier revision and would
# raise NameError if executed. Presumably `prime(n)` yielded all primes < n.
s = set(prime(1000000))
for _ in range(int(input())):
    n = int(input())
    print(sum(i for i in s if i <= n))
# Sieve implementation: Time complexity of O(n*log(log(n))). Time of execution ~ 2sec for n = 1000000
limit = 1000000
# sieve[i] will hold the cumulative sum of all primes <= i.
# Initial layout marks odd indices as candidate primes (even numbers skipped).
sieve = [0] + [1, 0] * 500000
sieve[0], sieve[1], sieve[2] = 0, 0, 2
p = 3
while p <= limit:
    if sieve[p]:
        # p is prime: extend the running sum and strike out its odd multiples.
        sieve[p] = sieve[p-1] + p
        for i in range(p*p, limit+1, p):
            sieve[i] = 0
    else:
        # p is composite: carry the running sum forward unchanged.
        sieve[p] = sieve[p-1]
    # Copy the running sum to the following even index so lookups at any n work.
    sieve[p+1] = sieve[p]
    p += 2
for _ in range(int(input())):
    print(sieve[int(input())])
| 23.347826 | 161 | 0.515829 |
53fe751d15505be94879d0853534a2ee2c6e3129
| 3,891 |
py
|
Python
|
DQM/L1TMonitorClient/python/L1EmulatorErrorFlagClient_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852 |
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
DQM/L1TMonitorClient/python/L1EmulatorErrorFlagClient_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371 |
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
DQM/L1TMonitorClient/python/L1EmulatorErrorFlagClient_cfi.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240 |
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDHarvester import DQMEDHarvester
# DQM harvester that aggregates per-system L1 emulator ErrorFlag histograms.
l1EmulatorErrorFlagClient = DQMEDHarvester("L1EmulatorErrorFlagClient",
    #
    # for each L1 system, give:
    #     - SystemLabel:  system label
    #     - HwValLabel:   system label as used in hardware validation package
    #                     (the package producing the ErrorFlag histogram)
    #     - SystemMask:   system mask: if 1, the system is masked in the summary plot
    #     - SystemFolder: the folder where the ErrorFlag histogram is looked for
    #
    # the position in the parameter set gives, in reverse order, the position in the reportSummaryMap
    # in the emulator column (left column)
    # NOTE: do not reorder the PSets below -- their order is semantic (see above).
    L1Systems = cms.VPSet(
        cms.PSet(
            SystemLabel = cms.string("ECAL"),
            HwValLabel = cms.string("ETP"),
            SystemMask = cms.uint32(1),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("HCAL"),
            HwValLabel = cms.string("HTP"),
            SystemMask = cms.uint32(1),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("RCT"),
            HwValLabel = cms.string("RCT"),
            SystemMask = cms.uint32(0),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("Stage1Layer2"),
            HwValLabel = cms.string("Stage1Layer2"),
            SystemMask = cms.uint32(0),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("DTTF"),
            HwValLabel = cms.string("DTF"),
            SystemMask = cms.uint32(0),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("DTTPG"),
            HwValLabel = cms.string("DTP"),
            SystemMask = cms.uint32(1),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("CSCTF"),
            HwValLabel = cms.string("CTF"),
            SystemMask = cms.uint32(1),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("CSCTPG"),
            HwValLabel = cms.string("CTP"),
            SystemMask = cms.uint32(1),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("RPC"),
            HwValLabel = cms.string("RPC"),
            SystemMask = cms.uint32(0),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("GMT"),
            HwValLabel = cms.string("GMT"),
            SystemMask = cms.uint32(0),
            SystemFolder = cms.string("")
        ),
        cms.PSet(
            SystemLabel = cms.string("GT"),
            HwValLabel = cms.string("GT"),
            SystemMask = cms.uint32(1),
            # GT is the only system whose ErrorFlag histogram lives in a
            # non-default folder.
            SystemFolder = cms.string("L1TEMU/Stage1GTexpert")
        )
    )
)
| 45.776471 | 101 | 0.40992 |
53ff445026af64cf9c890da3e25303bb69266c4d
| 17,382 |
py
|
Python
|
codalab/model/tables.py
|
jzwang43/codalab-worksheets
|
b1d4c6cc4b72f4dfa35a15f876e2d0ce9a03d28d
|
[
"Apache-2.0"
] | null | null | null |
codalab/model/tables.py
|
jzwang43/codalab-worksheets
|
b1d4c6cc4b72f4dfa35a15f876e2d0ce9a03d28d
|
[
"Apache-2.0"
] | null | null | null |
codalab/model/tables.py
|
jzwang43/codalab-worksheets
|
b1d4c6cc4b72f4dfa35a15f876e2d0ce9a03d28d
|
[
"Apache-2.0"
] | null | null | null |
"""
The SQLAlchemy table objects for the CodaLab bundle system tables.
"""
# TODO: Replace String and Text columns with Unicode and UnicodeText as appropriate
# This way, SQLAlchemy will automatically perform conversions to and from UTF-8
# encoding, or use appropriate database engine-specific data types for Unicode
# data. Currently, only worksheet.title uses the Unicode column type.
from sqlalchemy import Column, ForeignKey, Index, MetaData, Table, UniqueConstraint
from sqlalchemy.types import (
BigInteger,
Boolean,
DateTime,
Enum,
Float,
Integer,
LargeBinary,
String,
Text,
Unicode,
)
from sqlalchemy.sql.schema import ForeignKeyConstraint
db_metadata = MetaData()
# Core table: one row per bundle (dataset or run) in the CodaLab system.
bundle = Table(
    'bundle',
    db_metadata,
    Column(
        'id',
        BigInteger().with_variant(Integer, "sqlite"),
        primary_key=True,
        nullable=False,
        autoincrement=True,
    ),
    # Globally unique identifier; the public handle used to reference a bundle.
    Column('uuid', String(63), nullable=False),
    Column('bundle_type', String(63), nullable=False),
    # The command will be NULL except for run bundles.
    Column('command', Text, nullable=True),
    # The data_hash will be NULL if the bundle's value is still being computed.
    Column('data_hash', String(63), nullable=True),
    Column('state', String(63), nullable=False),
    Column('owner_id', String(255), nullable=True),
    Column('is_anonymous', Boolean, nullable=False, default=False),
    UniqueConstraint('uuid', name='uix_1'),
    Index('bundle_data_hash_index', 'data_hash'),
    Index('state_index', 'state'),  # Needed for the bundle manager.
)
# Includes things like name, description, etc.
bundle_metadata = Table(
'bundle_metadata',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('bundle_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
Column('metadata_key', String(63), nullable=False),
Column('metadata_value', Text, nullable=False),
Index('metadata_kv_index', 'metadata_key', 'metadata_value', mysql_length=63),
)
# For each child_uuid, we have: key = child_path, target = (parent_uuid, parent_path)
bundle_dependency = Table(
'bundle_dependency',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('child_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
Column('child_path', Text, nullable=False),
# Deliberately omit ForeignKey(bundle.c.uuid), because bundles can have
# dependencies to bundles not (yet) in the system.
Column('parent_uuid', String(63), nullable=False),
Column('parent_path', Text, nullable=False),
)
# The worksheet table does not have many columns now, but it will eventually
# include columns for owner, group, permissions, etc.
worksheet = Table(
'worksheet',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('uuid', String(63), nullable=False),
Column('name', String(255), nullable=False),
Column('owner_id', String(255), nullable=True),
Column(
'title', Unicode(255), nullable=True
), # Short human-readable description of the worksheet
Column(
'frozen', DateTime, nullable=True
), # When the worksheet was frozen (forever immutable) if it is.
Column('is_anonymous', Boolean, nullable=False, default=False),
Column(
'date_created', DateTime
), # When the worksheet was created; Set to null if the worksheet created before v0.5.31; Set to current timestamp by default
Column(
'date_last_modified', DateTime
), # When the worksheet was last modified; Set to null if the worksheet created before v0.5.31; Set to current_timestamp by default
UniqueConstraint('uuid', name='uix_1'),
Index('worksheet_name_index', 'name'),
Index('worksheet_owner_index', 'owner_id'),
)
worksheet_item = Table(
'worksheet_item',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('worksheet_uuid', String(63), ForeignKey(worksheet.c.uuid), nullable=False),
# A worksheet item is either:
# - type = bundle (bundle_uuid != null)
# - type = worksheet (subworksheet_uuid != null)
# - type = markup (value != null)
# - type = directive (value != null)
# Deliberately omit ForeignKey(bundle.c.uuid), because worksheets can contain
# bundles and worksheets not (yet) in the system.
Column('bundle_uuid', String(63), nullable=True),
Column('subworksheet_uuid', String(63), nullable=True),
Column('value', Text, nullable=False), # TODO: make this nullable
Column('type', String(20), nullable=False),
Column('sort_key', Integer, nullable=True),
Index('worksheet_item_worksheet_uuid_index', 'worksheet_uuid'),
Index('worksheet_item_bundle_uuid_index', 'bundle_uuid'),
Index('worksheet_item_subworksheet_uuid_index', 'subworksheet_uuid'),
)
# Worksheet tags
worksheet_tag = Table(
'worksheet_tag',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('worksheet_uuid', String(63), ForeignKey(worksheet.c.uuid), nullable=False),
Column('tag', String(63), nullable=False),
Index('worksheet_tag_worksheet_uuid_index', 'worksheet_uuid'),
Index('worksheet_tag_tag_index', 'tag'),
)
group = Table(
'group',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('uuid', String(63), nullable=False),
Column('name', String(255), nullable=False),
Column('user_defined', Boolean),
Column('owner_id', String(255), nullable=True),
UniqueConstraint('uuid', name='uix_1'),
Index('group_name_index', 'name'),
Index('group_owner_id_index', 'owner_id'),
)
user_group = Table(
'user_group',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=False),
Column('user_id', String(63), ForeignKey("user.user_id"), nullable=False),
# Whether a user is able to modify this group.
Column('is_admin', Boolean),
Index('group_uuid_index', 'group_uuid'),
Index('user_id_index', 'user_id'),
)
# Permissions for bundles
group_bundle_permission = Table(
'group_bundle_permission',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=False),
# Reference to a bundle
Column('object_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
# Permissions encoded as integer (see below)
Column('permission', Integer, nullable=False),
)
# Permissions for worksheets
group_object_permission = Table(
'group_object_permission',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=False),
# Reference to a worksheet object
Column('object_uuid', String(63), ForeignKey(worksheet.c.uuid), nullable=False),
# Permissions encoded as integer (see below)
Column('permission', Integer, nullable=False),
)
# A permission value is one of the following: none (0), read (1), or all (2).
GROUP_OBJECT_PERMISSION_NONE = 0x00
GROUP_OBJECT_PERMISSION_READ = 0x01
GROUP_OBJECT_PERMISSION_ALL = 0x02
# A notifications value is one of the following:
NOTIFICATIONS_NONE = 0x00 # Receive no notifications
NOTIFICATIONS_IMPORTANT = 0x01 # Receive only important notifications
NOTIFICATIONS_GENERAL = 0x02 # Receive general notifications (new features)
# Store information about users.
user = Table(
'user',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
# Basic information
Column('user_id', String(63), nullable=False),
Column('user_name', String(63), nullable=False, unique=True),
Column(
'email', String(254), nullable=False, unique=True
), # Length of 254 to be compliant with RFC3696/5321
Column(
'notifications', Integer, nullable=False, default=NOTIFICATIONS_GENERAL
), # Which emails user wants to receive
Column('last_login', DateTime), # Null if user has never logged in
Column(
'is_active', Boolean, nullable=False, default=True
), # Set to False instead of deleting users to maintain foreign key integrity
Column('first_name', String(30, convert_unicode=True)),
Column('last_name', String(30, convert_unicode=True)),
Column('date_joined', DateTime, nullable=False),
Column('has_access', Boolean, default=False, nullable=True),
Column('is_verified', Boolean, nullable=False, default=False),
Column('is_superuser', Boolean, nullable=False, default=False),
Column('password', String(128), nullable=False),
# Additional information
Column('affiliation', String(255, convert_unicode=True), nullable=True),
Column('url', String(255, convert_unicode=True), nullable=True),
# Quotas
Column('time_quota', Float, nullable=False), # Number of seconds allowed
Column('parallel_run_quota', Integer, nullable=False), # Number of parallel jobs allowed
Column('time_used', Float, nullable=False), # Number of seconds already used
Column('disk_quota', Float, nullable=False), # Number of bytes allowed
Column('disk_used', Float, nullable=False), # Number of bytes already used
Index('user_user_id_index', 'user_id'),
Index('user_user_name_index', 'user_name'),
UniqueConstraint('user_id', name='uix_1'),
)
# Stores (email) verification keys
user_verification = Table(
'user_verification',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('date_created', DateTime, nullable=False),
Column('date_sent', DateTime, nullable=True),
Column('key', String(64), nullable=False),
)
# Stores password reset codes
user_reset_code = Table(
'user_reset_code',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('date_created', DateTime, nullable=False),
Column('code', String(64), nullable=False),
)
# OAuth2 Tables
oauth2_client = Table(
'oauth2_client',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('client_id', String(63), nullable=False),
Column('name', String(63), nullable=True),
Column('secret', String(255), nullable=True),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=True),
Column(
'grant_type',
Enum("authorization_code", "password", "client_credentials", "refresh_token"),
nullable=False,
),
Column('response_type', Enum("code", "token"), nullable=False),
Column('scopes', Text, nullable=False), # comma-separated list of allowed scopes
Column('redirect_uris', Text, nullable=False), # comma-separated list of allowed redirect URIs
UniqueConstraint('client_id', name='uix_1'),
)
oauth2_token = Table(
'oauth2_token',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('client_id', String(63), ForeignKey(oauth2_client.c.client_id), nullable=False),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('scopes', Text, nullable=False),
Column('access_token', String(255), unique=True),
Column('refresh_token', String(255), unique=True),
Column('expires', DateTime, nullable=False),
)
oauth2_auth_code = Table(
'oauth2_auth_code',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
),
Column('client_id', String(63), ForeignKey(oauth2_client.c.client_id), nullable=False),
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('scopes', Text, nullable=False),
Column('code', String(100), nullable=False),
Column('expires', DateTime, nullable=False),
Column('redirect_uri', String(255), nullable=False),
)
# Store information about users' questions or feedback.
chat = Table(
'chat',
db_metadata,
Column(
'id',
BigInteger().with_variant(Integer, "sqlite"),
primary_key=True,
nullable=False,
autoincrement=True,
), # Primary key
Column('time', DateTime, nullable=False), # When did the user send this query?
Column('sender_user_id', String(63), nullable=True), # Who sent it?
Column('recipient_user_id', String(63), nullable=True), # Who received it?
Column('message', Text, nullable=False), # What's the content of the chat?
Column(
'worksheet_uuid', String(63), nullable=True
), # What is the id of the worksheet that the sender is on?
Column(
'bundle_uuid', String(63), nullable=True
), # What is the id of the bundle that the sender is on?
)
# Store information about workers.
worker = Table(
'worker',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), primary_key=True, nullable=False),
Column('worker_id', String(127), primary_key=True, nullable=False),
Column('group_uuid', String(63), ForeignKey(group.c.uuid), nullable=True),
Column('tag', Text, nullable=True), # Tag that allows for scheduling runs on specific workers.
Column('cpus', Integer, nullable=False), # Number of CPUs on worker.
Column('gpus', Integer, nullable=False), # Number of GPUs on worker.
Column('memory_bytes', BigInteger, nullable=False), # Total memory of worker.
Column('free_disk_bytes', BigInteger, nullable=True), # Available disk space on worker.
Column(
'checkin_time', DateTime, nullable=False
), # When the worker last checked in with the bundle service.
Column('socket_id', Integer, nullable=False), # Socket ID worker listens for messages on.
Column(
'shared_file_system', Boolean, nullable=False
), # Whether the worker and the server have a shared filesystem.
Column(
'tag_exclusive', Boolean, nullable=False
), # Whether worker runs bundles if and only if they match tags.
Column(
'exit_after_num_runs', Integer, nullable=False
), # Number of jobs allowed to run on worker.
Column('is_terminating', Boolean, nullable=False),
)
# Store information about all sockets currently allocated to each worker.
worker_socket = Table(
'worker_socket',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('worker_id', String(127), nullable=False),
# No foreign key constraint on the worker table so that we can create a socket
# for the worker before adding the worker to the worker table.
Column('socket_id', Integer, primary_key=True, nullable=False),
)
# Store information about the bundles currently running on each worker.
worker_run = Table(
'worker_run',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), nullable=False),
Column('worker_id', String(127), nullable=False),
ForeignKeyConstraint(['user_id', 'worker_id'], ['worker.user_id', 'worker.worker_id']),
Column('run_uuid', String(63), ForeignKey(bundle.c.uuid), nullable=False),
Index('uuid_index', 'run_uuid'),
)
# Store information about the dependencies available on each worker.
worker_dependency = Table(
'worker_dependency',
db_metadata,
Column('user_id', String(63), ForeignKey(user.c.user_id), primary_key=True, nullable=False),
Column('worker_id', String(127), primary_key=True, nullable=False),
ForeignKeyConstraint(['user_id', 'worker_id'], ['worker.user_id', 'worker.worker_id']),
# Serialized list of dependencies for the user/worker combination.
# See WorkerModel for the serialization method.
Column('dependencies', LargeBinary, nullable=False),
)
| 36.904459 | 136 | 0.676159 |
53ff8a47a271e5535277c6325b7ff8df26908ae6
| 31,403 |
py
|
Python
|
grpc/plugins/connection/gnmi.py
|
hansthienpondt/ansible-networking-collections
|
278c88fceac297693a31df3cb54c942284823fbd
|
[
"BSD-3-Clause"
] | null | null | null |
grpc/plugins/connection/gnmi.py
|
hansthienpondt/ansible-networking-collections
|
278c88fceac297693a31df3cb54c942284823fbd
|
[
"BSD-3-Clause"
] | null | null | null |
grpc/plugins/connection/gnmi.py
|
hansthienpondt/ansible-networking-collections
|
278c88fceac297693a31df3cb54c942284823fbd
|
[
"BSD-3-Clause"
] | null | null | null |
# (c) 2020 Nokia
#
# Licensed under the BSD 3 Clause license
# SPDX-License-Identifier: BSD-3-Clause
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
author:
- "Hans Thienpondt (@HansThienpondt)"
- "Sven Wisotzky (@wisotzky)"
connection: gnmi
short_description: Provides a persistent gRPC connection for gNMI API service
description:
- This gRPC plugin provides methods to interact with the gNMI service.
- OpenConfig gNMI specification
https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-specification.md
- gNMI API
https://raw.githubusercontent.com/openconfig/gnmi/master/proto/gnmi/gnmi.proto
- This connection plugin provides a persistent communication channel to
remote devices using gRPC including the underlying transport (TLS).
- The plugin binds to the gNMI gRPC service. It provide wrappers for gNMI
requests (Capabilities, Get, Set, Subscribe)
requirements:
- grpcio
- protobuf
options:
host:
description:
- Target host FQDN or IP address to establish gRPC connection.
default: inventory_hostname
vars:
- name: ansible_host
port:
type: int
description:
- Specifies the port on the remote device that listens for connections
when establishing the gRPC connection. If None only the C(host) part
will be used.
ini:
- section: defaults
key: remote_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_port
remote_user:
description:
- The username used to authenticate to the remote device when the gRPC
connection is first established. If the remote_user is not specified,
the connection will use the username of the logged in user.
- Can be configured from the CLI via the C(--user) or C(-u) options.
ini:
- section: defaults
key: remote_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
password:
description:
- Configures the user password used to authenticate to the remote device
when first establishing the gRPC connection.
vars:
- name: ansible_password
- name: ansible_ssh_pass
private_key_file:
description:
- The PEM encoded private key file used to authenticate to the
remote device when first establishing the grpc connection.
ini:
- section: grpc_connection
key: private_key_file
env:
- name: ANSIBLE_PRIVATE_KEY_FILE
vars:
- name: ansible_private_key_file
root_certificates_file:
description:
- The PEM encoded root certificate file used to create a SSL-enabled
channel, if the value is None it reads the root certificates from
a default location chosen by gRPC at runtime.
ini:
- section: grpc_connection
key: root_certificates_file
env:
- name: ANSIBLE_ROOT_CERTIFICATES_FILE
vars:
- name: ansible_root_certificates_file
certificate_chain_file:
description:
- The PEM encoded certificate chain file used to create a SSL-enabled
channel. If the value is None, no certificate chain is used.
ini:
- section: grpc_connection
key: certificate_chain_file
env:
- name: ANSIBLE_CERTIFICATE_CHAIN_FILE
vars:
- name: ansible_certificate_chain_file
certificate_path:
description:
- Folder to search for certificate and key files
ini:
- section: grpc_connection
key: certificate_path
env:
- name: ANSIBLE_CERTIFICATE_PATH
vars:
- name: ansible_certificate_path
gnmi_encoding:
description:
- Encoding used for gNMI communication
- Must be either JSON or JSON_IETF
- If not provided, will run CapabilityRequest for auto-detection
ini:
- section: grpc_connection
key: gnmi_encoding
env:
- name: ANSIBLE_GNMI_ENCODING
vars:
- name: ansible_gnmi_encoding
grpc_channel_options:
description:
- Key/Value pairs (dict) to define gRPC channel options to be used
- gRPC reference
U(https://grpc.github.io/grpc/core/group__grpc__arg__keys.html)
- Provide the I(ssl_target_name_override) option to override the TLS
subject or subjectAltName (only in the case secure connections are
used). The option must be provided in cases, when the FQDN or IPv4
address that is used to connect to the device is different from the
subject name that is provided in the host certificate. This is
needed, because the TLS validates hostname or IP address to avoid
man-in-the-middle attacks.
vars:
- name: ansible_grpc_channel_options
grpc_environment:
description:
- Key/Value pairs (dict) to define environment settings specific to gRPC
- The standard mechanism to provide/set the environment in Ansible
cannot be used, because those environment settings are not passed to
the client process that establishes the gRPC connection.
- Set C(GRPC_VERBOSITY) and C(GRPC_TRACE) to setup gRPC logging. Need to
add code for log forwarding of gRPC related log messages to the
persistent messages log (see below).
- Set C(HTTPS_PROXY) to specify your proxy settings (if needed).
- Set C(GRPC_SSL_CIPHER_SUITES) in case the default TLS ciphers do not match
what is offered by the gRPC server.
vars:
- name: ansible_grpc_environment
persistent_connect_timeout:
type: int
description:
- Configures, in seconds, the amount of time to wait when trying to
initially establish a persistent connection. If this value expires
before the connection to the remote device is completed, the connection
will fail.
default: 5
ini:
- section: persistent_connection
key: connect_timeout
env:
- name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT
vars:
- name: ansible_connect_timeout
persistent_command_timeout:
type: int
description:
- Configures the default timeout value (in seconds) when awaiting a
response after issuing a call to a RPC. If the RPC does not return
before the timeout exceed, an error is generated and the connection
is closed.
default: 300
ini:
- section: persistent_connection
key: command_timeout
env:
- name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT
vars:
- name: ansible_command_timeout
persistent_log_messages:
type: boolean
description:
- This flag will enable logging the command executed and response received
from target device in the ansible log file. For this option to work the
'log_path' ansible configuration option is required to be set to a file
path with write access.
- Be sure to fully understand the security implications of enabling this
option as it could create a security vulnerability by logging sensitive
information in log file.
default: False
ini:
- section: persistent_connection
key: log_messages
env:
- name: ANSIBLE_PERSISTENT_LOG_MESSAGES
vars:
- name: ansible_persistent_log_messages
"""
import os
import re
import json
import base64
import datetime
try:
import grpc
HAS_GRPC = True
except ImportError:
HAS_GRPC = False
try:
from google import protobuf
HAS_PROTOBUF = True
except ImportError:
HAS_PROTOBUF = False
from ansible.errors import AnsibleConnectionFailure, AnsibleError
from ansible.plugins.connection import NetworkConnectionBase
from ansible.plugins.connection import ensure_connect
from google.protobuf import json_format
from ansible_collections.nokia.grpc.plugins.connection.pb import gnmi_pb2
from ansible.module_utils._text import to_text
| 37.74399 | 124 | 0.583384 |
9900a4818a6a2131c9358bacda678af44a4371c0
| 4,056 |
py
|
Python
|
testcases/cloud_admin/services_up_test.py
|
tbeckham/eutester
|
1440187150ce284bd87147e71ac7f0fda194b4d9
|
[
"BSD-2-Clause"
] | null | null | null |
testcases/cloud_admin/services_up_test.py
|
tbeckham/eutester
|
1440187150ce284bd87147e71ac7f0fda194b4d9
|
[
"BSD-2-Clause"
] | null | null | null |
testcases/cloud_admin/services_up_test.py
|
tbeckham/eutester
|
1440187150ce284bd87147e71ac7f0fda194b4d9
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: clarkmatthew
import eucaops
from eutester.eutestcase import EutesterTestCase
import time
if __name__ == "__main__":
testcase = MyTestCase()
### Use the list of tests passed from config/command line to determine what subset of tests to run
### or use a predefined list "VolumeTagging", "InstanceTagging", "SnapshotTagging", "ImageTagging"
list = testcase.args.tests or ["wait_for_services_operational"]
### Convert test suite methods to EutesterUnitTest objects
unit_list = [ ]
for test in list:
unit_list.append( testcase.create_testunit_by_name(test) )
### Run the EutesterUnitTest objects, dont worry about clean on exit until we need it for this method
result = testcase.run_test_case_list(unit_list,clean_on_exit=False)
exit(result)
| 42.25 | 115 | 0.704389 |
99019a837f86e3b14c54300ab0d06ff51f85071a
| 173 |
py
|
Python
|
intValues.py
|
jules552/ProjetISN
|
20da3572b59af25a166022bc2f5b25d46add2650
|
[
"Unlicense"
] | null | null | null |
intValues.py
|
jules552/ProjetISN
|
20da3572b59af25a166022bc2f5b25d46add2650
|
[
"Unlicense"
] | null | null | null |
intValues.py
|
jules552/ProjetISN
|
20da3572b59af25a166022bc2f5b25d46add2650
|
[
"Unlicense"
] | null | null | null |
MAP = 1
SPEED = 1.5
VELOCITYRESET = 6
WIDTH = 1280
HEIGHT = 720
X = WIDTH / 2 - 50
Y = HEIGHT / 2 - 50
MOUSER = 325
TICKRATES = 120
nfc = False
raspberry = False
| 14.416667 | 20 | 0.606936 |
990280dc9a383a0a37cbb821de57615b46aa6a23
| 401 |
py
|
Python
|
April/Apr_25_2019/builder.py
|
while1618/DailyCodingProblem
|
187909f78281828da543439646cdf52d64c2bd0c
|
[
"MIT"
] | 1 |
2019-11-17T10:56:28.000Z
|
2019-11-17T10:56:28.000Z
|
April/Apr_25_2019/builder.py
|
while1618/DailyCodingProblem
|
187909f78281828da543439646cdf52d64c2bd0c
|
[
"MIT"
] | null | null | null |
April/Apr_25_2019/builder.py
|
while1618/DailyCodingProblem
|
187909f78281828da543439646cdf52d64c2bd0c
|
[
"MIT"
] | 1 |
2021-11-02T01:00:37.000Z
|
2021-11-02T01:00:37.000Z
|
# This problem was asked by Facebook.
#
# A builder is looking to build a row of N houses that can be of K different colors.
# He has a goal of minimizing cost while ensuring that no two neighboring houses are of the same color.
#
# Given an N by K matrix where the nth row and kth column represents the cost to build the nth house with kth color,
# return the minimum cost which achieves this goal.
| 44.555556 | 116 | 0.763092 |
99050763178e67f3f1f7faee3c71dfb0a78b6af1
| 4,521 |
py
|
Python
|
experiments/delaney/plot.py
|
pfnet-research/bayesgrad
|
5db613391777b20b7a367c274804f0b736991b0a
|
[
"MIT"
] | 57 |
2018-06-30T01:47:19.000Z
|
2022-03-03T17:21:42.000Z
|
experiments/delaney/plot.py
|
pfnet-research/bayesgrad
|
5db613391777b20b7a367c274804f0b736991b0a
|
[
"MIT"
] | null | null | null |
experiments/delaney/plot.py
|
pfnet-research/bayesgrad
|
5db613391777b20b7a367c274804f0b736991b0a
|
[
"MIT"
] | 8 |
2018-07-07T06:18:40.000Z
|
2021-02-23T21:58:45.000Z
|
import argparse
import numpy as np
import os
import sys
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from saliency.visualizer.smiles_visualizer import SmilesVisualizer
if __name__ == '__main__':
main()
| 35.320313 | 116 | 0.628622 |
99062a5160d0b8327745e2f7901f243a1d23d8b8
| 853 |
py
|
Python
|
public/js/tinymice/plugins/bootstrap/jquery-file-tree/connectors/jqueryFileTree.py
|
btybug/main.albumbugs
|
2343466bae7ee3d8941abc4c9684667cccc3e103
|
[
"MIT"
] | 13 |
2016-05-25T16:12:49.000Z
|
2021-04-09T01:49:24.000Z
|
public/js/tinymice/plugins/bootstrap/jquery-file-tree/connectors/jqueryFileTree.py
|
btybug/main.albumbugs
|
2343466bae7ee3d8941abc4c9684667cccc3e103
|
[
"MIT"
] | 265 |
2015-10-19T02:40:55.000Z
|
2022-03-28T07:24:49.000Z
|
public/js/tinymice/plugins/bootstrap/jquery-file-tree/connectors/jqueryFileTree.py
|
btybug/main.albumbugs
|
2343466bae7ee3d8941abc4c9684667cccc3e103
|
[
"MIT"
] | 7 |
2016-02-08T11:41:40.000Z
|
2021-06-08T18:18:02.000Z
|
#
# jQuery File Tree
# Python/Django connector script
# By Martin Skou
#
import os
import urllib
| 32.807692 | 101 | 0.548652 |
990961ddde648d8a6e8bdae1002af6b0a3fe992c
| 1,639 |
py
|
Python
|
gpytorch/lazy/chol_lazy_tensor.py
|
harvineet/gpytorch
|
8aa8f1a4298ef61cfea9c4d11c75576a84ffcc3e
|
[
"MIT"
] | null | null | null |
gpytorch/lazy/chol_lazy_tensor.py
|
harvineet/gpytorch
|
8aa8f1a4298ef61cfea9c4d11c75576a84ffcc3e
|
[
"MIT"
] | null | null | null |
gpytorch/lazy/chol_lazy_tensor.py
|
harvineet/gpytorch
|
8aa8f1a4298ef61cfea9c4d11c75576a84ffcc3e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import torch
from .lazy_tensor import LazyTensor
from .root_lazy_tensor import RootLazyTensor
from .. import settings
| 33.44898 | 113 | 0.654667 |
9909642cf635ba7b413ffb8f974cd5801c613d72
| 5,765 |
py
|
Python
|
pirates/audio/AmbientManagerBase.py
|
ksmit799/POTCO-PS
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 8 |
2017-01-24T04:33:29.000Z
|
2020-11-01T08:36:24.000Z
|
pirates/audio/AmbientManagerBase.py
|
ksmit799/Pirates-Online-Remake
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 1 |
2017-03-02T18:05:17.000Z
|
2017-03-14T06:47:10.000Z
|
pirates/audio/AmbientManagerBase.py
|
ksmit799/Pirates-Online-Remake
|
520d38935ae8df4b452c733a82c94dddac01e275
|
[
"Apache-2.0"
] | 11 |
2017-03-02T18:46:07.000Z
|
2020-11-01T08:36:26.000Z
|
# File: A (Python 2.4)
from pandac.PandaModules import AudioSound
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import LerpFunc, Sequence
from direct.showbase.DirectObject import DirectObject
| 30.828877 | 153 | 0.601214 |
99096743e56d22ad0a53c9983c2e48c412dd1c0f
| 890 |
py
|
Python
|
test/tests/import_test.py
|
jmgc/pyston
|
9f672c1bbb75710ac17dd3d9107da05c8e9e8e8f
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1 |
2020-02-06T14:28:45.000Z
|
2020-02-06T14:28:45.000Z
|
test/tests/import_test.py
|
jmgc/pyston
|
9f672c1bbb75710ac17dd3d9107da05c8e9e8e8f
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
test/tests/import_test.py
|
jmgc/pyston
|
9f672c1bbb75710ac17dd3d9107da05c8e9e8e8f
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1 |
2020-02-06T14:29:00.000Z
|
2020-02-06T14:29:00.000Z
|
import import_target
print import_target.x
import import_target
import_target.foo()
c = import_target.C()
print import_target.import_nested_target.y
import_target.import_nested_target.bar()
d = import_target.import_nested_target.D()
print "testing importfrom:"
from import_target import x as z
print z
import_nested_target = 15
from import_nested_target import y
print "This should still be 15:",import_nested_target
import import_nested_target
print import_nested_target.__name__
print import_nested_target.y
import_target.import_nested_target.y = import_nested_target.y + 1
print import_nested_target.y
print z
print y
print __name__
print __import__("import_target") is import_target
import sys
import _multiprocessing
del _multiprocessing
del sys.modules["_multiprocessing"]
import _multiprocessing
import time
del time
del sys.modules["time"]
import time
print time.sleep(0)
| 20.227273 | 65 | 0.837079 |
99098c029853719101bfb8070fc7fe3e4ddbd2c3
| 6,801 |
py
|
Python
|
hexrd/ui/matrix_editor.py
|
HEXRD/hexrdgui
|
d92915463f237e0521b5830655ae73bc5bcd9f80
|
[
"BSD-3-Clause"
] | 13 |
2020-02-18T00:23:02.000Z
|
2022-02-24T20:04:36.000Z
|
hexrd/ui/matrix_editor.py
|
HEXRD/hexrdgui
|
d92915463f237e0521b5830655ae73bc5bcd9f80
|
[
"BSD-3-Clause"
] | 656 |
2020-01-14T02:33:40.000Z
|
2022-03-26T15:31:17.000Z
|
hexrd/ui/matrix_editor.py
|
HEXRD/hexrdgui
|
d92915463f237e0521b5830655ae73bc5bcd9f80
|
[
"BSD-3-Clause"
] | 6 |
2020-01-17T15:02:53.000Z
|
2020-11-01T22:02:48.000Z
|
import numpy as np
from PySide2.QtCore import QSignalBlocker, Signal
from PySide2.QtWidgets import QGridLayout, QWidget
from hexrd.ui.scientificspinbox import ScientificDoubleSpinBox
DEFAULT_ENABLED_STYLE_SHEET = 'background-color: white'
DEFAULT_DISABLED_STYLE_SHEET = 'background-color: #F0F0F0'
INVALID_MATRIX_STYLE_SHEET = 'background-color: red'
def widget(self, row, col):
return self.layout().itemAtPosition(row, col).widget()
def gui_value(self, row, col):
return self.widget(row, col).value()
def set_gui_value(self, row, col, val):
self.widget(row, col).setValue(val)
def set_matrix_invalid(self, s):
self.matrix_invalid = True
self.matrix_invalid_reason = s
self.update_tooltips()
self.update_enable_states()
def set_matrix_valid(self):
self.matrix_invalid = False
self.matrix_invalid_reason = ''
self.update_tooltips()
self.update_enable_states()
def update_tooltips(self):
if self.matrix_invalid:
tooltip = self.matrix_invalid_reason
else:
tooltip = ''
for w in self.enabled_widgets:
w.setToolTip(tooltip)
def apply_constraints(self):
if (func := self.apply_constraints_func) is None:
return
func(self.data)
self.update_gui()
if __name__ == '__main__':
import sys
from PySide2.QtWidgets import QApplication, QDialog, QVBoxLayout
if len(sys.argv) < 2:
sys.exit('Usage: <script> <matrix_size>')
rows, cols = [int(x) for x in sys.argv[1].split('x')]
data = np.ones((rows, cols))
app = QApplication(sys.argv)
dialog = QDialog()
layout = QVBoxLayout()
dialog.setLayout(layout)
editor = MatrixEditor(data)
layout.addWidget(editor)
# def constraints(x):
# x[2][2] = x[1][1]
# editor.enabled_elements = [(1, 1), (3, 4)]
# editor.apply_constraints_func = constraints
editor.data_modified.connect(on_data_modified)
dialog.finished.connect(app.quit)
dialog.show()
app.exec_()
| 27.987654 | 78 | 0.617115 |
990aa6cbf16ed34f5030609c03ab43c0f0ed8c2a
| 674 |
py
|
Python
|
data/train/python/990aa6cbf16ed34f5030609c03ab43c0f0ed8c2aurls.py
|
harshp8l/deep-learning-lang-detection
|
2a54293181c1c2b1a2b840ddee4d4d80177efb33
|
[
"MIT"
] | 84 |
2017-10-25T15:49:21.000Z
|
2021-11-28T21:25:54.000Z
|
data/train/python/990aa6cbf16ed34f5030609c03ab43c0f0ed8c2aurls.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 5 |
2018-03-29T11:50:46.000Z
|
2021-04-26T13:33:18.000Z
|
data/train/python/990aa6cbf16ed34f5030609c03ab43c0f0ed8c2aurls.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 24 |
2017-11-22T08:31:00.000Z
|
2022-03-27T01:22:31.000Z
|
from django.conf.urls.defaults import *
urlpatterns = patterns('pytorque.views',
(r'^$', 'central_dispatch_view'),
(r'^browse$', 'central_dispatch_view'),
(r'^monitor$', 'central_dispatch_view'),
(r'^submit$', 'central_dispatch_view'),
(r'^stat$', 'central_dispatch_view'),
(r'^login/$', 'login'),
(r'^logout/$', 'logout'),
# (r'^$', 'central_dispatch_view'),
(r'^user/(?P<username>\w{0,50})/$', 'index'),
(r'^user/(?P<username>\w{0,50})/browse$', 'browse'),
# (r'^user/(?P<username>\w{0,50})/monitor', 'monitor'),
# (r'^user/(?P<username>\w{0,50})/submit', 'submit'),
# (r'^user/(?P<username>\w{0,50})/stat', 'stat'),
)
| 33.7 | 58 | 0.569733 |
990b3873866758deed49ecf19b9f6e265d5bd2a4
| 3,616 |
py
|
Python
|
checkerpy/types/all/typedtuple.py
|
yedivanseven/CheckerPy
|
04612086d25fecdd0b20ca0a050db8620c437b0e
|
[
"MIT"
] | 1 |
2018-01-12T19:20:51.000Z
|
2018-01-12T19:20:51.000Z
|
checkerpy/types/all/typedtuple.py
|
yedivanseven/CheckerPy
|
04612086d25fecdd0b20ca0a050db8620c437b0e
|
[
"MIT"
] | null | null | null |
checkerpy/types/all/typedtuple.py
|
yedivanseven/CheckerPy
|
04612086d25fecdd0b20ca0a050db8620c437b0e
|
[
"MIT"
] | null | null | null |
from typing import Tuple, Union, Any, Sequence
from collections import deque, defaultdict, OrderedDict
from ...validators.one import JustLen
from ...functional.mixins import CompositionClassMixin
from ..one import Just
dict_keys = type({}.keys())
odict_keys = type(OrderedDict({}).keys())
dict_values = type({}.values())
odict_values = type(OrderedDict({}).values())
dict_items = type({}.items())
odict_items = type(OrderedDict({}).items())
NAMED_TYPES = (frozenset, slice, range,
deque, defaultdict, OrderedDict,
dict_keys, dict_values, dict_items,
odict_keys, odict_values, odict_items)
TypesT = Union[type, Sequence[type]]
| 35.45098 | 78 | 0.641316 |
54c99a336aaeb2a2bf8fbb1530f743b492eca07a
| 2,019 |
py
|
Python
|
data/analyzer/linux/lib/common/abstracts.py
|
iswenhao/Panda-Sandbox
|
a04069d404cb4326ff459e703f14625dc45759ed
|
[
"MIT"
] | 2 |
2021-01-12T15:42:05.000Z
|
2021-01-13T04:59:39.000Z
|
data/analyzer/linux/lib/common/abstracts.py
|
iswenhao/Panda-Sandbox
|
a04069d404cb4326ff459e703f14625dc45759ed
|
[
"MIT"
] | null | null | null |
data/analyzer/linux/lib/common/abstracts.py
|
iswenhao/Panda-Sandbox
|
a04069d404cb4326ff459e703f14625dc45759ed
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.api.process import Process
from lib.exceptions.exceptions import CuckooPackageError
| 27.657534 | 102 | 0.583952 |
54ca6e875f242dc42891ee212f00bf7ca42878a5
| 182 |
py
|
Python
|
rdmo/options/apps.py
|
Raspeanut/rdmo
|
9f785010a499c372a2f8368ccf76d2ea4150adcb
|
[
"Apache-2.0"
] | 1 |
2021-12-13T16:32:25.000Z
|
2021-12-13T16:32:25.000Z
|
rdmo/options/apps.py
|
Raspeanut/rdmo
|
9f785010a499c372a2f8368ccf76d2ea4150adcb
|
[
"Apache-2.0"
] | null | null | null |
rdmo/options/apps.py
|
Raspeanut/rdmo
|
9f785010a499c372a2f8368ccf76d2ea4150adcb
|
[
"Apache-2.0"
] | 1 |
2021-05-20T09:31:49.000Z
|
2021-05-20T09:31:49.000Z
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
| 22.75 | 55 | 0.763736 |
54d0c3f0ae68b706ed041587d739745d17917113
| 380 |
py
|
Python
|
main/admin.py
|
sirodoht/mal
|
82295e1b6a03cd9a7ee1357ca3f5be7a26d0ffe9
|
[
"MIT"
] | 2 |
2020-03-29T18:47:18.000Z
|
2020-05-12T07:03:36.000Z
|
main/admin.py
|
sirodoht/mal
|
82295e1b6a03cd9a7ee1357ca3f5be7a26d0ffe9
|
[
"MIT"
] | null | null | null |
main/admin.py
|
sirodoht/mal
|
82295e1b6a03cd9a7ee1357ca3f5be7a26d0ffe9
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from main import models
admin.site.register(models.User, Admin)
admin.site.register(models.Document, DocumentAdmin)
| 20 | 75 | 0.747368 |
54d0c963fcd5c7b6f9c7de58ed61e6d2623f1f5a
| 3,501 |
py
|
Python
|
cloudshell/cli/configurator.py
|
QualiSystems/cloudshell-cli
|
9a38ff37e91e7798511e860603f5a8a79b782472
|
[
"Apache-2.0"
] | 4 |
2017-01-31T14:05:19.000Z
|
2019-04-10T16:35:44.000Z
|
cloudshell/cli/configurator.py
|
QualiSystems/cloudshell-cli
|
9a38ff37e91e7798511e860603f5a8a79b782472
|
[
"Apache-2.0"
] | 89 |
2016-05-25T14:17:38.000Z
|
2022-03-17T13:09:59.000Z
|
cloudshell/cli/configurator.py
|
QualiSystems/cloudshell-cli
|
9a38ff37e91e7798511e860603f5a8a79b782472
|
[
"Apache-2.0"
] | 6 |
2016-07-21T12:24:10.000Z
|
2022-02-21T06:33:18.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from cloudshell.cli.factory.session_factory import (
CloudInfoAccessKeySessionFactory,
GenericSessionFactory,
SessionFactory,
)
from cloudshell.cli.service.cli import CLI
from cloudshell.cli.session.ssh_session import SSHSession
from cloudshell.cli.session.telnet_session import TelnetSession
ABC = ABCMeta("ABC", (object,), {"__slots__": ()})
if sys.version_info >= (3, 0):
from functools import lru_cache
else:
from functools32 import lru_cache
def get_cli_service(self, command_mode):
"""Use cli.get_session to open CLI connection and switch into required mode.
:param CommandMode command_mode: operation mode, can be
default_mode/enable_mode/config_mode/etc.
:return: created session in provided mode
:rtype: cloudshell.cli.service.session_pool_context_manager.SessionPoolContextManager # noqa: E501
"""
return self._cli.get_session(
self._defined_sessions(), command_mode, self._logger
)
class AbstractModeConfigurator(ABC, CLIServiceConfigurator):
"""Used by shells to run enable/config command."""
def enable_mode_service(self):
return self.get_cli_service(self.enable_mode)
def config_mode_service(self):
return self.get_cli_service(self.config_mode)
| 32.119266 | 120 | 0.694087 |
54d0e7ae83bd72293871a6d51b4fbe8e0a0e701d
| 142 |
py
|
Python
|
examples/ingenerator.py
|
quynhanh-ngx/pytago
|
de976ad8d85702ae665e97978bc4a75d282c857f
|
[
"MIT"
] | 206 |
2021-06-24T16:16:13.000Z
|
2022-03-31T07:44:17.000Z
|
examples/ingenerator.py
|
quynhanh-ngx/pytago
|
de976ad8d85702ae665e97978bc4a75d282c857f
|
[
"MIT"
] | 13 |
2021-06-24T17:51:36.000Z
|
2022-02-23T10:07:17.000Z
|
examples/ingenerator.py
|
quynhanh-ngx/pytago
|
de976ad8d85702ae665e97978bc4a75d282c857f
|
[
"MIT"
] | 14 |
2021-06-26T02:19:45.000Z
|
2022-03-30T03:02:49.000Z
|
if __name__ == '__main__':
main()
| 14.2 | 36 | 0.485915 |
54d2af6cc6ffcbe94ad442887d35faa47a8ec2cd
| 1,090 |
py
|
Python
|
source/packages/scs-pm-server/src/python-server/app.py
|
amittkSharma/scs_predictive_maintenance
|
105a218b47d81d02f7e799287bd1e9279db452ce
|
[
"MIT"
] | null | null | null |
source/packages/scs-pm-server/src/python-server/app.py
|
amittkSharma/scs_predictive_maintenance
|
105a218b47d81d02f7e799287bd1e9279db452ce
|
[
"MIT"
] | 1 |
2022-02-05T17:13:00.000Z
|
2022-02-05T17:13:00.000Z
|
source/packages/scs-pm-server/src/python-server/app.py
|
amittkSharma/scs_predictive_maintenance
|
105a218b47d81d02f7e799287bd1e9279db452ce
|
[
"MIT"
] | null | null | null |
import json
import logging
import joblib
import pandas as pd
from flask import Flask, jsonify, request
from flask_cors import CORS, cross_origin
app = Flask(__name__)
CORS(app)
if __name__ == "__main__":
app.run(debug=True)
# To start the server
# python3 app.py
| 24.222222 | 67 | 0.709174 |
54d3039f58743cfa00e492ea3768046369054479
| 4,411 |
py
|
Python
|
tests/test_remove_from_dependee_chain.py
|
ess-dmsc/nexus-constructor
|
ae0026c48f8f2d4d88d3ff00e45cb6591983853b
|
[
"BSD-2-Clause"
] | 3 |
2019-05-31T08:38:25.000Z
|
2022-01-06T09:23:21.000Z
|
tests/test_remove_from_dependee_chain.py
|
ess-dmsc/nexus-constructor
|
ae0026c48f8f2d4d88d3ff00e45cb6591983853b
|
[
"BSD-2-Clause"
] | 709 |
2019-02-06T08:23:07.000Z
|
2022-03-29T23:03:37.000Z
|
tests/test_remove_from_dependee_chain.py
|
ess-dmsc/nexus-constructor
|
ae0026c48f8f2d4d88d3ff00e45cb6591983853b
|
[
"BSD-2-Clause"
] | 2 |
2020-03-06T09:58:56.000Z
|
2020-08-04T18:32:57.000Z
|
import pytest
from PySide2.QtGui import QVector3D
from nexus_constructor.model.component import Component
from nexus_constructor.model.dataset import Dataset
from nexus_constructor.model.instrument import Instrument
from nexus_constructor.model.value_type import ValueTypes
values = Dataset(
name="scalar_value",
type=ValueTypes.DOUBLE,
size=[1],
values=90.0,
parent_node=None,
)
| 28.275641 | 68 | 0.670143 |
54d32f6738e6ad2c2884cf8b772cee6a6620a984
| 11,013 |
py
|
Python
|
fastmvsnet/train1.py
|
molspace/FastMVS_experiments
|
b897015d77600687ca2addf99bb6a6f0de524e5f
|
[
"MIT"
] | null | null | null |
fastmvsnet/train1.py
|
molspace/FastMVS_experiments
|
b897015d77600687ca2addf99bb6a6f0de524e5f
|
[
"MIT"
] | null | null | null |
fastmvsnet/train1.py
|
molspace/FastMVS_experiments
|
b897015d77600687ca2addf99bb6a6f0de524e5f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import argparse
import os.path as osp
import logging
import time
import sys
sys.path.insert(0, osp.dirname(__file__) + '/..')
import torch
import torch.nn as nn
from fastmvsnet.config import load_cfg_from_file
from fastmvsnet.utils.io import mkdir
from fastmvsnet.utils.logger import setup_logger
from fastmvsnet.utils.torch_utils import set_random_seed
from fastmvsnet.model1 import build_pointmvsnet as build_model
from fastmvsnet.solver import build_optimizer, build_scheduler
from fastmvsnet.utils.checkpoint import Checkpointer
from fastmvsnet.dataset1 import build_data_loader
from fastmvsnet.utils.tensorboard_logger import TensorboardLogger
from fastmvsnet.utils.metric_logger import MetricLogger
from fastmvsnet.utils.file_logger import file_logger
if __name__ == "__main__":
main()
| 37.080808 | 119 | 0.562608 |
54d41bf8d53f9ade04da7c58f9daea5fe0658840
| 857 |
py
|
Python
|
modulo2/3-detectores/3.2-detector/models.py
|
fossabot/unifacisa-visao-computacional
|
14aef22a3e7fe10ee820d31ce12ad21a3cad7b0b
|
[
"MIT"
] | null | null | null |
modulo2/3-detectores/3.2-detector/models.py
|
fossabot/unifacisa-visao-computacional
|
14aef22a3e7fe10ee820d31ce12ad21a3cad7b0b
|
[
"MIT"
] | null | null | null |
modulo2/3-detectores/3.2-detector/models.py
|
fossabot/unifacisa-visao-computacional
|
14aef22a3e7fe10ee820d31ce12ad21a3cad7b0b
|
[
"MIT"
] | 1 |
2021-02-06T00:49:32.000Z
|
2021-02-06T00:49:32.000Z
|
# Estrutura bsica para projetos de Machine Learning e Deep Learning
# Por Adriano Santos.
from torch import nn, relu
import torch.nn.functional as F
import torch.optim as optim
import torch
from torchvision import models
| 29.551724 | 86 | 0.655776 |
54d5248eff89e3f435c1da7e63250cb5c736a60a
| 3,231 |
py
|
Python
|
python/setup.py
|
sbrodeur/evert
|
c7005ba29576145ab650144f9b9230eaf7bec460
|
[
"BSD-3-Clause"
] | 28 |
2017-10-04T13:58:43.000Z
|
2021-11-06T10:46:51.000Z
|
python/setup.py
|
sbrodeur/evert
|
c7005ba29576145ab650144f9b9230eaf7bec460
|
[
"BSD-3-Clause"
] | 7 |
2017-12-04T17:17:55.000Z
|
2021-07-29T08:58:26.000Z
|
python/setup.py
|
sbrodeur/evert
|
c7005ba29576145ab650144f9b9230eaf7bec460
|
[
"BSD-3-Clause"
] | 10 |
2017-11-07T14:51:08.000Z
|
2019-06-05T04:17:44.000Z
|
#!/usr/bin/env python
# Copyright (c) 2017, Simon Brodeur
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
"""
setup.py file for installing Python bindings using SWIG
"""
from distutils.core import setup, Extension
evert_module = Extension('_evert',
define_macros = [('MAJOR_VERSION', '1'),
('MINOR_VERSION', '0')],
include_dirs = ['../include'],
sources=['../src/elBeam.cpp',
'../src/elBSP.cpp',
'../src/elGLUT.cpp',
'../src/elListener.cpp',
'../src/elOrientedPoint.cpp',
'../src/elPathSolution.cpp',
'../src/elPolygon.cpp',
'../src/elRay.cpp',
'../src/elRoom.cpp',
'../src/elSource.cpp',
'../src/elTimer.cpp',
'../src/elVector.cpp',
'../src/elViewer.cpp',
'evert.i'],
libraries = ['GL', 'GLU', 'glut'],
library_dirs = [],
language='c++',
swig_opts=['-c++', '-I../include'],
#extra_compile_args=['-std=c++11'],
)
setup (name = 'evert',
version = '1.0',
author = "Samuli Laine",
description = """Accelerated beam tracing algorithm""",
ext_modules = [evert_module],
py_modules = ["evert"],
)
| 46.826087 | 89 | 0.556484 |
54d6049e6360802df5527ba35f15e6ff291748e2
| 530 |
py
|
Python
|
somegame/fps_osd.py
|
kodo-pp/somegame-but-not-that-one
|
6252d34b84fe7c83ada9e699df17688c50dd7596
|
[
"MIT"
] | null | null | null |
somegame/fps_osd.py
|
kodo-pp/somegame-but-not-that-one
|
6252d34b84fe7c83ada9e699df17688c50dd7596
|
[
"MIT"
] | null | null | null |
somegame/fps_osd.py
|
kodo-pp/somegame-but-not-that-one
|
6252d34b84fe7c83ada9e699df17688c50dd7596
|
[
"MIT"
] | null | null | null |
import pygame
from loguru import logger
from somegame.osd import OSD
| 29.444444 | 85 | 0.635849 |
54d6ce148b09071a1e33198868f6c84a03813ea1
| 11,846 |
py
|
Python
|
python/chronos/test/bigdl/chronos/data/experimental/test_xshardstsdataset.py
|
sgwhat/BigDL
|
25b402666fbb26b0bc18fc8100e9a00469844778
|
[
"Apache-2.0"
] | null | null | null |
python/chronos/test/bigdl/chronos/data/experimental/test_xshardstsdataset.py
|
sgwhat/BigDL
|
25b402666fbb26b0bc18fc8100e9a00469844778
|
[
"Apache-2.0"
] | null | null | null |
python/chronos/test/bigdl/chronos/data/experimental/test_xshardstsdataset.py
|
sgwhat/BigDL
|
25b402666fbb26b0bc18fc8100e9a00469844778
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import numpy as np
import pandas as pd
import random
import os
from unittest import TestCase
from bigdl.chronos.data import TSDataset
from bigdl.chronos.data.experimental import XShardsTSDataset
from bigdl.orca.data.pandas import read_csv
from bigdl.orca.common import init_orca_context, stop_orca_context, OrcaContext
from pandas.testing import assert_frame_equal
from numpy.testing import assert_array_almost_equal
| 46.454902 | 100 | 0.593534 |
54d7680f93fc7f5f7a46d60f37723337c7dce6f3
| 2,603 |
py
|
Python
|
zoom_functions.py
|
WXSD-Sales/ZoomToWebex
|
16cc663620e2ef2904b0e2857d709aee96b78eb7
|
[
"MIT"
] | 1 |
2021-10-21T01:36:33.000Z
|
2021-10-21T01:36:33.000Z
|
zoom_functions.py
|
WXSD-Sales/integration-samples
|
2f18be740329f3c35c78c268a6d4544cae5d313e
|
[
"MIT"
] | null | null | null |
zoom_functions.py
|
WXSD-Sales/integration-samples
|
2f18be740329f3c35c78c268a6d4544cae5d313e
|
[
"MIT"
] | null | null | null |
import json
import tornado.gen
import traceback
from base64 import b64encode
from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError
from settings import Settings
from mongo_db_controller import ZoomUserDB
| 38.850746 | 142 | 0.661929 |
54d83fe60a2207f45c149a5e0cac230756ba7376
| 1,484 |
py
|
Python
|
crypten/mpc/__init__.py
|
gmuraru/CrypTen
|
e39a7aaf65436706321fe4e3fc055308c78b6b92
|
[
"MIT"
] | null | null | null |
crypten/mpc/__init__.py
|
gmuraru/CrypTen
|
e39a7aaf65436706321fe4e3fc055308c78b6b92
|
[
"MIT"
] | null | null | null |
crypten/mpc/__init__.py
|
gmuraru/CrypTen
|
e39a7aaf65436706321fe4e3fc055308c78b6b92
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from crypten.mpc import primitives # noqa: F401
from crypten.mpc import provider # noqa: F40
from .context import run_multiprocess
from .mpc import MPCTensor
from .ptype import ptype
__all__ = ["MPCTensor", "primitives", "provider", "ptype", "run_multiprocess"]
# the different private type attributes of an mpc encrypted tensor
arithmetic = ptype.arithmetic
binary = ptype.binary
# Set provider
__SUPPORTED_PROVIDERS = {
"TFP": provider.TrustedFirstParty,
"TTP": provider.TrustedThirdParty,
"HE": provider.HomomorphicProvider,
}
__default_provider = __SUPPORTED_PROVIDERS[
os.environ.get("CRYPTEN_PROVIDER_NAME", "TFP")
]
| 28.538462 | 81 | 0.768194 |
54d943f36b7e93ff9b844e618cfa99e6c35ca662
| 2,011 |
py
|
Python
|
contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/pyflakes.py
|
lahosken/pants
|
1b0340987c9b2eab9411416803c75b80736716e4
|
[
"Apache-2.0"
] | null | null | null |
contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/pyflakes.py
|
lahosken/pants
|
1b0340987c9b2eab9411416803c75b80736716e4
|
[
"Apache-2.0"
] | null | null | null |
contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/pyflakes.py
|
lahosken/pants
|
1b0340987c9b2eab9411416803c75b80736716e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pyflakes.checker import Checker as FlakesChecker
from pants.contrib.python.checks.tasks.checkstyle.common import CheckstylePlugin, Nit
| 35.910714 | 93 | 0.721532 |
54d95a6a219b638ddca6d85bef7b830f95b22592
| 2,426 |
py
|
Python
|
pharmrep/forum/models.py
|
boyombo/pharmrep
|
2293ceb235dec949c58fa40d1ee43fce172e0ceb
|
[
"MIT"
] | null | null | null |
pharmrep/forum/models.py
|
boyombo/pharmrep
|
2293ceb235dec949c58fa40d1ee43fce172e0ceb
|
[
"MIT"
] | null | null | null |
pharmrep/forum/models.py
|
boyombo/pharmrep
|
2293ceb235dec949c58fa40d1ee43fce172e0ceb
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
| 32.783784 | 100 | 0.659934 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.