Dataset schema (one row per file; ⌀ marks columns that may be null):

| column | dtype | values | nulls |
| --- | --- | --- | --- |
| hexsha | string | length 40 | |
| size | int64 | 5 to 2.06M | |
| ext | string | 11 classes | |
| lang | string | 1 class | |
| max_stars_repo_path | string | length 3 to 251 | |
| max_stars_repo_name | string | length 4 to 130 | |
| max_stars_repo_head_hexsha | string | length 40 to 78 | |
| max_stars_repo_licenses | list | length 1 to 10 | |
| max_stars_count | int64 | 1 to 191k | ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 | ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 | ⌀ |
| max_issues_repo_path | string | length 3 to 251 | |
| max_issues_repo_name | string | length 4 to 130 | |
| max_issues_repo_head_hexsha | string | length 40 to 78 | |
| max_issues_repo_licenses | list | length 1 to 10 | |
| max_issues_count | int64 | 1 to 116k | ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 | ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 | ⌀ |
| max_forks_repo_path | string | length 3 to 251 | |
| max_forks_repo_name | string | length 4 to 130 | |
| max_forks_repo_head_hexsha | string | length 40 to 78 | |
| max_forks_repo_licenses | list | length 1 to 10 | |
| max_forks_count | int64 | 1 to 105k | ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 | ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 | ⌀ |
| content | string | length 1 to 1.05M | |
| avg_line_length | float64 | 1 to 1.02M | |
| max_line_length | int64 | 3 to 1.04M | |
| alphanum_fraction | float64 | 0 to 1 | |
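Each record below lists its metadata fields in the column order above, followed by the file's `content` and the three derived statistics (`avg_line_length`, `max_line_length`, `alphanum_fraction`). Those last three columns are plain functions of `content`. As a minimal sketch (the exact formulas used to build this dump are an assumption here, not documented in it), they can be recomputed from a row like this:

```python
# Minimal sketch (assumed formulas): recompute the three derived
# statistics columns of a row from its `content` string.
def content_stats(content: str) -> dict:
    lines = content.split("\n")
    lengths = [len(line) for line in lines]
    alnum = sum(ch.isalnum() for ch in content)
    return {
        "avg_line_length": sum(lengths) / len(lengths),  # mean characters per line
        "max_line_length": max(lengths),                 # longest single line
        "alphanum_fraction": alnum / len(content) if content else 0.0,
    }

# Example with a tiny one-line file:
print(content_stats("import pybrisk\n"))
# -> avg_line_length 7.0, max_line_length 14, alphanum_fraction ~0.867
```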
16d2ceeba676dbb491a1206466347e8ee17c6418 | 2,485 | py | Python | source/code/build-instance-scheduler-template.py | liangruibupt/aws-instance-scheduler | a4e46eec9f39c2e3b95c5bcbe32c036e239d6066 | ["Apache-2.0"] | null | null | null | source/code/build-instance-scheduler-template.py | liangruibupt/aws-instance-scheduler | a4e46eec9f39c2e3b95c5bcbe32c036e239d6066 | ["Apache-2.0"] | null | null | null | source/code/build-instance-scheduler-template.py | liangruibupt/aws-instance-scheduler | a4e46eec9f39c2e3b95c5bcbe32c036e239d6066 | ["Apache-2.0"] | 1 | 2021-04-09T15:01:49.000Z | 2021-04-09T15:01:49.000Z |
######################################################################################################################
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/ #
# #
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
######################################################################################################################
import json
import sys
from collections import OrderedDict
main(template_file=sys.argv[1], bucket=sys.argv[2], solution=sys.argv[3], version=sys.argv[4], region=sys.argv[5])
sys.exit(0)
| 59.166667 | 118 | 0.464789 |
16d306bdfaed88804b418d267e2c9f7fdd6fab73 | 7,965 | py | Python | src/parse.py | StanfordAHA/Configuration | a5d404433d32b0ac20544d5bafa9422c979afc16 | ["BSD-3-Clause"] | null | null | null | src/parse.py | StanfordAHA/Configuration | a5d404433d32b0ac20544d5bafa9422c979afc16 | ["BSD-3-Clause"] | null | null | null | src/parse.py | StanfordAHA/Configuration | a5d404433d32b0ac20544d5bafa9422c979afc16 | ["BSD-3-Clause"] | null | null | null |
###############################################################################
# file -- parse.py --
# Top contributors (to current version):
# Nestan Tsiskaridze
# This file is part of the configuration finder for the Stanford AHA project.
# Copyright (c) 2021 by the authors listed in the file AUTHORS
# in the top-level source directory and their institutional affiliations.
# All rights reserved. See the file LICENSE in the top-level source
# directory for licensing information.
#
# Handles parsing of all input files.
###############################################################################
import smt_switch as ss
import smt_switch.primops as po
import smt_switch.sortkinds as sk
import argparse
import pono as c
import sys
import re
import time
import copy
import io
#import timeit
| 40.431472 | 129 | 0.417075 |
16d35857ae1d82e14e5940b8e5331b8a6a44ca39 | 2,177 | py | Python | neyesem/main.py | omerfarukbaysal/neyesem | f69bf4446ce902f00389c8d71f68e1b7db05f86d | ["MIT"] | null | null | null | neyesem/main.py | omerfarukbaysal/neyesem | f69bf4446ce902f00389c8d71f68e1b7db05f86d | ["MIT"] | null | null | null | neyesem/main.py | omerfarukbaysal/neyesem | f69bf4446ce902f00389c8d71f68e1b7db05f86d | ["MIT"] | null | null | null |
from flask import Blueprint, render_template, redirect, url_for, request, flash, make_response
from werkzeug.security import generate_password_hash
from flask_login import login_required, current_user
from . import db
import datetime
from .models import Visitor, User
main = Blueprint('main', __name__)
| 33.492308 | 120 | 0.692237 |
16d397fdfd404f351b1fb42cfa6cff5538a49320 | 790 | py | Python | 00-Aulas/Aula007_2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | ["MIT"] | null | null | null | 00-Aulas/Aula007_2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | ["MIT"] | null | null | null | 00-Aulas/Aula007_2.py | AmandaRH07/Python_Entra21 | 4084962508f1597c0498d8b329e0f45e2ac55302 | ["MIT"] | null | null | null |
# Functions
cabecalho = "SISTEMA DE CADASTRO DE FUNCIONARIO\n\n\n"
rodape = "\n\n\n Obrigada pela preferencia"
| 27.241379 | 70 | 0.655696 |
16d47d0537155255ce27cd3c3479b098ca6ecf13 | 665 | py | Python | ast_version/src/binop.py | lucassa3/CCompiler | ad788f692dc2863da9111b4a42f54277ac29d5ae | ["MIT"] | 1 | 2020-04-29T21:30:11.000Z | 2020-04-29T21:30:11.000Z | ast_version/src/binop.py | lucassa3/CCompiler | ad788f692dc2863da9111b4a42f54277ac29d5ae | ["MIT"] | 10 | 2018-08-20T18:10:56.000Z | 2019-04-05T14:45:11.000Z | ast_version/src/binop.py | lucassa3/CCompiler | ad788f692dc2863da9111b4a42f54277ac29d5ae | ["MIT"] | null | null | null |
from node import Node
| 19.558824 | 32 | 0.542857 |
16d53c81f0a6c59b031bb33f8b48778a56657258 | 7,180 | py | Python | aqt/installer.py | pylipp/aqtinstall | e08667cb5c9ced27994c4cde16d0c1b4a4386455 | ["MIT"] | null | null | null | aqt/installer.py | pylipp/aqtinstall | e08667cb5c9ced27994c4cde16d0c1b4a4386455 | ["MIT"] | null | null | null | aqt/installer.py | pylipp/aqtinstall | e08667cb5c9ced27994c4cde16d0c1b4a4386455 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
#
# Copyright (C) 2018 Linus Jahn <[email protected]>
# Copyright (C) 2019,2020 Hiroshi Miura <[email protected]>
# Copyright (C) 2020, Aurélien Gâteau
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import concurrent.futures
import os
import pathlib
import subprocess
import sys
import time
from logging import getLogger
import py7zr
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from aqt.archives import QtPackage
from aqt.helper import altlink, versiontuple
from aqt.qtpatch import Updater
from aqt.settings import Settings
| 40.111732 | 110 | 0.608635 |
16d55202daea41a875b382f2393a76063d29376b | 4,865 | py | Python | lib/django-0.96/django/views/generic/list_detail.py | MiCHiLU/google_appengine_sdk | 3da9f20d7e65e26c4938d2c4054bc4f39cbc5522 | ["Apache-2.0"] | 790 | 2015-01-03T02:13:39.000Z | 2020-05-10T19:53:57.000Z | AppServer/lib/django-0.96/django/views/generic/list_detail.py | nlake44/appscale | 6944af660ca4cb772c9b6c2332ab28e5ef4d849f | ["Apache-2.0"] | 1,361 | 2015-01-08T23:09:40.000Z | 2020-04-14T00:03:04.000Z | AppServer/lib/django-0.96/django/views/generic/list_detail.py | nlake44/appscale | 6944af660ca4cb772c9b6c2332ab28e5ef4d849f | ["Apache-2.0"] | 155 | 2015-01-08T22:59:31.000Z | 2020-04-08T08:01:53.000Z |
from django.template import loader, RequestContext
from django.http import Http404, HttpResponse
from django.core.xheaders import populate_xheaders
from django.core.paginator import ObjectPaginator, InvalidPage
from django.core.exceptions import ObjectDoesNotExist
def object_list(request, queryset, paginate_by=None, page=None,
allow_empty=False, template_name=None, template_loader=loader,
extra_context=None, context_processors=None, template_object_name='object',
mimetype=None):
"""
Generic list of objects.
Templates: ``<app_label>/<model_name>_list.html``
Context:
object_list
list of objects
is_paginated
are the results paginated?
results_per_page
number of objects per page (if paginated)
has_next
is there a next page?
has_previous
is there a prev page?
page
the current page
next
the next page
previous
the previous page
pages
number of pages, total
hits
number of objects, total
last_on_page
the result number of the last of object in the
object_list (1-indexed)
first_on_page
the result number of the first object in the
object_list (1-indexed)
"""
if extra_context is None: extra_context = {}
queryset = queryset._clone()
if paginate_by:
paginator = ObjectPaginator(queryset, paginate_by)
if not page:
page = request.GET.get('page', 1)
try:
page = int(page)
object_list = paginator.get_page(page - 1)
except (InvalidPage, ValueError):
if page == 1 and allow_empty:
object_list = []
else:
raise Http404
c = RequestContext(request, {
'%s_list' % template_object_name: object_list,
'is_paginated': paginator.pages > 1,
'results_per_page': paginate_by,
'has_next': paginator.has_next_page(page - 1),
'has_previous': paginator.has_previous_page(page - 1),
'page': page,
'next': page + 1,
'previous': page - 1,
'last_on_page': paginator.last_on_page(page - 1),
'first_on_page': paginator.first_on_page(page - 1),
'pages': paginator.pages,
'hits' : paginator.hits,
}, context_processors)
else:
c = RequestContext(request, {
'%s_list' % template_object_name: queryset,
'is_paginated': False
}, context_processors)
if not allow_empty and len(queryset) == 0:
raise Http404
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
if not template_name:
model = queryset.model
template_name = "%s/%s_list.html" % (model._meta.app_label, model._meta.object_name.lower())
t = template_loader.get_template(template_name)
return HttpResponse(t.render(c), mimetype=mimetype)
def object_detail(request, queryset, object_id=None, slug=None,
slug_field=None, template_name=None, template_name_field=None,
template_loader=loader, extra_context=None,
context_processors=None, template_object_name='object',
mimetype=None):
"""
Generic detail of an object.
Templates: ``<app_label>/<model_name>_detail.html``
Context:
object
the object
"""
if extra_context is None: extra_context = {}
model = queryset.model
if object_id:
queryset = queryset.filter(pk=object_id)
elif slug and slug_field:
queryset = queryset.filter(**{slug_field: slug})
else:
raise AttributeError, "Generic detail view must be called with either an object_id or a slug/slug_field."
try:
obj = queryset.get()
except ObjectDoesNotExist:
raise Http404, "No %s found matching the query" % (model._meta.verbose_name)
if not template_name:
template_name = "%s/%s_detail.html" % (model._meta.app_label, model._meta.object_name.lower())
if template_name_field:
template_name_list = [getattr(obj, template_name_field), template_name]
t = template_loader.select_template(template_name_list)
else:
t = template_loader.get_template(template_name)
c = RequestContext(request, {
template_object_name: obj,
}, context_processors)
for key, value in extra_context.items():
if callable(value):
c[key] = value()
else:
c[key] = value
response = HttpResponse(t.render(c), mimetype=mimetype)
populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.name))
return response
| 36.856061 | 113 | 0.623227 |
16d68949a023a20451569c4bd42476cab180bd99 | 5,398 | py | Python | pax/_src/core/utility_modules.py | NTT123/pax | b80e1e4b6bfb763afd6b4fdefa31a051ca8a3335 | ["MIT"] | 11 | 2021-08-28T17:45:38.000Z | 2022-01-26T17:50:03.000Z | pax/_src/core/utility_modules.py | NTT123/pax | b80e1e4b6bfb763afd6b4fdefa31a051ca8a3335 | ["MIT"] | 1 | 2021-09-13T17:29:33.000Z | 2021-09-13T21:50:34.000Z | pax/_src/core/utility_modules.py | NTT123/pax | b80e1e4b6bfb763afd6b4fdefa31a051ca8a3335 | ["MIT"] | null | null | null |
"""Utility Modules."""
from typing import Any, Callable, Dict, List, Optional, Sequence, TypeVar, Union
import jax
import jax.numpy as jnp
from .module import Module, parameters_method
T = TypeVar("T", bound=Module)
O = TypeVar("O")
| 29.659341 | 91 | 0.582994 |
16d79dca474781cfacdcca9ed1544b5e9e33234c | 2,612 | py | Python | src/richie/apps/courses/lms/edx.py | kernicPanel/richie | 803deda3e29383ce85593e1836a3cf4efc6b847e | ["MIT"] | null | null | null | src/richie/apps/courses/lms/edx.py | kernicPanel/richie | 803deda3e29383ce85593e1836a3cf4efc6b847e | ["MIT"] | null | null | null | src/richie/apps/courses/lms/edx.py | kernicPanel/richie | 803deda3e29383ce85593e1836a3cf4efc6b847e | ["MIT"] | null | null | null |
"""
Backend to connect Open edX richie with an LMS
"""
import logging
import re
import requests
from requests.auth import AuthBase
from ..serializers import SyncCourseRunSerializer
from .base import BaseLMSBackend
logger = logging.getLogger(__name__)
def split_course_key(key):
"""Split an OpenEdX course key by organization, course and course run codes.
We first try splitting the key as a version 1 key (course-v1:org+course+run)
and fallback the old version (org/course/run).
"""
if key.startswith("course-v1:"):
organization, course, run = key[10:].split("+")
else:
organization, course, run = key.split("/")
return organization, course, run
| 32.246914 | 88 | 0.68683 |
16d7b7c1c6e2def8cf0c9ec10f6916a0a8cf367f | 4,106 | py | Python | BitTorrent-5.2.2/BTL/brpclib.py | jpabb7/p2pScrapper | 0fd57049606864223eb45f956a58adda1231af88 | ["MIT"] | 4 | 2016-04-26T03:43:54.000Z | 2016-11-17T08:09:04.000Z | BitTorrent-5.2.2/BTL/brpclib.py | jpabb7/p2pScrapper | 0fd57049606864223eb45f956a58adda1231af88 | ["MIT"] | 17 | 2015-01-05T21:06:22.000Z | 2015-12-07T20:45:44.000Z | BitTorrent-5.2.2/BTL/brpclib.py | jpabb7/p2pScrapper | 0fd57049606864223eb45f956a58adda1231af88 | ["MIT"] | 7 | 2015-07-28T09:17:17.000Z | 2021-11-07T02:29:41.000Z |
# by Greg Hazel
import xmlrpclib
from xmlrpclib2 import *
from BTL import brpc
old_PyCurlTransport = PyCurlTransport
# --------------------------------------------------------------------
# request dispatcher
# Double underscore is BAD!
def new_server_proxy(url):
c = cache_set.get_cache(PyCURL_Cache, url)
t = PyCurlTransport(c)
return BRPC_ServerProxy(url, transport=t)
ServerProxy = new_server_proxy
if __name__ == '__main__':
s = ServerProxy('https://greg.mitte.bittorrent.com:7080/')
ping(0, 1, 1, name="potato")
ping(0, 1, 1, name="anime")
ping("phish", 0, 1, 1)
ping("games", 0, 1, 1)
| 30.641791 | 74 | 0.605212 |
16d7e9187801937282012d38f8b28fd55938bd25 | 1,207 | py | Python | database_files/views.py | den-gts/django-database-files-3000 | 0a135004427c021944b30ef8aace844ab20b9cfb | ["BSD-3-Clause"] | 8 | 2016-12-11T02:24:21.000Z | 2020-08-07T10:02:32.000Z | database_files/views.py | den-gts/django-database-files-3000 | 0a135004427c021944b30ef8aace844ab20b9cfb | ["BSD-3-Clause"] | 41 | 2015-08-11T16:57:21.000Z | 2022-01-18T19:19:41.000Z | database_files/views.py | den-gts/django-database-files-3000 | 0a135004427c021944b30ef8aace844ab20b9cfb | ["BSD-3-Clause"] | 7 | 2015-08-02T05:32:41.000Z | 2019-06-17T11:53:14.000Z |
import base64
import mimetypes
import os
from django.conf import settings
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import cache_control
from django.views.static import serve as django_serve
from database_files.models import File
def serve_mixed(request, *args, **kwargs):
"""
First attempts to serve the file from the filesystem,
then tries the database.
"""
name = kwargs.get('name') or kwargs.get('path')
document_root = kwargs.get('document_root')
document_root = document_root or settings.MEDIA_ROOT
try:
# First attempt to serve from filesystem.
return django_serve(request, name, document_root)
except Http404:
# Then try serving from database.
return serve(request, name)
| 29.439024 | 74 | 0.724109 |
16d80d08df5b20660db28d091611ed67b6dfa076 | 2,026 | py | Python | NoiseFiltersPy/Injector.py | TVect/NoiseFiltersPy | fff1f3113cf9b3e7b8de65421ab9951fd3cb11e5 | ["MIT"] | 6 | 2019-11-20T19:32:41.000Z | 2021-06-25T19:47:26.000Z | NoiseFiltersPy/Injector.py | TVect/NoiseFiltersPy | fff1f3113cf9b3e7b8de65421ab9951fd3cb11e5 | ["MIT"] | null | null | null | NoiseFiltersPy/Injector.py | TVect/NoiseFiltersPy | fff1f3113cf9b3e7b8de65421ab9951fd3cb11e5 | ["MIT"] | 1 | 2021-06-25T19:47:34.000Z | 2021-06-25T19:47:34.000Z |
import numpy as np
import pandas as pd
from abc import ABC
def _gen_random(self, seed: int = None):
"""[summary]
Args:
seed (int, optional): [description]. Defaults to 123.
"""
rng = np.random.default_rng(seed)
for example in self._new_noise:
self._labels.iloc[example] = rng.choice(list(self._label_types - set(self._labels.iloc[example])))
| 30.238806 | 110 | 0.600197 |
16d81711460bcdde5df04988352f117c180dbed8 | 19,516 | py | Python | application/mod_user/forms.py | hackBCA/hackbcafour | 971120ff88423cc660f92985790cddf9939838bf | ["MIT"] | 2 | 2016-11-13T21:32:51.000Z | 2017-03-22T02:50:26.000Z | application/mod_user/forms.py | hackBCA/hackbcafour | 971120ff88423cc660f92985790cddf9939838bf | ["MIT"] | 1 | 2021-02-08T20:18:59.000Z | 2021-02-08T20:18:59.000Z | application/mod_user/forms.py | hackBCA/hackbcafour | 971120ff88423cc660f92985790cddf9939838bf | ["MIT"] | null | null | null |
from wtforms import Form, TextField, PasswordField, SelectField, TextAreaField, BooleanField, validators, ValidationError, RadioField
import re
phone_regex = "(\+\d+-?)?((\(?\d{3}\)?)|(\d{3}))-?\d{3}-?\d{4}$"
gender_choices = [
("", "Gender"),
("male", "Male"),
("female", "Female"),
("other", "Other"),
("rns", "Rather Not Say")
]
beginner_choices = [
("", "Are you a beginner?"),
("yes", "Yes"),
("no", "No")
]
ethnicity_choices = [
("", "Ethnicity"),
("white", "White"),
("african_american", "African American"),
("asian_pacific", "Asian or Pacific Islander"),
("american_indian_alaskan_native", "American Indian or Alaskan Native"),
("multiracial", "Multiracial"),
("hispanic", "Hispanic origin"),
("other", "Other"),
("rns", "Rather Not Say")
]
num_hackathons_choices = [
("", "How many hackathons have you been to?"),
("0", "0"),
("1", "1"),
("2", "2"),
("3", "3"),
("4", "4"),
("5", "5+")
]
num_hackathons_choices_mentor = [
("", "How many hackathons have you mentored at?"),
("0", "0"),
("1", "1"),
("2", "2"),
("3", "3"),
("4", "4"),
("5", "5+")
]
grade_choices = [
("", "What grade are you in?"),
("9", "9th"),
("10", "10th"),
("11", "11th"),
("12", "12th")
]
shirt_sizes = [
("", "What is your shirt size?"),
("XS", "Extra Small"),
("S", "Small"),
("M", "Medium"),
("L", "Large"),
("XL", "Extra Large")
]
type_account_choices = [
("hacker", "Hacker"),
("mentor", "Mentor")
]
free_response1_prompt = "Why do you want to come to hackBCA?"
free_response1_prompt_mentor = "Please list languages/frameworks/technologies that you would like to mentor students in."
free_response2_prompt_mentor = "Would you like to run a workshop? If so, please briefly describe your ideas."
attending_choices = [
("Attending", "Yes, I will!"),
("Not Attending", "No, I won't.")
]
# class MentorRsvpForm(Form):
# attending = RadioField("Are you attending hackBCA III?", [validators.Required(message = "Please tell us if you are attending hackBCA III.")], choices = attending_choices)
# phone = TextField("Phone Number", [
# validators.Required(message = "Confirm your preferred contact number."),
# validators.Regexp(phone_regex, message = "Please enter a valid phone number.")
# ], description = "Phone Number Confirmation")
# t_shirt_size = SelectField("What is your shirt size?", [validators.Required(message = "You must select an option.")], choices = shirt_sizes, description = "What is your shirt size?")
# food_allergies = TextAreaField("Allergies", [
# validators.optional(),
# ], description = "Do you have any allergies?")
# medical_information = TextAreaField("Medical Information", [
# validators.optional(),
# ], description = "Are there any other medical issues that we should know about? (ex. Other allergies, illnesses, etc.)")
# hackbca_rules = BooleanField("I agree",[
# validators.Required(message = "Please read and agree to our rules.")
# ], description = "I agree to the rules set forth by hackBCA.", default = False)
# mlh_terms = BooleanField("I agree",[
# validators.Required(message = "Please read and agree to the MLH Code of Conduct.")
# ], description = "I agree to the MLH Code of Conduct.", default = False)
| 45.071594 | 269 | 0.645522 |
16d86786252483bb0df3775ba6255b1dd3edd2a1 | 2,181 | py | Python | src/app.py | gh640/coding-challenge | 3be31d643ac081bfec3495cb8f705c400be82553 | ["MIT"] | null | null | null | src/app.py | gh640/coding-challenge | 3be31d643ac081bfec3495cb8f705c400be82553 | ["MIT"] | 2 | 2017-11-17T03:14:45.000Z | 2019-10-19T07:17:22.000Z | src/app.py | gh640/coding-challenge | 3be31d643ac081bfec3495cb8f705c400be82553 | ["MIT"] | 1 | 2017-11-16T09:33:38.000Z | 2017-11-16T09:33:38.000Z |
# coding: utf-8
'''
'''
from math import ceil
import os
from flask import json
from flask import Flask
from flask import request
from flask import send_from_directory
from flask import render_template
# from json_loader import load_locations
# from json_loader import prepare_locations
from models import Location
#
LOCATION_ITEMS_PER_PAGE = 20
app = Flask(__name__)
app.config['GOOGLE_API_KEY'] = os.environ['GOOGLE_API_KEY']
app.config['ROOT'] = (app.config['APPLICATION_ROOT']
if app.config['APPLICATION_ROOT'] else '')
| 23.706522 | 79 | 0.629069 |
16d86a94620baf9944e6bd338662eefcd3ab573e | 2,180 | py | Python | corkus/objects/dungeon.py | MrBartusek/corkus.py | 031c11e3e251f0bddbcb67415564357460fe7fea | ["MIT"] | 5 | 2021-09-10T14:20:15.000Z | 2022-01-09T11:27:49.000Z | corkus/objects/dungeon.py | MrBartusek/corkus.py | 031c11e3e251f0bddbcb67415564357460fe7fea | ["MIT"] | 11 | 2021-08-15T09:39:09.000Z | 2022-01-12T14:11:24.000Z | corkus/objects/dungeon.py | MrBartusek/corkus.py | 031c11e3e251f0bddbcb67415564357460fe7fea | ["MIT"] | 2 | 2021-12-01T23:33:14.000Z | 2022-01-12T11:08:18.000Z |
from __future__ import annotations
from .base import CorkusBase
from enum import Enum
| 34.0625 | 113 | 0.580734 |
16d935b63ca1c52fcdad82da9c168df67d096ff5 | 527 | py | Python | src/brisk.py | chaoer/brisk-descriptor | 140b08539768b8038680fd86d7fda9688dd5b908 | ["BSD-3-Clause"] | 18 | 2015-02-05T00:44:24.000Z | 2018-11-30T03:20:51.000Z | src/brisk.py | chaoer/brisk-descriptor | 140b08539768b8038680fd86d7fda9688dd5b908 | ["BSD-3-Clause"] | 4 | 2016-06-25T20:04:59.000Z | 2019-01-29T19:34:24.000Z | src/brisk.py | chaoer/brisk-descriptor | 140b08539768b8038680fd86d7fda9688dd5b908 | ["BSD-3-Clause"] | 14 | 2015-11-15T05:20:28.000Z | 2019-01-02T12:50:44.000Z |
import pybrisk
| 26.35 | 57 | 0.648956 |
16dc5aa7f7c7413a9e340c8bb600ebd849d60e67 | 2,897 | py | Python | hale_hub/outlet_interface.py | tantinlala/hale-hub | da2e6d24e3869ee533d2e272ce87b9e7eede9a79 | ["MIT"] | null | null | null | hale_hub/outlet_interface.py | tantinlala/hale-hub | da2e6d24e3869ee533d2e272ce87b9e7eede9a79 | ["MIT"] | null | null | null | hale_hub/outlet_interface.py | tantinlala/hale-hub | da2e6d24e3869ee533d2e272ce87b9e7eede9a79 | ["MIT"] | null | null | null |
import serial
import serial.tools.list_ports
from hale_hub.constants import STARTING_OUTLET_COMMAND, SERIAL_BAUD_RATE, SERIAL_TIMEOUT
from hale_hub.ifttt_logger import send_ifttt_log
_outlet_interface = _OutletInterface()
set_outlet_serial_interface = _outlet_interface.set_serial_interface
toggle_outlet = _outlet_interface.toggle_outlet
turn_on_outlet = _outlet_interface.turn_on_outlet
turn_off_outlet = _outlet_interface.turn_off_outlet
get_outlets = _outlet_interface.get_outlets
set_outlet_name = _outlet_interface.set_outlet_name
| 41.385714 | 123 | 0.706593 |
16dcdf8ea3ba055a8650580e31092f4149c84a27 | 3,233 | py | Python | helix/core.py | carbonscott/helix | e2ee6e1293cae4f0bd1220ed5a41268d20a095db | ["MIT"] | null | null | null | helix/core.py | carbonscott/helix | e2ee6e1293cae4f0bd1220ed5a41268d20a095db | ["MIT"] | null | null | null | helix/core.py | carbonscott/helix | e2ee6e1293cae4f0bd1220ed5a41268d20a095db | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
def measure_twocores(core_xyz_ref, core_xyz_tar):
''' Measure the following aspects of two helical cores.
- Interhelical distance vector between the centers.
- Interhelical angle (0-90 degree)
'''
# Obtain the centers...
center_ref = np.nanmean(core_xyz_ref, axis = 0)
center_tar = np.nanmean(core_xyz_tar, axis = 0)
# Construct the interhelical distance vector...
ih_dvec = center_tar - center_ref
# Calculate the length of interhelical distance vector...
norm_ih_dvec = np.linalg.norm(ih_dvec)
# Obtain the helical core vectors...
core_xyz_ref_nonan = remove_nan(core_xyz_ref)
core_xyz_tar_nonan = remove_nan(core_xyz_tar)
core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]
# Calculate the interhelical angle...
core_vec_ref_unit = core_vec_ref / np.linalg.norm(core_vec_ref)
core_vec_tar_unit = core_vec_tar / np.linalg.norm(core_vec_tar)
ih_ang = np.arccos( np.dot(core_vec_ref_unit, core_vec_tar_unit) )
return ih_dvec, norm_ih_dvec, core_vec_ref_unit, core_vec_tar_unit, ih_ang
def calc_interangle(core_xyz_ref, core_xyz_tar):
''' Measure the following aspects of two helical cores.
- Interhelical angle (0-90 degree)
'''
# Obtain the helical core vectors...
core_xyz_ref_nonan = remove_nan(core_xyz_ref)
core_xyz_tar_nonan = remove_nan(core_xyz_tar)
core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]
# Calculate the interhelical angle...
core_vec_ref_unit = core_vec_ref / np.linalg.norm(core_vec_ref)
core_vec_tar_unit = core_vec_tar / np.linalg.norm(core_vec_tar)
inter_angle = np.arccos( np.dot(core_vec_ref_unit, core_vec_tar_unit) )
if inter_angle > np.pi / 2.0: inter_angle = np.pi - inter_angle
return inter_angle
def calc_interdist(core_xyz_ref, core_xyz_tar):
''' Measure the following aspects of two helical cores.
- Interhelical distance vector between the centers.
Refers to http://geomalgorithms.com/a07-_distance.html for the method.
Q is ref, P is tar.
'''
# Obtain the helical core vectors...
core_xyz_ref_nonan = remove_nan(core_xyz_ref)
core_xyz_tar_nonan = remove_nan(core_xyz_tar)
core_vec_ref = core_xyz_ref_nonan[-1] - core_xyz_ref_nonan[0]
core_vec_tar = core_xyz_tar_nonan[-1] - core_xyz_tar_nonan[0]
# Obtain the starting point...
q0 = core_xyz_ref_nonan[0]
p0 = core_xyz_tar_nonan[0]
w0 = p0 - q0
# Obtain the directional vector with magnitude...
v = core_vec_ref
u = core_vec_tar
# Math part...
a = np.dot(u, u)
b = np.dot(u, v)
c = np.dot(v, v)
d = np.dot(u, w0)
e = np.dot(v, w0)
de = a * c - b * b # Denominator
if de == 0: sc, tc = 0, d / b
else: sc, tc = (b * e - c * d) / de, (a * e - b * d) / de
# Calculate distance...
wc = w0 + sc * u - tc * v
inter_dist = np.linalg.norm(wc)
return inter_dist
| 33.677083 | 78 | 0.683266 |
16dd18d4c9d6b529392f25ddf3a0704445995def | 675 | py | Python | matury/2011/6.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | ["MIT"] | 2 | 2021-03-06T22:09:44.000Z | 2021-03-14T14:41:03.000Z | matury/2011/6.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | ["MIT"] | 1 | 2020-03-25T15:42:47.000Z | 2020-10-06T21:41:14.000Z | matury/2011/6.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | ["MIT"] | null | null | null |
from typing import List
with open("dane/liczby.txt") as f:
nums: List[int] = []
nums_9_chars: List[int] = []
for line in f:
sline = line.strip()
num = int(sline, 2)
if len(sline) == 9:
nums_9_chars.append(num)
nums.append(num)
count_even = 0
max_num = 0
for num in nums:
if num % 2 == 0:
count_even += 1
if num > max_num:
max_num = num
print(f"{count_even=}")
print(f"max_num(10): {max_num}, max_num(2): {bin(max_num)[2:]}")
sum_9_chars = 0
for num in nums_9_chars:
sum_9_chars += num
print(f"count of numbers with 9 digits: {len(nums_9_chars)}, their sum: {bin(sum_9_chars)[2:]}")
| 20.454545 | 96 | 0.58963 |
16de03e641bb707c0257c647f4e57b0375e2b543 | 668 | py | Python | Python/fibs.py | familug/FAMILUG | ef8c11d92f4038d80f3f1a24cbab022c19791acf | ["BSD-2-Clause"] | 5 | 2015-10-13T04:13:04.000Z | 2020-12-23T13:47:43.000Z | Python/fibs.py | familug/FAMILUG | ef8c11d92f4038d80f3f1a24cbab022c19791acf | ["BSD-2-Clause"] | null | null | null | Python/fibs.py | familug/FAMILUG | ef8c11d92f4038d80f3f1a24cbab022c19791acf | ["BSD-2-Clause"] | 8 | 2015-07-20T15:37:38.000Z | 2021-04-14T07:18:10.000Z |
if __name__ == "__main__":
print_fib(10)
print
print_fib2(10)
| 15.904762 | 35 | 0.438623 |
16de052924f6b7a0503a267b4aaeda1587303cff | 3,681 | py | Python | src/model/ParseInput.py | slavi010/polyhash-2020 | a11aa694fbf901be4f4db565cb09800f8f57eae7 | ["MIT"] | null | null | null | src/model/ParseInput.py | slavi010/polyhash-2020 | a11aa694fbf901be4f4db565cb09800f8f57eae7 | ["MIT"] | null | null | null | src/model/ParseInput.py | slavi010/polyhash-2020 | a11aa694fbf901be4f4db565cb09800f8f57eae7 | ["MIT"] | null | null | null |
import os
from typing import List
from src.model.Etape import Etape
from src.model.Grille import Grille
from src.model.ItemCase import ItemCase
from src.model.PointMontage import PointMontage
from src.model.Robot import Robot
from src.model.Tache import Tache
| 40.9 | 123 | 0.594947 |
16df196ac8b1d19487d9f38ab432516956acf44f | 13,440 | py | Python | test.py | UnKafkaesque/Sentiment-Analysis | bd8517420534bcfe76f2f60a4f178d1dac540075 | ["MIT"] | null | null | null | test.py | UnKafkaesque/Sentiment-Analysis | bd8517420534bcfe76f2f60a4f178d1dac540075 | ["MIT"] | null | null | null | test.py | UnKafkaesque/Sentiment-Analysis | bd8517420534bcfe76f2f60a4f178d1dac540075 | ["MIT"] | null | null | null |
import os
import sys
import time
import traceback
import project1_Copy as p1
import numpy as np
verbose = False
if __name__ == "__main__":
main()
| 29.154013 | 165 | 0.564658 |
16e1f96bcb5b1ba1faf14b289b7309040c63b043 | 1,619 | py | Python | homework_1/tests/test_3.py | mag-id/epam_python_autumn_2020 | 2488817ba039f5722030a23edc97abe9f70a9a30 | ["MIT"] | null | null | null | homework_1/tests/test_3.py | mag-id/epam_python_autumn_2020 | 2488817ba039f5722030a23edc97abe9f70a9a30 | ["MIT"] | null | null | null | homework_1/tests/test_3.py | mag-id/epam_python_autumn_2020 | 2488817ba039f5722030a23edc97abe9f70a9a30 | ["MIT"] | null | null | null |
"""
Unit tests for module `homework_1.tasks.task_3`.
"""
from tempfile import NamedTemporaryFile
from typing import Tuple
import pytest
from homework_1.tasks.task_3 import find_maximum_and_minimum
| 29.436364 | 87 | 0.542928 |
16e3d9c0e2f0128dd26f1a69eb5d1f88d973387a | 16,524 | py | Python | sdk/python/pulumi_azure_native/storage/v20181101/blob_container.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/storage/v20181101/blob_container.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/storage/v20181101/blob_container.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = ['BlobContainerArgs', 'BlobContainer']
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
public_access: Optional[pulumi.Input['PublicAccess']] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BlobContainerArgs.__new__(BlobContainerArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
__props__.__dict__["container_name"] = container_name
__props__.__dict__["metadata"] = metadata
__props__.__dict__["public_access"] = public_access
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["etag"] = None
__props__.__dict__["has_immutability_policy"] = None
__props__.__dict__["has_legal_hold"] = None
__props__.__dict__["immutability_policy"] = None
__props__.__dict__["last_modified_time"] = None
__props__.__dict__["lease_duration"] = None
__props__.__dict__["lease_state"] = None
__props__.__dict__["lease_status"] = None
__props__.__dict__["legal_hold"] = None
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:storage/v20181101:BlobContainer"), pulumi.Alias(type_="azure-native:storage:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20180201:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20180201:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20180301preview:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20180301preview:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20180701:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20180701:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20190401:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20190401:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20190601:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20190601:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20200801preview:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20200801preview:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20210101:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20210101:BlobContainer"), pulumi.Alias(type_="azure-native:storage/v20210201:BlobContainer"), pulumi.Alias(type_="azure-nextgen:storage/v20210201:BlobContainer")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(BlobContainer, __self__).__init__(
'azure-native:storage/v20181101:BlobContainer',
resource_name,
__props__,
opts)
| 50.53211 | 1,363 | 0.676834 |
16e4af35a62847ccd702cb32c6b8a27f27bee59d | 129 | py | Python | app/admin/views/__init__.py | CAUCHY2932/Northern_Hemisphere | 06e5b3e3f0b47940d5b4549899d062373b019579 | ["BSD-3-Clause"] | null | null | null | app/admin/views/__init__.py | CAUCHY2932/Northern_Hemisphere | 06e5b3e3f0b47940d5b4549899d062373b019579 | ["BSD-3-Clause"] | 8 | 2021-03-19T03:28:32.000Z | 2022-03-11T23:59:00.000Z | app/admin/views/__init__.py | CAUCHY2932/Northern_Hemisphere | 06e5b3e3f0b47940d5b4549899d062373b019579 | ["BSD-3-Clause"] | null | null | null |
# coding:utf-8
import app.admin.views.start
import app.admin.views.book
import app.admin.views.user
import app.admin.views.site
| 18.428571 | 28 | 0.79845 |
16e4dfbf8bd61eccd8ee52165a28c0666d169326 | 840 | py | Python | test_mnist.py | aidiary/chainer-siamese | 6abce9192298e14682a7c766e2a5cdd10f519193 | ["MIT"] | null | null | null | test_mnist.py | aidiary/chainer-siamese | 6abce9192298e14682a7c766e2a5cdd10f519193 | ["MIT"] | null | null | null | test_mnist.py | aidiary/chainer-siamese | 6abce9192298e14682a7c766e2a5cdd10f519193 | ["MIT"] | null | null | null |
import os
import chainer
import chainer.links as L
from net import SiameseNetwork
import numpy as np
import matplotlib.pyplot as plt
# Load the trained Siamese network
model = SiameseNetwork()
chainer.serializers.load_npz(os.path.join('result', 'model.npz'), model)
# Load the MNIST test set
_, test = chainer.datasets.get_mnist(ndim=3)
test_data, test_label = test._datasets
# Embed the test images into the 2-D feature space
y = model.forward_once(test_data)
feat = y.data
# One color for each of the ten digit classes
c = ['#ff0000', '#ffff00', '#00ff00', '#00ffff', '#0000ff',
'#ff00ff', '#990000', '#999900', '#009900', '#009999']
# Plot the 2-D embeddings, one color per digit class
for i in range(10):
f = feat[np.where(test_label == i)]
plt.plot(f[:, 0], f[:, 1], '.', c=c[i])
plt.legend(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
plt.savefig(os.path.join('result', 'result.png'))
| 24.705882 | 72 | 0.667857 |
16e5abfcca6728651310e1b9d7d20815d0685476 | 5,535 | py | Python | TwoFeetTempoMove.py | b0nz0/TwisterTempo | fc975af4095509d8ec4fe2f84313fe152577bed2 | ["MIT"] | null | null | null | TwoFeetTempoMove.py | b0nz0/TwisterTempo | fc975af4095509d8ec4fe2f84313fe152577bed2 | ["MIT"] | null | null | null | TwoFeetTempoMove.py | b0nz0/TwisterTempo | fc975af4095509d8ec4fe2f84313fe152577bed2 | ["MIT"] | null | null | null |
from random import randrange, random
from time import time
import logging
from TwisterTempoGUI import TwisterTempoGUI
| 45.368852 | 106 | 0.592954 |
16e6fb3c075a8554e7e6d5fe5397106b44ef9bf3 | 311 | py | Python | plugins/panorama/panorama/__init__.py | mohnjahoney/website_source | edc86a869b90ae604f32e736d9d5ecd918088e6a | ["MIT"] | 13 | 2020-01-27T09:02:25.000Z | 2022-01-20T07:45:26.000Z | plugins/panorama/panorama/__init__.py | mohnjahoney/website_source | edc86a869b90ae604f32e736d9d5ecd918088e6a | ["MIT"] | 29 | 2020-03-22T06:57:57.000Z | 2022-01-24T22:46:42.000Z | plugins/panorama/panorama/__init__.py | mohnjahoney/website_source | edc86a869b90ae604f32e736d9d5ecd918088e6a | ["MIT"] | 6 | 2020-07-10T00:13:30.000Z | 2022-01-26T08:22:33.000Z |
# -*- coding: utf-8 -*-
"""
Panorama is a Pelican plugin that generates statistics from blog posts
(number of posts per month, categories, and so on) and displays them as charts.
Project location: https://github.com/romainx/panorama
"""
__version__ = "0.2.0"
__author__ = "romainx"
from .panorama import *
| 22.214286 | 83 | 0.726688 |
16e747bd6febb0a03dbe8fb17268efc47ff0c0ee | 7,999 | py | Python | transitfeed/transfer.py | cclauss/transitfeed | 54a4081b59bfa015d5f0405b68203e61762d4a52 | ["Apache-2.0"] | 9 | 2015-07-21T17:41:25.000Z | 2020-08-26T13:37:08.000Z | transitfeed/transfer.py | cclauss/transitfeed | 54a4081b59bfa015d5f0405b68203e61762d4a52 | ["Apache-2.0"] | 4 | 2015-06-11T18:40:16.000Z | 2020-04-03T20:31:40.000Z | transitfeed/transfer.py | cclauss/transitfeed | 54a4081b59bfa015d5f0405b68203e61762d4a52 | ["Apache-2.0"] | 4 | 2016-02-09T21:45:50.000Z | 2020-07-30T21:52:50.000Z |
#!/usr/bin/python2.5
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gtfsobjectbase import GtfsObjectBase
import problems as problems_module
import util
| 40.811224 | 91 | 0.665833 |
16e7d64f5a23705a73ced1fae75f2e7697ae34b2 | 2,067 | py | Python | social/urls.py | Kizito-Alberrt/insta-social | c632e901cd81b0b139f88ad55236efd6c7ddbef1 | ["MIT"] | null | null | null | social/urls.py | Kizito-Alberrt/insta-social | c632e901cd81b0b139f88ad55236efd6c7ddbef1 | ["MIT"] | null | null | null | social/urls.py | Kizito-Alberrt/insta-social | c632e901cd81b0b139f88ad55236efd6c7ddbef1 | ["MIT"] | null | null | null |
from django.urls import path
from . import views
from . views import UserPostListView, PostDetailView, PostDeleteview, PostCreateView, PostUpdateView,CommentUpdateView, VideoCreateView, video_update
urlpatterns = [
path('',views.base, name='base'),
path('login',views.login, name='login'),
path('register',views.register, name='register'),
path('index',views.index, name='index'),
path('logout',views.logout, name='logout'),
path('like_post', views.like_post, name='like_post'),
path('find_friends',views.find_friends, name='find_friends'),
path('profile',views.profile, name='profile'),
path('profile_update', views.profile_update, name='profile_update'),
path('user/<str:username>', UserPostListView.as_view(), name='user_posts'),
path('post/<int:pk>/',PostDetailView.as_view(), name='post_details' ),
path('post/<int:pk>/delete/',PostDeleteview.as_view(), name='post_delete' ),
path('profile_posts',views.profile_posts, name='profile_posts'),
path('results',views.results, name='results'),
path('post/new/',PostCreateView.as_view(), name='post-create' ),
path('post_update',views.post_update, name='post_update'),
path('post/<int:pk>/update',PostUpdateView.as_view(), name='post-update' ),
path('profile_photos',views.profile_photos, name='profile_photos'),
path('comment_update/<int:id>',views.comment_update, name='comment_update'),
path('comment/<int:pk>/update',CommentUpdateView.as_view(), name='comment-update' ),
path('delete/<int:id>',views.delete, name='delete'),
path('favourite',views.favourite, name='favourite'),
path('favourite_posts',views.favourite_posts, name='favourite_posts'),
path('video/new/',VideoCreateView.as_view(), name='video-create' ),
path('post/<int:pk>/video',video_update.as_view(), name='video_update' ),
# path('<str:username>',views.userprofile, name='userprofile'),
path('video_posts',views.video_posts, name='video_posts'),
path('user_videos',views.user_videos,name='user_videos'),
]
| 43.0625 | 149 | 0.701016 |
16e8783047883ecc17068c1f63c87b161a271a5f | 1,054 | py | Python | vtkplotter_examples/other/dolfin/collisions.py | ismarou/vtkplotter-examples | 1eefcc026be169ab7a77a5bce6dec8044c33b554 | ["MIT"] | 4 | 2020-07-30T02:38:29.000Z | 2021-09-12T14:30:18.000Z | vtkplotter_examples/other/dolfin/collisions.py | ismarou/vtkplotter-examples | 1eefcc026be169ab7a77a5bce6dec8044c33b554 | ["MIT"] | null | null | null | vtkplotter_examples/other/dolfin/collisions.py | ismarou/vtkplotter-examples | 1eefcc026be169ab7a77a5bce6dec8044c33b554 | ["MIT"] | null | null | null |
'''
compute_collision() will compute the collision of all the entities with
a Point while compute_first_collision() will always return its first entry.
Especially if a point is on an element edge this can be tricky.
You may also want to compare with the Cell.contains(Point) tool.
'''
# Script by Rudy at https://fenicsproject.discourse.group/t/
# any-function-to-determine-if-the-point-is-in-the-mesh/275/3
import dolfin
from vtkplotter.dolfin import shapes, plot, printc
n = 4
Px = 0.5
Py = 0.5
mesh = dolfin.UnitSquareMesh(n, n)
bbt = mesh.bounding_box_tree()
collisions = bbt.compute_collisions(dolfin.Point(Px, Py))
collisions1st = bbt.compute_first_entity_collision(dolfin.Point(Px, Py))
printc("collisions : ", collisions)
printc("collisions 1st: ", collisions1st)
for cell in dolfin.cells(mesh):
contains = cell.contains(dolfin.Point(Px, Py))
printc("Cell", cell.index(), "contains P:", contains, c=contains)
###########################################
pt = shapes.Point([Px, Py], c='blue')
plot(mesh, pt, text=__doc__)
| 35.133333 | 75 | 0.705882 |
16e8943240219eac91364d8b6c27599e32680763 | 622 | py | Python | alice_check_train/__main__.py | AsciiShell/Alice-Check-Train | 49d5804d28a237756a7cf27e451ff56166fbee5c | ["MIT"] | null | null | null | alice_check_train/__main__.py | AsciiShell/Alice-Check-Train | 49d5804d28a237756a7cf27e451ff56166fbee5c | ["MIT"] | null | null | null | alice_check_train/__main__.py | AsciiShell/Alice-Check-Train | 49d5804d28a237756a7cf27e451ff56166fbee5c | ["MIT"] | null | null | null |
import datetime
import os
from alice_check_train.main import rasp_to_text
from alice_check_train.rasp_api import get_rasp, filter_rasp
if __name__ == '__main__':
main()
| 25.916667 | 65 | 0.688103 |
16e89821c774aa40fe5b74ea387488fc99280078 | 7,309 | py | Python | aws-KNN-RESTful.py | cakebytheoceanLuo/k-NN | 52c66b5e38490431b3079c2baaad38785802f4e5 | ["Apache-2.0"] | 1 | 2021-11-16T13:22:09.000Z | 2021-11-16T13:22:09.000Z | aws-KNN-RESTful.py | cakebytheoceanLuo/k-NN | 52c66b5e38490431b3079c2baaad38785802f4e5 | ["Apache-2.0"] | null | null | null | aws-KNN-RESTful.py | cakebytheoceanLuo/k-NN | 52c66b5e38490431b3079c2baaad38785802f4e5 | ["Apache-2.0"] | null | null | null |
# https://medium.com/@kumon/how-to-realize-similarity-search-with-elasticsearch-3dd5641b9adb
# https://docs.aws.amazon.com/opensearch-service/latest/developerguide/knn.html
import sys
import requests
import h5py
import numpy as np
import json
import aiohttp
import asyncio
import time
import httpx
from requests.auth import HTTPBasicAuth
from statistics import mean
# if len(sys.argv) != 2:
# print("Type in the efSearch!")
# sys.exit()
# path = '/tmp/sift-128-euclidean.hdf5.1M' # float dataset
# path = '/tmp/sift-128-euclidean.hdf5' # float dataset
path = '/home/ubuntu/sift-128-euclidean.hdf5' # float dataset
output_csv = '/tmp/sift-es.csv'
# url = 'http://127.0.0.1:9200/sift-index/'
host = 'https://vpc-....ap-southeast-1.es.amazonaws.com/' # single node
# host = 'https://vpc-....ap-southeast-1.es.amazonaws.com/' # two nodes
url = host + 'sift-index/'
requestHeaders = {'content-type': 'application/json'} # https://stackoverflow.com/questions/51378099/content-type-header-not-supported
auth = HTTPBasicAuth('admin', 'I#vu7bTAHB')
# Build an index
#https://stackoverflow.com/questions/17301938/making-a-request-to-a-restful-api-using-python
# PUT sift-index
data = '''{
"settings": {
"index": {
"knn": true,
"knn.space_type": "l2",
"knn.algo_param.m": 6,
"knn.algo_param.ef_construction": 50,
"knn.algo_param.ef_search": 50,
"refresh_interval": -1,
"translog.flush_threshold_size": "10gb",
"number_of_replicas": 0
}
},
"mappings": {
"properties": {
"sift_vector": {
"type": "knn_vector",
"dimension": 128
}
}
}
}'''
# https://medium.com/@kumon/how-to-realize-similarity-search-with-elasticsearch-3dd5641b9adb
response = requests.put(url, data=data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
# response = requests.put(url, data=data, verify=False, headers=requestHeaders, auth=auth)
assert response.status_code==requests.codes.ok
# cluster_url = 'http://127.0.0.1:9200/_cluster/settings'
cluster_url = host + '_cluster/settings'
cluster_data = '''{
"persistent" : {
"knn.algo_param.index_thread_qty": 16
}
}
'''
response = requests.put(cluster_url, data=cluster_data, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), headers=requestHeaders)
assert response.status_code==requests.codes.ok
# Bulkload into index
bulk_template = '{ "index": { "_index": "sift-index", "_id": "%s" } }\n{ "sift_vector": [%s] }\n'
hf = h5py.File(path, 'r')
for key in hf.keys():
print("A key of hf is %s" % key) #Names of the groups in HDF5 file.
vectors = np.array(hf["train"][:])
num_vectors, dim = vectors.shape
print("num_vectors: %d" % num_vectors)
print("dim: %d" % dim)
bulk_data = ""
start = time.time()
for (id,vector) in enumerate(vectors):
assert len(vector)==dim
vector_str = ""
for num in vector:
vector_str += str(num) + ','
vector_str = vector_str[:-1]
id_str = str(id)
single_bulk_done = bulk_template % (id_str, vector_str)
bulk_data += single_bulk_done
if (id+1) % 100000 == 0:
print(str(id+1))
# POST _bulk
response = requests.put(url + '_bulk', data=bulk_data, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), headers=requestHeaders)
assert response.status_code==requests.codes.ok
bulk_data = ""
end = time.time()
print("Insert Time: %d mins" % ((end - start) / 60.0)) # Unit: min
# refresh_url = 'http://127.0.0.1:9200/sift-index/_settings'
refresh_url = host + 'sift-index/_settings'
refresh_data = '''{
"index" : {
"refresh_interval": "1s"
}
}
'''
response = requests.put(refresh_url, data=refresh_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
assert response.status_code==requests.codes.ok
# response = requests.post('http://127.0.0.1:9200/sift-index/_refresh', verify=False, headers=requestHeaders)
# assert response.status_code==requests.codes.ok
# merge_url = 'http://127.0.0.1:9200/sift-index/_forcemerge?max_num_segments=1'
merge_url = host + 'sift-index/_forcemerge?max_num_segments=1'
merge_response = requests.post(merge_url, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), timeout=600)
assert merge_response.status_code==requests.codes.ok
# warmup_url = 'http://127.0.0.1:9200/_opendistro/_knn/warmup/sift-index'
warmup_url = host + '_opendistro/_knn/warmup/sift-index'
warmup_response = requests.get(warmup_url, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
assert warmup_response.status_code==requests.codes.ok
# Send queries
total_time = 0 # in ms
hits = 0 # for recall calculation
query_template = '''
{
"size": 50,
"query": {"knn": {"sift_vector": {"vector": [%s],"k": 50}}}
}
'''
queries = np.array(hf["test"][:])
nq = len(queries)
neighbors = np.array(hf["neighbors"][:])
# distances = np.array(hf["distances"][:])
num_queries, q_dim = queries.shape
print("num_queries: %d" % num_queries)
print("q_dim: %d" % q_dim)
assert q_dim==dim
ef_search_list = [50, 100, 150, 200, 250, 300]
for ef_search in ef_search_list:
ef_data = '''{
"index": {
"knn.algo_param.ef_search": %d
}
}'''
ef_data = ef_data % ef_search
### Update Index Setting: efSearch
response = requests.put(url + '_settings', data=ef_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'))
assert response.status_code==requests.codes.ok
total_time_list = []
hits_list = []
for count in range(5):
total_time = 0 # in ms
hits = 0 # for recall calculation
query_template = '''
'''
single_query = '''{}\n{"size": 50, "query": {"knn": {"sift_vector": {"vector": [%s],"k": 50}}}}\n'''
for (id,query) in enumerate(queries):
assert len(query)==dim
query_str = ""
for num in query:
query_str += str(num) + ','
query_str = query_str[:-1]
# GET sift-index/_search
single_query_done = single_query % (query_str)
query_template += single_query_done
query_data = query_template
# print(query_data)
response = requests.get(url + '_msearch', data=query_data, headers=requestHeaders, auth=HTTPBasicAuth('admin', 'I#vu7bTAHB'), stream=True)
assert response.status_code==requests.codes.ok
# print(response.text)
result = json.loads(response.text)
# QPS
total_time = result['took']
# tooks = []
# for i in range(len(queries)):
# for ele in result['responses']:
# tooks.append(int(ele['took']))
for id in range(len(queries)):
# Recall
neighbor_id_from_result = []
for ele in result['responses'][id]['hits']['hits']:
neighbor_id_from_result.append(int(ele['_id']))
assert len(neighbor_id_from_result)==50
# print("neighbor_id_from_result: ")
# print(neighbor_id_from_result)
neighbor_id_gt = neighbors[id][0:50] # topK=50
# print("neighbor_id_gt")
# print(neighbor_id_gt)
hits_q = len(list(set(neighbor_id_from_result) & set(neighbor_id_gt)))
# print("# hits of this query with topk=50: %d" % hits_q)
hits += hits_q
total_time_list.append(total_time)
hits_list.append(hits)
print(total_time_list)
total_time_avg = mean(total_time_list[2:-1])
hits_avg = mean(hits_list)
QPS = 1.0 * nq / (total_time_avg / 1000.0)
recall = 1.0 * hits_avg / (nq * 50)
print(ef_search, QPS, recall)
| 33.374429 | 142 | 0.675332 |
16ea2d8be166b5650aea4af33dbde9040a41f768 | 1,438 | py | Python | test/test_docker_images.py | bauerj/cibuildwheel | b4addbf4a94daa76769d4f779e169406b0ef99ae | ["BSD-2-Clause"] | null | null | null | test/test_docker_images.py | bauerj/cibuildwheel | b4addbf4a94daa76769d4f779e169406b0ef99ae | ["BSD-2-Clause"] | null | null | null | test/test_docker_images.py | bauerj/cibuildwheel | b4addbf4a94daa76769d4f779e169406b0ef99ae | ["BSD-2-Clause"] | null | null | null |
import platform
import textwrap
import pytest
from . import test_projects, utils
dockcross_only_project = test_projects.new_c_project(
setup_py_add=textwrap.dedent(r'''
import os, sys
# check that we're running in the correct docker image as specified in the
# environment options CIBW_MANYLINUX1_*_IMAGE
if "linux" in sys.platform and not os.path.exists("/dockcross"):
raise Exception(
"/dockcross directory not found. Is this test running in the correct docker image?"
)
''')
)
| 35.073171 | 116 | 0.672462 |
16eb07b6e691db19202917b717c2ccb87df9fd9d | 32,556 | py | Python | real_trade/MoveAverageTradePosition.py | taka-mochi/cryptocurrency-autotrading | 16677018c793d7bd3fffdcd3575aecb3535dbd04 | ["BSD-3-Clause"] | 3 | 2018-05-22T22:45:23.000Z | 2020-02-13T16:45:03.000Z | real_trade/MoveAverageTradePosition.py | taka-mochi/cryptocurrency-autotrading | 16677018c793d7bd3fffdcd3575aecb3535dbd04 | ["BSD-3-Clause"] | null | null | null | real_trade/MoveAverageTradePosition.py | taka-mochi/cryptocurrency-autotrading | 16677018c793d7bd3fffdcd3575aecb3535dbd04 | ["BSD-3-Clause"] | null | null | null |
# coding: utf-8
import math
import dateutil
import dateutil.parser
import json
from ChartBars import Chart
from ChartUpdaterByCCWebsocket import ChartUpdaterByCoincheckWS
from Util import BitcoinUtil
# a class for one position
| 40.593516 | 189 | 0.593593 |
16ebc077aad6a4dd684131dc7271bbdbd5696af9 | 743 | py | Python | test.py | sbcshop/PiRelay-8 | 4d881f259c07cd4fdf3c57431feb1587aaa0e861 | ["MIT"] | 2 | 2021-09-07T03:25:00.000Z | 2021-09-07T17:28:46.000Z | test.py | sbcshop/PiRelay-8 | 4d881f259c07cd4fdf3c57431feb1587aaa0e861 | ["MIT"] | null | null | null | test.py | sbcshop/PiRelay-8 | 4d881f259c07cd4fdf3c57431feb1587aaa0e861 | ["MIT"] | null | null | null |
from PiRelay8 import Relay
import time
r1 = Relay("RELAY1")
r2 = Relay("RELAY2")
r3 = Relay("RELAY3")
r4 = Relay("RELAY4")
r5 = Relay("RELAY5")
r6 = Relay("RELAY6")
r7 = Relay("RELAY7")
r8 = Relay("RELAY8")
r1.off()
r2.off()
r3.off()
r4.off()
r5.off()
r6.off()
r7.off()
r8.off()
r1.on()
time.sleep(0.5)
r1.off()
time.sleep(0.5)
r2.on()
time.sleep(0.5)
r2.off()
time.sleep(0.5)
r3.on()
time.sleep(0.5)
r3.off()
time.sleep(0.5)
r4.on()
time.sleep(0.5)
r4.off()
time.sleep(0.5)
r5.on()
time.sleep(0.5)
r5.off()
time.sleep(0.5)
r6.on()
time.sleep(0.5)
r6.off()
time.sleep(0.5)
r7.on()
time.sleep(0.5)
r7.off()
time.sleep(0.5)
r8.on()
time.sleep(0.5)
r8.off()
time.sleep(0.5)
| 11.983871 | 27 | 0.572005 |
16ebce5b29644a3fdd8bee60c8ef43a322219b10 | 9,086 | py | Python | bot/cogs/clan.py | johnvictorfs/atlantisbot-rewrite | ac6887f91438206ba926be59d8fd2bedd07923ad | ["MIT"] | null | null | null | bot/cogs/clan.py | johnvictorfs/atlantisbot-rewrite | ac6887f91438206ba926be59d8fd2bedd07923ad | ["MIT"] | 5 | 2018-09-28T18:01:28.000Z | 2019-02-12T18:49:06.000Z | bot/cogs/clan.py | johnvictorfs/atlantisbot-rewrite | ac6887f91438206ba926be59d8fd2bedd07923ad | ["MIT"] | 1 | 2018-10-15T22:41:47.000Z | 2018-10-15T22:41:47.000Z |
import rs3clans
import discord
from discord.ext import commands
from bot.bot_client import Bot
from bot.utils.tools import separator
from bot.utils.context import Context
| 42.064815 | 115 | 0.569117 |
16ec4bab280bd7d838f873bdb4d147f41ca2f107
| 2,539 |
py
|
Python
|
otcextensions/tests/functional/osclient/vpc/v2/common.py
|
zsoltn/python-otcextensions
|
4c0fa22f095ebd5f9636ae72acbae5048096822c
|
[
"Apache-2.0"
] | 10 |
2018-03-03T17:59:59.000Z
|
2020-01-08T10:03:00.000Z
|
otcextensions/tests/functional/osclient/vpc/v2/common.py
|
zsoltn/python-otcextensions
|
4c0fa22f095ebd5f9636ae72acbae5048096822c
|
[
"Apache-2.0"
] | 208 |
2020-02-10T08:27:46.000Z
|
2022-03-29T15:24:21.000Z
|
otcextensions/tests/functional/osclient/vpc/v2/common.py
|
zsoltn/python-otcextensions
|
4c0fa22f095ebd5f9636ae72acbae5048096822c
|
[
"Apache-2.0"
] | 15 |
2020-04-01T20:45:54.000Z
|
2022-03-23T12:45:43.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import json
import uuid
from datetime import datetime
from openstackclient.tests.functional import base
| 33.853333 | 77 | 0.639228 |
16ef740b41f41832481d4956834bb037ddc3b7b6
| 2,614 |
py
|
Python
|
tests/test_nested_structures_inside_structure_values.py
|
Robinson04/StructNoSQL
|
335c63593025582336bb67ad0b0ed39d30800b74
|
[
"MIT"
] | 3 |
2020-10-30T23:31:26.000Z
|
2022-03-30T21:48:40.000Z
|
tests/test_nested_structures_inside_structure_values.py
|
Robinson04/StructNoSQL
|
335c63593025582336bb67ad0b0ed39d30800b74
|
[
"MIT"
] | 42 |
2020-09-16T15:23:11.000Z
|
2021-09-20T13:00:50.000Z
|
tests/test_nested_structures_inside_structure_values.py
|
Robinson04/StructNoSQL
|
335c63593025582336bb67ad0b0ed39d30800b74
|
[
"MIT"
] | 2 |
2021-01-03T21:37:22.000Z
|
2021-08-12T20:28:52.000Z
|
import unittest
from typing import Set, Optional, Dict, List
from uuid import uuid4
from StructNoSQL import BaseField, MapModel, TableDataModel
from tests.components.playground_table_clients import PlaygroundDynamoDBBasicTable, TEST_ACCOUNT_ID
if __name__ == '__main__':
unittest.main()
| 38.441176 | 114 | 0.704285 |
16f050092210b638486f36ba124add5847de3ce7
| 9,390 |
py
|
Python
|
test/cpython/test_base64.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1 |
2020-02-06T14:28:45.000Z
|
2020-02-06T14:28:45.000Z
|
test/cpython/test_base64.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
test/cpython/test_base64.py
|
aisk/pyston
|
ac69cfef0621dbc8901175e84fa2b5cb5781a646
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1 |
2020-02-06T14:29:00.000Z
|
2020-02-06T14:29:00.000Z
|
import unittest
from test import test_support
import base64
def test_main():
test_support.run_unittest(__name__)
if __name__ == '__main__':
test_main()
| 43.271889 | 81 | 0.587859 |
16f095ebea3707b39efe449bdb8d248fee8a8b6e
| 7,154 |
py
|
Python
|
src/Path.py
|
gabbonj/Workbench
|
86bbb2e3184e0f2fc5e9ac6dc7cfec86473fb7b9
|
[
"MIT"
] | 2 |
2020-08-06T12:20:24.000Z
|
2020-08-06T12:20:43.000Z
|
src/Path.py
|
gabbonj/Workbench
|
86bbb2e3184e0f2fc5e9ac6dc7cfec86473fb7b9
|
[
"MIT"
] | null | null | null |
src/Path.py
|
gabbonj/Workbench
|
86bbb2e3184e0f2fc5e9ac6dc7cfec86473fb7b9
|
[
"MIT"
] | null | null | null |
import numpy as np
from ctypes import c_void_p
from .Shader import Shader
from .transforms import *
from OpenGL.GL import *
| 46.75817 | 131 | 0.643836 |
bc3fd4c771ec63f015857f770191b3f22d0f45f1
| 1,406 |
py
|
Python
|
icfree/echo_instructor/args.py
|
brsynth/icfree-ml
|
7f6c67f26bf60e9cadd59855aebb6bdb5bd64fda
|
[
"MIT"
] | 1 |
2022-01-13T17:54:12.000Z
|
2022-01-13T17:54:12.000Z
|
icfree/echo_instructor/args.py
|
brsynth/icfree-ml
|
7f6c67f26bf60e9cadd59855aebb6bdb5bd64fda
|
[
"MIT"
] | null | null | null |
icfree/echo_instructor/args.py
|
brsynth/icfree-ml
|
7f6c67f26bf60e9cadd59855aebb6bdb5bd64fda
|
[
"MIT"
] | null | null | null |
from argparse import (
ArgumentParser
)
from os import getcwd as os_getcwd
DEFAULT_OUTPUT_FOLDER = os_getcwd()
DEFAULT_SAMPLE_VOLUME = 10000
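# NOTE: the parser-builder itself is not included in this snippet; a minimal
# sketch of what it presumably wires up (function name and options are assumptions):
def build_args_parser(program, description):
    parser = ArgumentParser(program, description)
    parser.add_argument('--output-folder', type=str, default=DEFAULT_OUTPUT_FOLDER,
                        help='output folder for the generated instruction files')
    parser.add_argument('--sample-volume', type=int, default=DEFAULT_SAMPLE_VOLUME,
                        help='final sample volume in each well (nL)')
    return parser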
| 20.985075 | 75 | 0.600996 |
bc4085bfce6da5fce4ce47af500b1138fc887137
| 246 |
py
|
Python
|
ex1_01.py
|
sitdh/59.com-prog
|
24f536a72b0467ff3ee1615f515ecff9fbf36bb3
|
[
"MIT"
] | 1 |
2021-04-25T14:46:12.000Z
|
2021-04-25T14:46:12.000Z
|
ex1_01.py
|
sitdh/com-prog
|
24f536a72b0467ff3ee1615f515ecff9fbf36bb3
|
[
"MIT"
] | null | null | null |
ex1_01.py
|
sitdh/com-prog
|
24f536a72b0467ff3ee1615f515ecff9fbf36bb3
|
[
"MIT"
] | null | null | null |
import math
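# Approximate cos(x) with the first four terms of its Taylor series:
#   cos(x) = 1 - x^2/2! + x^4/4! - x^6/6!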
x = float(input())
prop_2 = -(x**2) / math.factorial(2)
prop_3 = (x**4) / math.factorial(4)
prop_4 = -(x**6) / math.factorial(6)
cos_x = float(1 + prop_2 + prop_3 + prop_4)
print(prop_2)
print(prop_3)
print(prop_4)
print(cos_x)
| 14.470588 | 43 | 0.646341 |
bc41a5fa588f792a592b96d3c6500dbf29045ec5
| 3,211 |
py
|
Python
|
test/datagateway_api/icat/filters/test_where_filter.py
|
MRichards99/datagateway-api
|
2e6133636fed950a16190d2f703f152c73bb5b1b
|
[
"Apache-2.0"
] | 2 |
2022-02-10T17:47:53.000Z
|
2022-02-10T19:04:02.000Z
|
test/datagateway_api/icat/filters/test_where_filter.py
|
MRichards99/datagateway-api
|
2e6133636fed950a16190d2f703f152c73bb5b1b
|
[
"Apache-2.0"
] | 183 |
2020-12-02T11:34:18.000Z
|
2022-03-29T15:19:23.000Z
|
test/datagateway_api/icat/filters/test_where_filter.py
|
MRichards99/datagateway-api
|
2e6133636fed950a16190d2f703f152c73bb5b1b
|
[
"Apache-2.0"
] | 7 |
2021-04-13T17:26:05.000Z
|
2021-11-22T14:24:24.000Z
|
import pytest
from datagateway_api.src.common.exceptions import BadRequestError, FilterError
from datagateway_api.src.datagateway_api.filter_order_handler import FilterOrderHandler
from datagateway_api.src.datagateway_api.icat.filters import PythonICATWhereFilter
| 43.391892 | 87 | 0.617253 |
bc43defd49d4ea43585c8d3910e9622ef8bc8d38
| 1,099 |
py
|
Python
|
scrapy/spider/spider/items.py
|
huobingli/splider
|
a62f0553160531a0735b249b0dc49747e9c821f9
|
[
"MIT"
] | null | null | null |
scrapy/spider/spider/items.py
|
huobingli/splider
|
a62f0553160531a0735b249b0dc49747e9c821f9
|
[
"MIT"
] | null | null | null |
scrapy/spider/spider/items.py
|
huobingli/splider
|
a62f0553160531a0735b249b0dc49747e9c821f9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst
# class SpiderItem(scrapy.Item):
# # define the fields for your item here like:
# # name = scrapy.Field()
# pass
#
#
#
# class TorrentItem(scrapy.Item):
# url = scrapy.Field()
# name = scrapy.Field()
# description = scrapy.Field()
# size = scrapy.Field()
#
# import scrapy
| 24.977273 | 52 | 0.66424 |
bc441b2065c8199c0dd4d1448231c084f1b1cfa3
| 7,160 |
py
|
Python
|
codetools/contexts/multi_context.py
|
enthought/codetools
|
20d8bb1eba68145750a1b689655b839078121474
|
[
"BSD-3-Clause"
] | 29 |
2015-08-10T20:25:00.000Z
|
2021-11-30T23:34:24.000Z
|
codetools/contexts/multi_context.py
|
enthought/codetools
|
20d8bb1eba68145750a1b689655b839078121474
|
[
"BSD-3-Clause"
] | 40 |
2015-01-05T15:01:37.000Z
|
2022-03-11T13:47:06.000Z
|
codetools/contexts/multi_context.py
|
enthought/codetools
|
20d8bb1eba68145750a1b689655b839078121474
|
[
"BSD-3-Clause"
] | 4 |
2015-04-14T10:06:26.000Z
|
2021-01-19T16:46:48.000Z
|
#
# (C) Copyright 2013 Enthought, Inc., Austin, TX
# All right reserved.
#
# This file is open source software distributed according to the terms in
# LICENSE.txt
#
""" Context holding multiple subcontexts.
"""
from __future__ import absolute_import
from itertools import chain
from collections import MutableMapping as DictMixin
from traits.api import (Bool, List, Str, Undefined, Supports,
adapt, provides, on_trait_change)
from .data_context import DataContext, ListenableMixin, PersistableMixin
from .i_context import ICheckpointable, IDataContext, IRestrictedContext
from .utils import safe_repr
| 30.468085 | 85 | 0.557682 |
bc447d214c0f2c389991fd5918f6f13fed4aaf6b
| 634 |
py
|
Python
|
line_counter/TestCodes/python_test.py
|
FMoller/coding-auxiliary-tools
|
21784f01731404f33059f3a8c4e73a104709ffe9
|
[
"MIT"
] | null | null | null |
line_counter/TestCodes/python_test.py
|
FMoller/coding-auxiliary-tools
|
21784f01731404f33059f3a8c4e73a104709ffe9
|
[
"MIT"
] | null | null | null |
line_counter/TestCodes/python_test.py
|
FMoller/coding-auxiliary-tools
|
21784f01731404f33059f3a8c4e73a104709ffe9
|
[
"MIT"
] | null | null | null |
"""A simple file to test the line_counter performance in python
This is a multiline docstring
"""
__author__ = "Frederico Moeller"
__copyright__ = ""
__credits__ = ["Frederico Moeller"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Frederico Moeller"
__email__ = ""
__status__ = ""
#import things
import math
#define things
def some_function(var_one, var_two,
var_three):
"""This is a function that do things"""
if var_one > var_two:
if var_two*var_three > var_one:
return "blab" #this happens
else:
return "blob"
else:
return "fish"
| 21.133333 | 63 | 0.641956 |
bc44b8524b66c7a720d547f156846ae7572f5832
| 4,602 |
py
|
Python
|
causal_attribution/data.py
|
VaniW/deconfounded-lexicon-induction
|
419ecf717f51cfd1741732ca3191b36b565bd1a4
|
[
"MIT"
] | 25 |
2020-11-03T16:38:51.000Z
|
2022-03-28T11:53:08.000Z
|
causal_attribution/data.py
|
VaniW/deconfounded-lexicon-induction
|
419ecf717f51cfd1741732ca3191b36b565bd1a4
|
[
"MIT"
] | 1 |
2019-12-15T08:33:47.000Z
|
2019-12-16T17:33:15.000Z
|
causal_attribution/data.py
|
VaniW/deconfounded-lexicon-induction
|
419ecf717f51cfd1741732ca3191b36b565bd1a4
|
[
"MIT"
] | 7 |
2021-05-03T01:01:28.000Z
|
2022-02-19T04:06:20.000Z
|
"""Data pipelines."""
from collections import defaultdict, OrderedDict
from tqdm import tqdm
from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler
import torch
def get_info(examples, vocab=None, max_seq_len=256):
"""Gathers info on and creats a featurized example generator for a list of raw examples.
Args:
examples: list(list, float, or string). Examples to create generator for.
vocab: list(str). A vocabulary for discrete datatypes (e.g. text or categorical).
max_seq_len: int. maximum sequence length for text examples.
Returns:
A featurizer function for the examples, the inferred example type, and the (possibly extended) vocabulary.
"""
assert isinstance(examples, list), 'examples must be list; got ' + str(type(examples))
assert len(examples) > 0, 'Empty example list!'
# Text
if isinstance(examples[0], list):
assert vocab is not None, 'ERROR: must provide a vocab.'
example_type = 'input'
vocab = ['UNK', 'PAD'] + vocab
tok2id = {tok: i for i, tok in enumerate(vocab)}
ngrams = max(len(x.split()) for x in vocab)
unk_id = 0
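        # NOTE: the featurizer for text inputs is missing from this snippet; a
        # minimal sketch (an assumption) that ignores the n-gram handling
        # implied by `ngrams`: map tokens to ids, then pad out to max_seq_len.
        def featurizer(ex):
            ids = [tok2id.get(tok, unk_id) for tok in ex[:max_seq_len]]
            return ids + [tok2id['PAD']] * (max_seq_len - len(ids))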
# Continuous
elif isinstance(examples[0], float) or isinstance(examples[0], int):
example_type = 'continuous'
vocab = ['N/A']
if isinstance(examples[0], int):
featurizer = lambda ex: float(ex)
else:
featurizer = lambda ex: ex
# Categorical
elif isinstance(examples[0], str):
example_type = 'categorical'
if not vocab:
vocab = ['UNK'] + sorted(list(set(examples)))
tok2id = {tok: i for i, tok in enumerate(vocab)}
featurizer = lambda ex: tok2id.get(ex, 0) # 0 is the unk id.
else:
print("ERROR: unrecognized example type: ", examples[0])
quit()
return featurizer, example_type, vocab
| 35.674419 | 93 | 0.612994 |
bc44f25c8ff96beccbbd3fbaa05ae2dcf6790cc6
| 576 |
py
|
Python
|
fopp/Chapter 12. Functions/get_num_digits.py
|
H2u-Hwng/EVC
|
c650fe7356a333011514cf9025dfd97bf71b1de3
|
[
"MIT"
] | null | null | null |
fopp/Chapter 12. Functions/get_num_digits.py
|
H2u-Hwng/EVC
|
c650fe7356a333011514cf9025dfd97bf71b1de3
|
[
"MIT"
] | null | null | null |
fopp/Chapter 12. Functions/get_num_digits.py
|
H2u-Hwng/EVC
|
c650fe7356a333011514cf9025dfd97bf71b1de3
|
[
"MIT"
] | null | null | null |
# Take number, and convert integer to string
# Calculate and return number of digits
# Define main function
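# NOTE: the function bodies are missing from this snippet; a minimal
# reconstruction of what the comments above describe (an assumption):
def get_num_digits(num):
    return len(str(num))

def main():
    num = int(input())
    print(get_num_digits(num))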
# Call main function
main()
| 20.571429 | 70 | 0.647569 |
bc450f5f688b95fda7b269a4ca568c7ecc5143ca
| 4,992 |
py
|
Python
|
whois/__init__.py
|
mzpqnxow/whois-1
|
b5623ed25cfa58d9457d30dae640e69b9e530b23
|
[
"MIT"
] | null | null | null |
whois/__init__.py
|
mzpqnxow/whois-1
|
b5623ed25cfa58d9457d30dae640e69b9e530b23
|
[
"MIT"
] | null | null | null |
whois/__init__.py
|
mzpqnxow/whois-1
|
b5623ed25cfa58d9457d30dae640e69b9e530b23
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import *
import re
import sys
import os
import subprocess
import socket
from .parser import WhoisEntry
from .whois import NICClient
# thanks to https://www.regextester.com/104038
IPV4_OR_V6 = re.compile(r"((^\s*((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))\s*$)|(^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$))")
suffixes = None
def extract_domain(url):
"""Extract the domain from the given URL
>>> print(extract_domain('http://www.google.com.au/tos.html'))
google.com.au
>>> print(extract_domain('abc.def.com'))
def.com
>>> print(extract_domain(u'www..hk'))
.hk
>>> print(extract_domain('chambagri.fr'))
chambagri.fr
>>> print(extract_domain('www.webscraping.com'))
webscraping.com
>>> print(extract_domain('198.252.206.140'))
stackoverflow.com
>>> print(extract_domain('102.112.2O7.net'))
2o7.net
>>> print(extract_domain('globoesporte.globo.com'))
globo.com
>>> print(extract_domain('1-0-1-1-1-0-1-1-1-1-1-1-1-.0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info'))
0-0-0-0-0-0-0-0-0-0-0-0-0-10-0-0-0-0-0-0-0-0-0-0-0-0-0.info
>>> print(extract_domain('2607:f8b0:4006:802::200e'))
1e100.net
>>> print(extract_domain('172.217.3.110'))
1e100.net
"""
if IPV4_OR_V6.match(url):
# this is an IP address
return socket.gethostbyaddr(url)[0]
# load known TLD suffixes
global suffixes
if not suffixes:
# downloaded from https://publicsuffix.org/list/public_suffix_list.dat
tlds_path = os.path.join(os.getcwd(), os.path.dirname(__file__), 'data', 'public_suffix_list.dat')
with open(tlds_path, encoding='utf-8') as tlds_fp:
suffixes = set(line.encode('utf-8') for line in tlds_fp.read().splitlines() if line and not line.startswith('//'))
if not isinstance(url, str):
url = url.decode('utf-8')
url = re.sub('^.*://', '', url)
url = url.split('/')[0].lower()
# find the longest suffix match
domain = b''
split_url = url.split('.')
for section in reversed(split_url):
if domain:
domain = b'.' + domain
domain = section.encode('utf-8') + domain
if domain not in suffixes:
if b'.' not in domain and len(split_url) >= 2:
# If this is the first section and there wasn't a match, try to
# match the first two sections - if that works, keep going
# See https://github.com/richardpenman/whois/issues/50
second_order_tld = '.'.join([split_url[-2], split_url[-1]])
if not second_order_tld.encode('utf-8') in suffixes:
break
else:
break
return domain.decode('utf-8')
if __name__ == '__main__':
try:
url = sys.argv[1]
except IndexError:
print('Usage: %s url' % sys.argv[0])
else:
print(whois(url))
| 42.305085 | 1,227 | 0.55629 |
bc45c15aebfb0da618b90f3884eb8a545e0f2823
| 3,255 |
py
|
Python
|
app/dialog/avatar_picture_dialog.py
|
tirinox/alphavatarbot
|
5adac8c9c4534206eaf6c146f6e194ed5951d055
|
[
"MIT"
] | 1 |
2021-03-18T15:35:15.000Z
|
2021-03-18T15:35:15.000Z
|
app/dialog/avatar_picture_dialog.py
|
tirinox/alphavatarbot
|
5adac8c9c4534206eaf6c146f6e194ed5951d055
|
[
"MIT"
] | null | null | null |
app/dialog/avatar_picture_dialog.py
|
tirinox/alphavatarbot
|
5adac8c9c4534206eaf6c146f6e194ed5951d055
|
[
"MIT"
] | 1 |
2021-03-18T15:35:51.000Z
|
2021-03-18T15:35:51.000Z
|
import asyncio
from contextlib import AsyncExitStack
from aiogram.dispatcher.filters.state import StatesGroup, State
from aiogram.dispatcher.storage import FSMContextProxy
from aiogram.types import Message, PhotoSize, ReplyKeyboardRemove, ContentTypes
from aiogram.utils.helper import HelperMode
from dialog.avatar_image_work import download_tg_photo, get_userpic, combine_frame_and_photo_v2, img_to_bio
from dialog.base import BaseDialog, message_handler
from localization import BaseLocalization
from lib.depcont import DepContainer
from lib.texts import kbd
# todo: accept documents!
| 39.695122 | 119 | 0.677112 |
bc498b7d39a14ae7cd3ad1e6341af40bb6279e72
| 5,144 |
py
|
Python
|
image_store_processing.py
|
olubiyiontheweb/digid_websearch_flask
|
181107eaa60faff9429b754236406eed56e3c1ec
|
[
"MIT"
] | 1 |
2021-12-15T18:56:05.000Z
|
2021-12-15T18:56:05.000Z
|
image_store_processing.py
|
olubiyiontheweb/similar_image_websearch
|
ddb79a3e627c1143ff7f64e6d82f0d8b9dcd8047
|
[
"MIT"
] | null | null | null |
image_store_processing.py
|
olubiyiontheweb/similar_image_websearch
|
ddb79a3e627c1143ff7f64e6d82f0d8b9dcd8047
|
[
"MIT"
] | null | null | null |
from skimage.metrics import structural_similarity as ssim
from glob import glob
from PIL import Image
import numpy as np
import ntpath
import dhash
import cv2
from database_structure import database_migrations
IMAGE_FOLDER = "./image_store"
ALLOWED_EXTENSIONS = ['png', 'jpg', 'jpeg']
image_store_hash = dict()
db_ops = database_migrations()
| 36.742857 | 79 | 0.631221 |
bc49a143fb9688648101a0602142d480263709b3
| 8,823 |
py
|
Python
|
cogs/jpserv.py
|
elthorito/Rai
|
a6f05567a0d4ed98a09676e507c478a27630bf1c
|
[
"MIT"
] | null | null | null |
cogs/jpserv.py
|
elthorito/Rai
|
a6f05567a0d4ed98a09676e507c478a27630bf1c
|
[
"MIT"
] | null | null | null |
cogs/jpserv.py
|
elthorito/Rai
|
a6f05567a0d4ed98a09676e507c478a27630bf1c
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
import os
import json
from datetime import date, datetime, timedelta
from .utils import helper_functions as hf
from copy import deepcopy
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))).replace('\\', '/')
| 45.715026 | 120 | 0.572821 |
bc4baf04ed5a5ebda75a1b19ad254a0f725f6190
| 2,027 |
py
|
Python
|
nehebn2.py
|
psifertex/nehebn2
|
8b62a88a9d06624dbb62b8b74cc0566172fba970
|
[
"MIT"
] | null | null | null |
nehebn2.py
|
psifertex/nehebn2
|
8b62a88a9d06624dbb62b8b74cc0566172fba970
|
[
"MIT"
] | null | null | null |
nehebn2.py
|
psifertex/nehebn2
|
8b62a88a9d06624dbb62b8b74cc0566172fba970
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from components import ProgramState
import binaryninja as binja
import argparse
import os.path
import curses
# TODO...impliment live-refreashing the settings.json during run (add the keybinding and check for it here in the global input loop)
# TODO...support multi-key presses? Not sure if this already works or not
# TODO...make sure to support small terminals (I think it does right now, but I should add some more checks so nothing goes out of bounds)
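# NOTE: main() is referenced below but not included in this snippet; a minimal
# curses entry-point sketch (the ProgramState API used here is an assumption):
def main(stdscr):
    state = ProgramState(stdscr)       # hypothetical constructor
    while state.running:               # hypothetical attribute
        state.render()                 # hypothetical methods
        state.handleInput(stdscr.getch())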
if __name__ == "__main__":
background = "2a2a2a"
text = "e0e0e0"
curses.wrapper(main)
| 28.957143 | 138 | 0.644795 |
bc4d5b7bde1ce5d45b97c67684a8f6c61429eb5b
| 5,144 |
py
|
Python
|
keras/layers/pooling/base_pooling3d.py
|
itsraina/keras
|
5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35
|
[
"Apache-2.0"
] | null | null | null |
keras/layers/pooling/base_pooling3d.py
|
itsraina/keras
|
5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35
|
[
"Apache-2.0"
] | null | null | null |
keras/layers/pooling/base_pooling3d.py
|
itsraina/keras
|
5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Private base class for pooling 3D layers."""
import tensorflow.compat.v2 as tf
from keras import backend
from keras.engine.base_layer import Layer
from keras.engine.input_spec import InputSpec
from keras.utils import conv_utils
| 37.823529 | 80 | 0.623639 |
bc4d689703a555cde99de572dc764b14b5f45f70
| 726 |
py
|
Python
|
main.py
|
lucastan96/video2bitmap
|
8a54f33af92b5088d29322abf936a6ce2ecc0ac4
|
[
"MIT"
] | 1 |
2020-12-30T00:57:38.000Z
|
2020-12-30T00:57:38.000Z
|
main.py
|
lucastan96/video2bitmap
|
8a54f33af92b5088d29322abf936a6ce2ecc0ac4
|
[
"MIT"
] | null | null | null |
main.py
|
lucastan96/video2bitmap
|
8a54f33af92b5088d29322abf936a6ce2ecc0ac4
|
[
"MIT"
] | null | null | null |
import moviepy.editor as mpy
import moviepy.video.fx.all as vfx
import subprocess as sp
from PIL import Image  # needed for the frame-to-bitmap conversion below
# Crop and resize video
clip = mpy.VideoFileClip("smoke.mp4")
(w, h) = clip.size
cropped_clip = vfx.crop(clip, width=(h/128)*64, height=h, x1=w/4*3-100, y1=0).resize((64, 128))
cropped_clip.write_videofile('smoke-cropped.mp4')
# Convert video to frames
# Make sure to install ffmpeg on machine
cmd='ffmpeg -i /path/to/smoke-cropped.mp4 /path/to/frames_temp/%d.bmp'
sp.call(cmd,shell=True)
# Convert image to black and white bitmap
for i in range(202):
col = Image.open("frames_temp/" + str(i + 1) + ".bmp")
gray = col.convert('L')
bw = gray.point(lambda x: 0 if x<128 else 255, '1')
bw.save("frames/" + str(i) + ".bmp")
| 34.571429 | 95 | 0.688705 |
bc4dbd4536189e5c83cb95261570c971ee7df77f
| 336 |
py
|
Python
|
ghostwriter/rolodex/apps.py
|
bbhunter/Ghostwriter
|
1b684ddd119feed9891e83b39c9b314b41d086ca
|
[
"BSD-3-Clause"
] | 601 |
2019-07-30T17:06:37.000Z
|
2022-03-31T00:55:31.000Z
|
ghostwriter/rolodex/apps.py
|
chrismaddalena/Ghostwriter
|
5a938358450cd0e69a42883b1b18e067644744a8
|
[
"BSD-3-Clause"
] | 150 |
2019-08-01T07:20:22.000Z
|
2022-03-29T19:18:02.000Z
|
ghostwriter/rolodex/apps.py
|
chrismaddalena/Ghostwriter
|
5a938358450cd0e69a42883b1b18e067644744a8
|
[
"BSD-3-Clause"
] | 126 |
2019-07-30T17:42:49.000Z
|
2022-03-21T20:43:35.000Z
|
"""This contains the configuration of the Rolodex application."""
# Django Imports
from django.apps import AppConfig
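# NOTE: the AppConfig subclass itself is not part of this snippet; the
# conventional definition, with the app name assumed from the file path:
class RolodexConfig(AppConfig):
    name = "ghostwriter.rolodex"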
| 22.4 | 70 | 0.672619 |
bc4f1db83b181105ad1b030028d4a99321957de7
| 560 |
py
|
Python
|
boards/migrations/0024_boardpreferences_moderators.py
|
oscarsiles/jotlet
|
361f7ad0d32ea96d012020a67493931482207036
|
[
"BSD-3-Clause"
] | null | null | null |
boards/migrations/0024_boardpreferences_moderators.py
|
oscarsiles/jotlet
|
361f7ad0d32ea96d012020a67493931482207036
|
[
"BSD-3-Clause"
] | 2 |
2022-03-21T22:22:33.000Z
|
2022-03-28T22:18:33.000Z
|
boards/migrations/0024_boardpreferences_moderators.py
|
oscarsiles/jotlet
|
361f7ad0d32ea96d012020a67493931482207036
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 4.0.3 on 2022-03-01 14:42
from django.conf import settings
from django.db import migrations, models
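# NOTE: the Migration class body is not part of this snippet; a minimal sketch
# consistent with the filename (dependency and field details are assumptions):
class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("boards", "0023_..."),  # hypothetical previous migration
    ]

    operations = [
        migrations.AddField(
            model_name="boardpreferences",
            name="moderators",
            field=models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL),
        ),
    ]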
| 26.666667 | 115 | 0.673214 |
bc4f5018d00b3586d20735c150c38e4b306f48f3
| 325 |
py
|
Python
|
models/minimize_model.py
|
MichalBusta/OpenCitiesAIC
|
2358118a782edde27a588d6adaf79941cbd90de6
|
[
"MIT"
] | 7 |
2020-03-23T21:43:32.000Z
|
2021-03-30T09:11:45.000Z
|
models/minimize_model.py
|
MichalBusta/OpenCitiesAIC
|
2358118a782edde27a588d6adaf79941cbd90de6
|
[
"MIT"
] | 4 |
2020-05-09T01:13:24.000Z
|
2022-01-13T02:24:14.000Z
|
models/minimize_model.py
|
MichalBusta/OpenCitiesAIC
|
2358118a782edde27a588d6adaf79941cbd90de6
|
[
"MIT"
] | 4 |
2020-04-17T15:06:36.000Z
|
2021-03-30T09:11:47.000Z
|
'''
Created on Mar 22, 2020
@author: Michal.Busta at gmail.com
'''
#get rid of the optimizer state ...
import torch
MODEL_PATH = '/models/model-b2-2.pth'
state = torch.load(MODEL_PATH, map_location=lambda storage, loc: storage)
state_out = {
"state_dict": state["state_dict"],
}
torch.save(state_out, 'model-b2-2.pth')
| 20.3125 | 73 | 0.707692 |
bc4fb0ed6bbdc4f3f5e43225548f14915b084779
| 1,125 |
py
|
Python
|
setup.py
|
thomas-kloeber/braumeister
|
1045df0ad95eb6a4b9b16bb91ece64b09ff1b1f7
|
[
"MIT"
] | 6 |
2018-02-09T15:03:12.000Z
|
2021-02-18T07:21:34.000Z
|
setup.py
|
thomas-kloeber/braumeister
|
1045df0ad95eb6a4b9b16bb91ece64b09ff1b1f7
|
[
"MIT"
] | 17 |
2018-03-20T09:28:32.000Z
|
2022-01-27T08:48:41.000Z
|
setup.py
|
thomas-kloeber/braumeister
|
1045df0ad95eb6a4b9b16bb91ece64b09ff1b1f7
|
[
"MIT"
] | 7 |
2018-02-09T15:06:11.000Z
|
2020-03-02T10:23:10.000Z
|
import os
import re
from setuptools import setup
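# NOTE: read() is called below but its definition is missing from this
# snippet; the conventional helper (an assumption):
def read(fname):
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()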
version = re.search(
r'^__version__\s*=\s*"(.*)"',
open('braumeister/braumeister.py').read(),
re.M
).group(1)
setup(
name="braumeister",
packages=["braumeister", "braumeister.actions"],
version=version,
author="Marcel Steffen",
author_email="[email protected]",
description="Easy release bulding, combining JIRA and git",
long_description=read('README.md'),
license="MIT",
keywords="git jira release",
url="https://www.talentsconnect.com",
include_package_data=True,
install_requires=['requests', 'colorama'],
entry_points={
'console_scripts': ["braumeister = braumeister.braumeister:main"]
},
python_requires='!=2.7, !=3.4, >=3.5',
zip_safe=False,
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"Topic :: Utilities",
"Topic :: Software Development :: Version Control :: Git"
],
)
| 26.162791 | 73 | 0.639111 |
bc500a982cb78bb46e6aa705ee116eae36444405
| 3,493 |
py
|
Python
|
modules/inference.py
|
rubelchowdhury20/wuton-with-densepose
|
5485f1f311724d8f8b887d669a8b55c73849eb98
|
[
"MIT"
] | 12 |
2020-11-13T01:51:24.000Z
|
2022-03-17T03:14:27.000Z
|
modules/inference.py
|
rubelchowdhury20/wuton-with-densepose
|
5485f1f311724d8f8b887d669a8b55c73849eb98
|
[
"MIT"
] | 1 |
2021-10-12T06:10:22.000Z
|
2021-10-12T06:10:22.000Z
|
modules/inference.py
|
rubelchowdhury20/wuton-with-densepose
|
5485f1f311724d8f8b887d669a8b55c73849eb98
|
[
"MIT"
] | 2 |
2021-01-10T17:51:34.000Z
|
2022-03-02T10:53:11.000Z
|
# standard library imports
import os
# third party imports
import numpy as np
from PIL import Image
import torch.nn as nn
from torchvision import transforms
# local imports
import config
from . import utils
from . import geometric_transformer
| 41.094118 | 178 | 0.742056 |
bc50340e05b5a45da8fec5c4d61ac3cccc89e3f0
| 6,577 |
py
|
Python
|
imggen/fonts.py
|
p-lambda/unlabeled_outputs
|
18cda9e922591ec99d70caaa173abbb049ef274b
|
[
"MIT"
] | 4 |
2021-07-02T03:08:29.000Z
|
2022-03-12T07:13:13.000Z
|
imggen/fonts.py
|
p-lambda/unlabeled_outputs
|
18cda9e922591ec99d70caaa173abbb049ef274b
|
[
"MIT"
] | 1 |
2021-12-25T21:24:23.000Z
|
2021-12-25T21:24:23.000Z
|
imggen/fonts.py
|
p-lambda/unlabeled_outputs
|
18cda9e922591ec99d70caaa173abbb049ef274b
|
[
"MIT"
] | 1 |
2021-12-26T07:33:45.000Z
|
2021-12-26T07:33:45.000Z
|
from pathlib import Path
import h5py
import numpy as np
from torchvision.datasets.vision import VisionDataset
from PIL import Image
import requests
import zipfile
from tqdm import tqdm
| 35.939891 | 149 | 0.614414 |
bc511f5404ed81ec6c064f4f97b303375361769d
| 774 |
py
|
Python
|
leetcode/47.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | 1 |
2019-08-28T23:15:25.000Z
|
2019-08-28T23:15:25.000Z
|
leetcode/47.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | null | null | null |
leetcode/47.py
|
windniw/just-for-fun
|
54e5c2be145f3848811bfd127f6a89545e921570
|
[
"Apache-2.0"
] | null | null | null |
"""
link: https://leetcode.com/problems/permutations-ii
problem: given a collection of numbers, nums, which may contain duplicates,
         return all possible unique permutations in any order.
solution: same backtracking as problem 46, with sorting plus skipping of
          duplicate branches at the same recursion depth.
"""
| 28.666667 | 73 | 0.529716 |
bc535e8c70a7ae7d8c05a67decf44c291034483f
| 2,406 |
py
|
Python
|
adminmgr/media/code/A3/task3/T1_ocefXVJ.py
|
IamMayankThakur/test-bigdata
|
cef633eb394419b955bdce479699d0115d8f99c3
|
[
"Apache-2.0"
] | 9 |
2019-11-08T02:05:27.000Z
|
2021-12-13T12:06:35.000Z
|
adminmgr/media/code/A3/task3/T1_ocefXVJ.py
|
IamMayankThakur/test-bigdata
|
cef633eb394419b955bdce479699d0115d8f99c3
|
[
"Apache-2.0"
] | 6 |
2019-11-27T03:23:16.000Z
|
2021-06-10T19:15:13.000Z
|
adminmgr/media/code/A3/task3/T1_ocefXVJ.py
|
IamMayankThakur/test-bigdata
|
cef633eb394419b955bdce479699d0115d8f99c3
|
[
"Apache-2.0"
] | 4 |
2019-11-26T17:04:27.000Z
|
2021-12-13T11:57:03.000Z
|
import findspark
findspark.init()
from pyspark import SparkConf,SparkContext
from pyspark.streaming import StreamingContext
from pyspark.sql import Row,SQLContext
import sys
import requests
conf=SparkConf()
conf.setAppName("BigData")
sc=SparkContext(conf=conf)
ssc=StreamingContext(sc,int(sys.argv[1]))
ssc.checkpoint("/checkpoint_BIGDATA")
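# NOTE: the helper functions used below (tmp, forf, topprint) are not included
# in this snippet; minimal sketches of their apparent contracts (assumptions,
# not the original implementations):
def tmp(line):
    # parse one incoming record into a list of hashtag fields
    return line.split(';')

def forf(fields):
    # emit a (hashtag, 1) pair per field for the windowed count
    return [(field, 1) for field in fields]

def topprint(rdd):
    # print the top few hashtags of each processed RDD
    for element in rdd.take(5):
        print(element)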
'''
#Selecting a window :
#output3:
inputStream=ssc.socketTextStream("localhost",9009)
dataStream = inputStream.window(int(sys.argv[1]),int(sys.argv[2]))
tweet=dataStream.map(tmp)
septweet=tweet.flatMap(forf)
count=septweet.reduceByKey(lambda x,y:x+y)
sortcount = count.transform(lambda rdd :rdd.sortBy(lambda a:a[1],ascending=False))
tweet1=sortcount.filter(lambda w:w[0] is not '')
tweet1.pprint()
res = tweet1.map(lambda a : a[0])
res.foreachRDD(topprint)
#res.pprint(3)
'''
'''
#Selecting a datastream and then reducing by window:
#output2
dataStream=ssc.socketTextStream("localhost",9009)
tweet=dataStream.map(tmp)
septweet=tweet.flatMap(forf)
#septweet.pprint()
count=septweet.reduceByKeyAndWindow(lambda x,y:x+y,int(sys.argv[1]),int(sys.argv[2]))
sortcount = count.transform(lambda rdd :rdd.sortBy(lambda a:a[0],ascending=True))
sortcount = count.transform(lambda rdd :rdd.sortBy(lambda a:a[1],ascending=False))
tweet1=sortcount.filter(lambda w:w[0] is not '')
#tweet1.pprint()
res = tweet1.map(lambda a : a[0])
res.foreachRDD(topprint)
'''
#Try in output1
inputStream=ssc.socketTextStream("localhost",9009)
dataStream = inputStream.window(int(sys.argv[2]),int(sys.argv[1]))
tweet=dataStream.map(tmp)
septweet=tweet.flatMap(forf)
count=septweet.reduceByKey(lambda x,y:x+y)
sortcount = count.transform(lambda rdd :rdd.sortBy(lambda a:a[0],ascending=True))
sortcount = sortcount.transform(lambda rdd :rdd.sortBy(lambda a:a[1],ascending=False))
tweet1=sortcount.filter(lambda w:w[0] is not '')
#tweet1.pprint()
res = tweet1.map(lambda a : a[0])
res.foreachRDD(topprint)
#TO maintain state
# totalcount=tweet.updateStateByKey(aggregate_tweets_count)
# totalcount.pprint()
#To Perform operation on each RDD
# totalcount.foreachRDD(process_rdd)
ssc.start()
ssc.awaitTermination(25)
ssc.stop()
| 24.804124 | 86 | 0.741895 |
bc53c586e44516a506fdeff0f180d92c8730dd5b
| 908 |
py
|
Python
|
courses/migrations/0003_alter_content_options_alter_module_options_and_more.py
|
antonnifo/E-Soma
|
93d49b27dedbff58d19f8245a79693762fc819d5
|
[
"MIT"
] | 1 |
2022-02-09T06:28:04.000Z
|
2022-02-09T06:28:04.000Z
|
courses/migrations/0003_alter_content_options_alter_module_options_and_more.py
|
antonnifo/E-Soma
|
93d49b27dedbff58d19f8245a79693762fc819d5
|
[
"MIT"
] | null | null | null |
courses/migrations/0003_alter_content_options_alter_module_options_and_more.py
|
antonnifo/E-Soma
|
93d49b27dedbff58d19f8245a79693762fc819d5
|
[
"MIT"
] | 1 |
2022-02-09T06:29:11.000Z
|
2022-02-09T06:29:11.000Z
|
# Generated by Django 4.0.1 on 2022-01-20 13:10
import courses.fields
from django.db import migrations
| 25.942857 | 67 | 0.562775 |
bc5478846dead2384e17349d8f75968c543992de
| 407 |
py
|
Python
|
pkg/maths/maths.py
|
prateekdegaons1991/experiment-loadtest
|
b53c70fac5b2f7d37df77844b26f79741c74c1b6
|
[
"Apache-2.0"
] | 8 |
2020-04-17T06:34:30.000Z
|
2021-12-18T10:54:50.000Z
|
pkg/maths/maths.py
|
oumkale/test-python
|
1f3d3e42ffbe1bf5ed9df8a0c6038e50129b2c4d
|
[
"Apache-2.0"
] | 15 |
2020-04-18T06:01:53.000Z
|
2022-02-15T08:56:25.000Z
|
pkg/maths/maths.py
|
oumkale/test-python
|
1f3d3e42ffbe1bf5ed9df8a0c6038e50129b2c4d
|
[
"Apache-2.0"
] | 12 |
2020-04-17T05:14:27.000Z
|
2022-03-29T19:24:20.000Z
|
#Atoi stands for ASCII to Integer Conversion
#Adjustment contains rule of three for calculating an integer given another integer representing a percentage
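# NOTE: the function bodies are not included in this snippet; minimal sketches
# of what the two comments describe (names and signatures are assumptions):
def atoi(string):
    # ASCII-to-integer conversion without calling int()
    result = 0
    for char in string:
        result = result * 10 + (ord(char) - ord('0'))
    return result

def adjustment(value, percentage):
    # rule of three: `percentage` percent of `value`, as an integer
    return (value * percentage) // 100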
| 27.133333 | 109 | 0.673219 |
bc562cc6c9b35189e9adc0f9ba37a99ec2138c03
| 3,672 |
py
|
Python
|
google_compute_engine/config_manager.py
|
redoxdrh/GCP-Flask
|
34af307df541edca4eee58b1d8be64888550a674
|
[
"Apache-2.0"
] | 2 |
2017-05-04T08:05:29.000Z
|
2019-02-08T21:36:11.000Z
|
google_compute_engine/config_manager.py
|
redoxdrh/GCP-Flask
|
34af307df541edca4eee58b1d8be64888550a674
|
[
"Apache-2.0"
] | null | null | null |
google_compute_engine/config_manager.py
|
redoxdrh/GCP-Flask
|
34af307df541edca4eee58b1d8be64888550a674
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library for retrieving and modifying configuration settings."""
import os
import textwrap
from google_compute_engine import file_utils
from google_compute_engine.compat import parser
CONFIG = '/etc/default/instance_configs.cfg'
| 33.381818 | 78 | 0.699891 |
bc56aa53a834b83d16f942b242b5f67998363eda
| 22,135 |
py
|
Python
|
mscreen/autodocktools_prepare_py3k/mglutil/web/services/AppService_services.py
|
e-mayo/mscreen
|
a50f0b2f7104007c730baa51b4ec65c891008c47
|
[
"MIT"
] | 9 |
2021-03-06T04:24:28.000Z
|
2022-01-03T09:53:07.000Z
|
mglutil/web/services/AppService_services.py
|
e-mayo/autodocktools-prepare-py3k
|
2dd2316837bcb7c19384294443b2855e5ccd3e01
|
[
"BSD-3-Clause"
] | 3 |
2021-03-07T05:37:16.000Z
|
2021-09-19T15:06:54.000Z
|
mglutil/web/services/AppService_services.py
|
e-mayo/autodocktools-prepare-py3k
|
2dd2316837bcb7c19384294443b2855e5ccd3e01
|
[
"BSD-3-Clause"
] | 4 |
2019-08-28T23:11:39.000Z
|
2021-11-27T08:43:36.000Z
|
##################################################
# ./AppService_services.py
# generated by ZSI.wsdl2python
#
#
##################################################
from .AppService_services_types import *
from .AppService_services_types import \
nbcr_sdsc_edu_opal_types as ns1
import urllib.parse, types
from ZSI.TCcompound import Struct
from ZSI import client
import ZSI
| 41.296642 | 136 | 0.651096 |
bc56ca67cc1e81684bbce0d45386183e51cffb90
| 10,340 |
py
|
Python
|
examples/pytorch/swin/checkpoint_quantization.py
|
hieuhoang/FasterTransformer
|
440695ccac874574b1d2e1121788e8fa674b4381
|
[
"Apache-2.0"
] | null | null | null |
examples/pytorch/swin/checkpoint_quantization.py
|
hieuhoang/FasterTransformer
|
440695ccac874574b1d2e1121788e8fa674b4381
|
[
"Apache-2.0"
] | null | null | null |
examples/pytorch/swin/checkpoint_quantization.py
|
hieuhoang/FasterTransformer
|
440695ccac874574b1d2e1121788e8fa674b4381
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import argparse
import re
import numpy as np
import torch
ACTIVATION_AMAX_NUM = 72
INT8O_KERNEL_NUM = 5
INT8O_GEMM_NUM = 7
TRT_FUSED_MHA_AMAX_NUM = 3
SCALE_RESERVE_NUM = 8
if __name__ == '__main__':
weights = torch.load('pytorch_model.bin')
extract_amaxlist(weights, [2, 2, 6, 2])
| 47.214612 | 137 | 0.562186 |
bc57b1f771495cf5ea069e99b2859a0f3795d393
| 6,608 |
py
|
Python
|
mars/deploy/kubernetes/core.py
|
tomzhang/mars-1
|
6f1d85e37eb1b383251314cb0ba13e06288af03d
|
[
"Apache-2.0"
] | null | null | null |
mars/deploy/kubernetes/core.py
|
tomzhang/mars-1
|
6f1d85e37eb1b383251314cb0ba13e06288af03d
|
[
"Apache-2.0"
] | null | null | null |
mars/deploy/kubernetes/core.py
|
tomzhang/mars-1
|
6f1d85e37eb1b383251314cb0ba13e06288af03d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import random
import time
from ...actors import new_client, FunctionActor
logger = logging.getLogger(__name__)
| 39.333333 | 100 | 0.628783 |
bc58243dff3b67ec29b9366a2531008a83301c24
| 767 |
py
|
Python
|
tests/tests_model/tests_bert_model.py
|
elangovana/gene_normalisation
|
9152298e951cd968ee516815c7fa11f1ceabca51
|
[
"MIT"
] | 1 |
2020-10-21T06:01:28.000Z
|
2020-10-21T06:01:28.000Z
|
tests/tests_model/tests_bert_model.py
|
elangovana/gene_normalisation
|
9152298e951cd968ee516815c7fa11f1ceabca51
|
[
"MIT"
] | null | null | null |
tests/tests_model/tests_bert_model.py
|
elangovana/gene_normalisation
|
9152298e951cd968ee516815c7fa11f1ceabca51
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
import torch
import transformers
from model.bert_model import BertModel
| 26.448276 | 144 | 0.688396 |
bc583d8b5318b12422c378e8c294b322b7118447
| 1,593 |
py
|
Python
|
tests/renderer_test.py
|
tmcclintock/PyDonJuan
|
ab6d567b568c3e0dd976b10c2628ad99ca81b953
|
[
"CC0-1.0"
] | 2 |
2020-12-14T20:50:57.000Z
|
2021-05-26T04:32:24.000Z
|
tests/renderer_test.py
|
tmcclintock/PyDonJuan
|
ab6d567b568c3e0dd976b10c2628ad99ca81b953
|
[
"CC0-1.0"
] | 29 |
2020-12-18T15:56:14.000Z
|
2021-01-12T01:17:48.000Z
|
tests/renderer_test.py
|
tmcclintock/donjuan
|
ab6d567b568c3e0dd976b10c2628ad99ca81b953
|
[
"CC0-1.0"
] | null | null | null |
import json
import os
import tempfile
from unittest import TestCase
import pytest
from donjuan import Dungeon, DungeonRandomizer, Renderer
| 28.963636 | 68 | 0.622724 |
bc5a20c1be48c7dd2648cc88a86c05d54e4b6c1c
| 612 |
py
|
Python
|
src/foremast/validate.py
|
dnava013/foremast
|
9849821b5bb3cd67b438c5adeaa0e42f86e9eaf8
|
[
"Apache-2.0"
] | 157 |
2016-09-12T16:24:14.000Z
|
2018-06-02T15:40:38.000Z
|
src/foremast/validate.py
|
dnava013/foremast
|
9849821b5bb3cd67b438c5adeaa0e42f86e9eaf8
|
[
"Apache-2.0"
] | 206 |
2016-09-12T16:41:31.000Z
|
2018-06-04T21:50:29.000Z
|
src/foremast/validate.py
|
dnava013/foremast
|
9849821b5bb3cd67b438c5adeaa0e42f86e9eaf8
|
[
"Apache-2.0"
] | 34 |
2016-09-12T16:37:57.000Z
|
2018-06-04T18:37:52.000Z
|
"""Spinnaker validate functions."""
import logging
from .consts import API_URL
from .utils.credentials import get_env_credential
LOG = logging.getLogger(__name__)
def validate_gate():
"""Check Gate connection."""
try:
credentials = get_env_credential()
LOG.debug('Found credentials: %s', credentials)
LOG.info('Gate working.')
except TypeError:
LOG.fatal('Gate connection not valid: API_URL = %s', API_URL)
def validate_all(args):
"""Run all validate steps."""
LOG.debug('Args: %s', args)
LOG.info('Running all validate steps.')
validate_gate()
| 23.538462 | 69 | 0.673203 |
bc5ad874ef55f36dd52e0759ceb93fea5f606b23
| 2,437 |
py
|
Python
|
constellation_forms/migrations/0001_initial.py
|
ConstellationApps/Forms
|
5d2bacf589c1a473cf619f34d569d33191b11285
|
[
"ISC"
] | 2 |
2017-04-18T02:41:00.000Z
|
2017-04-18T02:51:39.000Z
|
constellation_forms/migrations/0001_initial.py
|
ConstellationApps/Forms
|
5d2bacf589c1a473cf619f34d569d33191b11285
|
[
"ISC"
] | 33 |
2017-03-03T06:16:44.000Z
|
2019-08-20T23:06:21.000Z
|
constellation_forms/migrations/0001_initial.py
|
ConstellationApps/Forms
|
5d2bacf589c1a473cf619f34d569d33191b11285
|
[
"ISC"
] | 1 |
2017-02-22T18:48:04.000Z
|
2017-02-22T18:48:04.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-15 00:56
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
| 37.492308 | 142 | 0.552318 |
bc5ae8aa40e31ef7936556c55251ff7ab9886286
| 2,258 |
py
|
Python
|
src/webpy1/src/borough/dbsqli.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | 1 |
2020-02-17T08:18:29.000Z
|
2020-02-17T08:18:29.000Z
|
src/webpy1/src/borough/dbsqli.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
src/webpy1/src/borough/dbsqli.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sqlite3 as sqlite
import os.path as osp
import sys
| 27.204819 | 79 | 0.529672 |
bc5b677ed37d940fc02b036d43d53f7c6322c3f1
| 599 |
py
|
Python
|
losses/all_lost.py
|
Liudzz/loss-chapter
|
22359b92ca5e155d5af32ef2f22eeddf0483b947
|
[
"Apache-2.0"
] | 2 |
2020-07-07T00:03:31.000Z
|
2020-07-08T09:58:48.000Z
|
losses/all_lost.py
|
Liudzz/loss-chapter
|
22359b92ca5e155d5af32ef2f22eeddf0483b947
|
[
"Apache-2.0"
] | null | null | null |
losses/all_lost.py
|
Liudzz/loss-chapter
|
22359b92ca5e155d5af32ef2f22eeddf0483b947
|
[
"Apache-2.0"
] | 2 |
2020-07-08T09:58:56.000Z
|
2020-07-11T13:43:53.000Z
|
"""
easy way to use losses
"""
from center_loss import Centerloss
import torch.nn as nn
from FocalLoss import FocalLoss
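# NOTE: the combination logic itself is not included in this snippet; a
# minimal usage sketch (constructor and call signatures are assumptions):
def build_losses(num_classes, feat_dim, center_weight=0.5):
    center = Centerloss(num_classes, feat_dim)  # hypothetical signature
    focal = FocalLoss()                         # hypothetical signature

    def combined(logits, features, target):
        return focal(logits, target) + center_weight * center(features, target)

    return combined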
| 22.185185 | 55 | 0.729549 |
bc5b680a2cd25d3fd6125ee9f9722bc8e692640b
| 7,320 |
py
|
Python
|
nova/tests/functional/test_metadata.py
|
Nexenta/nova
|
ccecb507ff4bdcdd23d90e7b5b02a22c5a46ecc3
|
[
"Apache-2.0"
] | 1 |
2020-08-14T02:20:59.000Z
|
2020-08-14T02:20:59.000Z
|
nova/tests/functional/test_metadata.py
|
Nexenta/nova
|
ccecb507ff4bdcdd23d90e7b5b02a22c5a46ecc3
|
[
"Apache-2.0"
] | 2 |
2021-03-31T20:04:16.000Z
|
2021-12-13T20:45:03.000Z
|
nova/tests/functional/test_metadata.py
|
Nexenta/nova
|
ccecb507ff4bdcdd23d90e7b5b02a22c5a46ecc3
|
[
"Apache-2.0"
] | 1 |
2020-07-24T02:31:45.000Z
|
2020-07-24T02:31:45.000Z
|
# Copyright 2016 Rackspace Australia
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import jsonschema
import os
import requests
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import fixtures as func_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as fake_image
real_request = requests.request
| 37.927461 | 78 | 0.639617 |
bc5dd6bb126db54b8402ce56f75664e9271f9ace
| 8,889 |
py
|
Python
|
openue/sequence_labeling/subject_labeling_data_manager.py
|
zxlzr/OpenUE
|
a49f8950dc2b93a489bb8ce0d40abb26c2c0f347
|
[
"MIT"
] | 8 |
2020-01-08T13:05:35.000Z
|
2021-12-20T09:43:57.000Z
|
openue/sequence_labeling/subject_labeling_data_manager.py
|
zxlzr/OpenUE
|
a49f8950dc2b93a489bb8ce0d40abb26c2c0f347
|
[
"MIT"
] | 9 |
2020-09-25T22:36:51.000Z
|
2022-02-10T01:50:44.000Z
|
openue/sequence_labeling/subject_labeling_data_manager.py
|
zxlzr/OpenUE
|
a49f8950dc2b93a489bb8ce0d40abb26c2c0f347
|
[
"MIT"
] | null | null | null |
import os
import sys
import json
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../bert")))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))
import tokenization
from config import config
if __name__=="__main__":
DATA_INPUT_DIR = config.data_dir
DATA_OUTPUT_DIR = "sequence_labeling_data"
Vocab_Path = config.bert_vocab_dir
General_Mode = False
model_data = Model_data_preparation(General_Mode = General_Mode,DATA_INPUT_DIR=DATA_INPUT_DIR, DATA_OUTPUT_DIR=DATA_OUTPUT_DIR,vocab_file_path=Vocab_Path)
model_data.separate_raw_data_and_token_labeling()
| 49.938202 | 186 | 0.582068 |
bc5ea97eac050b419965fd5ba95918dc58fe5bee
| 3,222 |
py
|
Python
|
clarifai/rest/grpc/custom_converters/custom_message_to_dict.py
|
Taik/clarifai-python
|
c3b66b84cb348d3cb1edff958f561a4734b78650
|
[
"Apache-2.0"
] | 322 |
2015-08-25T03:16:11.000Z
|
2021-11-08T09:36:50.000Z
|
clarifai/rest/grpc/custom_converters/custom_message_to_dict.py
|
Taik/clarifai-python
|
c3b66b84cb348d3cb1edff958f561a4734b78650
|
[
"Apache-2.0"
] | 76 |
2015-10-25T13:03:47.000Z
|
2022-02-19T09:36:10.000Z
|
clarifai/rest/grpc/custom_converters/custom_message_to_dict.py
|
Taik/clarifai-python
|
c3b66b84cb348d3cb1edff958f561a4734b78650
|
[
"Apache-2.0"
] | 136 |
2015-09-04T13:48:27.000Z
|
2021-06-12T16:48:36.000Z
|
import typing # noqa
from google.protobuf import descriptor
from google.protobuf.json_format import _IsMapEntry, _Printer
from google.protobuf.message import Message # noqa
from clarifai.rest.grpc.proto.clarifai.api.utils import extensions_pb2
| 36.202247 | 97 | 0.71198 |
bc5ea9e7a84513ea2108b53d14947d94915f3a05
| 26 |
py
|
Python
|
__init__.py
|
mschrimpf/CapsNetKeras
|
4c514860bf6689fb1772a7bd858638cd538ff22f
|
[
"MIT"
] | null | null | null |
__init__.py
|
mschrimpf/CapsNetKeras
|
4c514860bf6689fb1772a7bd858638cd538ff22f
|
[
"MIT"
] | null | null | null |
__init__.py
|
mschrimpf/CapsNetKeras
|
4c514860bf6689fb1772a7bd858638cd538ff22f
|
[
"MIT"
] | null | null | null |
from .capsulenet import *
| 13 | 25 | 0.769231 |
bc5eeaf616c5264490632d3b43d2af7080e1aea8
| 28,625 |
py
|
Python
|
gate/mate_ksx3267v2.py
|
mrchoi87/IRSOSv4
|
886c3dcbeb64c3a8cc257b58692946fd5462312e
|
[
"BSD-3-Clause"
] | null | null | null |
gate/mate_ksx3267v2.py
|
mrchoi87/IRSOSv4
|
886c3dcbeb64c3a8cc257b58692946fd5462312e
|
[
"BSD-3-Clause"
] | null | null | null |
gate/mate_ksx3267v2.py
|
mrchoi87/IRSOSv4
|
886c3dcbeb64c3a8cc257b58692946fd5462312e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
#
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 JiNong, Inc.
# All right reserved.
#
import struct
import time
import socket
import select
import traceback
import hashlib
import json
from enum import IntEnum
from threading import Thread, Lock
from mate import Mate, ThreadMate, DevType
from mblock import MBlock, BlkType, StatCode, ResCode, CmdCode, Observation, Request, Response, NotiCode, Notice
from pymodbus.client.sync import ModbusSerialClient
from pymodbus.client.sync import ModbusTcpClient
if __name__ == "__main__":
isnutri = False
opt = {
'conn' : [{
'method': 'rtu',
'port' : '/dev/ttyJND2',
'baudrate' : 9600,
'timeout': 5
}]
}
nutriinfo = [{
"id" : "1", "dk" : "", "dt": "gw", "children" : [{
"id" : "101", "dk" : '[1,40201,["status"],45001,["operation","opid"]]', "dt": "nd", "children" : [
{"id" : "102", "dk" : '[1,40211,["control","status","area","alert","opid"],45001,["operation", "opid", "control","EC","pH", "start-area", "stop-area", "on-sec"]]', "dt": "nutrient-supply/level1"},
{"id" : "103", "dk" : '[1,40221,["value","status"]]', "dt": "sen"},
{"id" : "104", "dk" : '[1,40231,["value","status"]]', "dt": "sen"},
{"id" : "105", "dk" : '[1,40241,["value","status"]]', "dt": "sen"},
{"id" : "106", "dk" : '[1,40251,["value","status"]]', "dt": "sen"},
{"id" : "107", "dk" : '[1,40261,["value","status"]]', "dt": "sen"},
{"id" : "109", "dk" : '[1,40271,["value","status"]]', "dt": "sen"},
{"id" : "110", "dk" : '[1,40281,["value","status"]]', "dt": "sen"},
{"id" : "111", "dk" : '[1,40291,["value","status"]]', "dt": "sen"},
{"id" : "112", "dk" : '[1,40301,["value","status"]]', "dt": "sen"},
{"id" : "113", "dk" : '[1,40311,["value","status"]]', "dt": "sen"}
]}
]}
]
devinfo = [{
"id" : "1", "dk" : "JND2", "dt": "gw", "children" : [
# {
# "id" : "101", "dk" : '[1,201,["status"],301,["operation","opid"]]', "dt": "nd", "children" : [
#{"id" : "102", "dk" : '[1,210,["value","status"]]', "dt": "sen"},
#{"id" : "103", "dk" : '[1,220,["value","status"]]', "dt": "sen"}
# "id" : "101", "dk" : '[1,40201,["status"],45001,["operation","opid"]]', "dt": "nd", "children" : [
#{"id" : "102", "dk" : '[1,41010,["value","status"]]', "dt": "sen"},
#{"id" : "103", "dk" : '[1,41020,["value","status"]]', "dt": "sen"}
# {"id" : "102", "dk" : '[1,40202,["value","status"]]', "dt": "sen"},
# {"id" : "103", "dk" : '[1,40205,["value","status"]]', "dt": "sen"},
#{"id" : "104", "dk" : '[1,40208,["value","status"]]', "dt": "sen"},
# {"id" : "105", "dk" : '[1,40211,["value","status"]]', "dt": "sen"},
#{"id" : "106", "dk" : '[1,40251,["value","status"]]', "dt": "sen"},
#{"id" : "107", "dk" : '[1,40261,["value","status"]]', "dt": "sen"},
#{"id" : "108", "dk" : '[1,40271,["value","status"]]', "dt": "sen"},
#{"id" : "109", "dk" : '[1,40281,["value","status"]]', "dt": "sen"},
#{"id" : "110", "dk" : '[1,40291,["value","status"]]', "dt": "sen"}
# ]
# }
]
}]
"""
}, {
"id" : "201", "dk" : '[2,40201,["status"],45001,["operation","opid"]]', "dt": "nd", "children" : [
{"id" : "202", "dk" : '[2,40202,["opid","status","state-hold-time","remain-time"],40206,["operation","opid","time"]]', "dt": "act/retractable/level1"},
{"id" : "202", "dk" : '[2,40209,["opid","status","state-hold-time","remain-time"],40213,["operation","opid","time"]]', "dt": "act/retractable/level1"},
{"id" : "203", "dk" : '[2,40216,["value","status"]]', "dt": "sen"},
{"id" : "204", "dk" : '[2,40219,["value","status"]]', "dt": "sen"},
#{"id" : "203", "dk" : (2,40221,["opid","status"],45021,["operation","opid"]), "dt": "act/switch/level0"},
#{"id" : "204", "dk" : (2,40231,["opid","status"],45031,["operation","opid"]), "dt": "act/switch/level0"},
#{"id" : "205", "dk" : (2,40241,["opid","status"],45041,["operation","opid"]), "dt": "act/switch/level0"},
#{"id" : "206", "dk" : (2,40251,["opid","status"],45051,["operation","opid"]), "dt": "act/switch/level0"},
#{"id" : "207", "dk" : (2,40261,["opid","status"],45061,["operation","opid"]), "dt": "act/switch/level0"},
#{"id" : "208", "dk" : (2,40271,["opid","status"],45071,["operation","opid"]), "dt": "act/switch/level0"},
#{"id" : "209", "dk" : (2,40281,["opid","status"],45081,["operation","opid"]), "dt": "act/switch/level0"}
]
}, {
"id" : "301", "dk" : (3,40201,["opid","status"],45001,["operation","opid"]), "dt": "nd", "children" : [
{"id" : "302", "dk" : (3,40211,["opid","status"],45011,["operation","opid"]), "dt": "act/retractable/level0"},
{"id" : "303", "dk" : (3,40221,["opid","status"],45021,["operation","opid"]), "dt": "act/retractable/level0"},
{"id" : "304", "dk" : (3,40231,["opid","status"],45031,["operation","opid"]), "dt": "act/retractable/level0"},
{"id" : "305", "dk" : (3,40241,["opid","status"],45041,["operation","opid"]), "dt": "act/retractable/level0"}
]
}]
}]
"""
if isnutri:
kdmate = KSX3267MateV2(opt, nutriinfo, "1", None)
else:
kdmate = KSX3267MateV2(opt, devinfo, "1", None)
mate = Mate ({}, [], "1", None)
kdmate.start (mate.writeblk)
print "mate started"
time.sleep(10)
req = Request(None)
req.setcommand("1", CmdCode.DETECT_DEVICE, None)
print "=======================================#1"
kdmate.writeblk(req)
print "=======================================#1"
"""
time.sleep(1)
req = Request(None)
req.setcommand("1", CmdCode.CANCEL_DETECT, {})
print "=======================================#2"
kdmate.writeblk(req)
print "=======================================#2"
time.sleep(1)
req = Request(None)
req.setcommand("1", CmdCode.DETECT_DEVICE, None)
print "=======================================#3"
kdmate.writeblk(req)
print "=======================================#3"
time.sleep(1)
req = Request(None)
req.setcommand("1", CmdCode.CANCEL_DETECT, {})
print "=======================================#4"
kdmate.writeblk(req)
print "=======================================#4"
time.sleep(10)
req = Request(201)
req.setcommand(202, CmdCode.OPEN, {})
kdmate.writeblk(req)
time.sleep(5)
req = Request(201)
req.setcommand(202, CmdCode.OFF, {})
kdmate.writeblk(req)
time.sleep(10)
req = Request(201)
req.setcommand(202, CmdCode.TIMED_OPEN, {"time":10})
kdmate.writeblk(req)
time.sleep(15)
req = Request(201)
req.setcommand(202, CmdCode.TIMED_CLOSE, {"time":10})
kdmate.writeblk(req)
time.sleep(5)
req = Request(201)
req.setcommand(202, CmdCode.OFF, {})
kdmate.writeblk(req)
"""
time.sleep(30)
kdmate.stop()
print "mate stopped"
| 40.71835 | 212 | 0.516681 |
bc60aeeb26d899f8ba324554b05c50b567a13167
| 6,525 |
py
|
Python
|
CircleciScripts/run_integrationtests.py
|
aimalygin/aws-sdk-ios
|
6cfaa3c56296300499f4885e9039c2dd24624cfa
|
[
"Apache-2.0"
] | 17 |
2018-02-19T16:29:51.000Z
|
2020-04-03T13:52:52.000Z
|
CircleciScripts/run_integrationtests.py
|
aimalygin/aws-sdk-ios
|
6cfaa3c56296300499f4885e9039c2dd24624cfa
|
[
"Apache-2.0"
] | 2 |
2019-11-07T15:23:33.000Z
|
2020-03-12T18:46:47.000Z
|
CircleciScripts/run_integrationtests.py
|
aimalygin/aws-sdk-ios
|
6cfaa3c56296300499f4885e9039c2dd24624cfa
|
[
"Apache-2.0"
] | 10 |
2018-03-06T14:27:12.000Z
|
2020-10-20T22:01:30.000Z
|
import demjson
import sys
from subprocess import Popen, PIPE
import subprocess
import xml.etree.ElementTree as ET
import os
from datetime import datetime
from functions import runcommand
#from sets import Set
#run test
########################## main function ###############################
# a command will like
# all five positional arguments read below are required
if len(sys.argv) < 6 or sys.argv[1] in ('-h', '--help'):  # '--help' assumed
    print("Usage:\r\n {0} <integrationTestsConfiguration json file path> <test result location> <group name> <destination> <derivedData path>".format(sys.argv[0]))
    exit(1)
jsonfilename=sys.argv[1]
test_result_folder=sys.argv[2]
group_name = sys.argv[3]
destination = sys.argv[4]
derivedDataPath = sys.argv[5]
with open(jsonfilename, 'r') as jsonfile:
jsonstring = jsonfile.read()
testConfigure = demjson.decode(jsonstring)
runningConfigure = testConfigure['runningConfigure']
projectName = runningConfigure['projectName']
projectPath = runningConfigure['projectPath']
schemeName = runningConfigure['schemeName']
sdkName = runningConfigure['sdkName']
print("group name:", group_name)
testgroup = testConfigure[group_name]
testlist = testgroup['test_list']
if 'projectName' in testgroup.keys() :
projectName = testgroup['projectName']
if 'projectPath' in testgroup.keys():
projectPath = testgroup['projectPath']
if 'schemeName' in testgroup.keys():
schemeName = testgroup['schemeName']
print("projectName, projectPath, schemeName, destination", projectName, projectPath, schemeName, destination)
# testcommandhead = f"xcodebuild test-without-building -project {projectName} -scheme {schemeName} -sdk {sdkName} -destination 'platform={paltformName},name={deviceName},OS={osVersion}'"
# testcommandtail = " | tee raw.log | xcpretty -r junit | tee xcpretty.log"
runcommand('echo "export testresult=0" >> $BASH_ENV')
testresult = 0
for testname in testlist:
print("-------------------------------", testname , "-------------------------------");
test = testlist[testname]
testarguments = ' -only-testing:' + testname
#create skipping tests parameters
    skippingtests = ""
    if 'excludetests' in test:
        for skippingtest in test['excludetests']:
            skippingtests += ' -skip-testing:' + testname + "/" + skippingtest
        print("excludetests:", skippingtests)
    exit_code = runtest(testarguments + skippingtests, projectPath, schemeName, projectName, destination, derivedDataPath)
print(testname, "exit code:", exit_code)
# if test fails, check if the failed tests can be retried
if exit_code == 65:
        retriabletimes = 3
if 'retriabletimes' in test:
retriabletimes = test['retriabletimes']
if retriabletimes > 1:
#get all failed test cases
faileds = getfailedcases()
            if len(faileds) == 0:
                print("the test command returned an error code, but zero failed test cases were reported")
                print("exit code:", exit_code)
                break
            print("failed tests:", faileds)
retrytimes = 1
print('retriabletimes:', retriabletimes)
while retrytimes <= retriabletimes and exit_code > 0:
print("retry ", testname, "for ", retrytimes, " times")
testarguments = ""
for failed in faileds:
testarguments += ' -only-testing:' + failed
retrytimes += 1
exit_code = runtest(testarguments,projectPath, schemeName, projectName, destination, derivedDataPath);
print("retry exit code:", exit_code)
                if exit_code != 0:
faileds = getfailedcases()
    if exit_code != 0:
print("exit code:", exit_code)
runcommand('mkdir -p {0}/{1}'.format(test_result_folder,testname))
runcommand('echo "{2}" >> {0}/{1}/exitcode.log'.format(test_result_folder,testname,exit_code))
runcommand('mv raw.log {0}/{1}/raw.log'.format(test_result_folder,testname))
runcommand('mv xcpretty.log {0}/{1}/xcpretty.log'.format(test_result_folder,testname))
runcommand('cp build/reports/junit.xml {0}/{1}/junit.xml'.format(test_result_folder,testname))
    ignorefailure = False
    if exit_code == 65:
        failedtests = getfailedcases(False)
        print("failedtests:", failedtests)
        if 'ignoreFailures' in test and failedtests:
            ignoreFailures = set(test['ignoreFailures'])
            if failedtests.issubset(ignoreFailures):
                print("There are failed testcases that can be ignored")
                ignorefailure = True
            else:
                print("Failed testcases that cannot be ignored: ", failedtests - ignoreFailures)
if not ignorefailure:
print("There are faillures in the test")
testresult = 1
else:
print("Test succeed")
print("testresult:", testresult)
runcommand('echo "export testresult={0}" >> $BASH_ENV'.format(testresult))
| 42.927632 | 222 | 0.632337 |
bc60d0aa50b7ae50518bf50520f50484fbc32b50
| 572 |
py
|
Python
|
seagrass/hooks/__init__.py
|
kernelmethod/Seagrass
|
52c5f1852fb2d52b3d94411c2a49c3da6fab6c6c
|
[
"BSD-2-Clause"
] | null | null | null |
seagrass/hooks/__init__.py
|
kernelmethod/Seagrass
|
52c5f1852fb2d52b3d94411c2a49c3da6fab6c6c
|
[
"BSD-2-Clause"
] | 21 |
2021-06-07T20:10:46.000Z
|
2021-07-07T22:14:25.000Z
|
seagrass/hooks/__init__.py
|
kernelmethod/Seagrass
|
52c5f1852fb2d52b3d94411c2a49c3da6fab6c6c
|
[
"BSD-2-Clause"
] | null | null | null |
# flake8: noqa: F401
from .context_manager_hook import ContextManagerHook
from .counter_hook import CounterHook
from .file_open_hook import FileOpenHook
from .logging_hook import LoggingHook
from .profiler_hook import ProfilerHook
from .runtime_audit_hook import RuntimeAuditHook
from .stack_trace_hook import StackTraceHook
from .timer_hook import TimerHook
from .tracing_hook import TracingHook
__all__ = [
    "ContextManagerHook",
    "CounterHook",
"FileOpenHook",
"LoggingHook",
"ProfilerHook",
"StackTraceHook",
"RuntimeAuditHook",
"TimerHook",
"TracingHook",
]
| 26 | 52 | 0.783217 |
bc613b321946268bd365a901afc58d30c0ee1a72
| 3,549 |
py
|
Python
|
tests/test_internal.py
|
aschleg/hypy
|
d5b8451dcd24b803bbf2eebc46bc3acfd64d8edc
|
[
"MIT"
] | 40 |
2018-08-04T15:36:31.000Z
|
2022-03-12T02:06:28.000Z
|
tests/test_internal.py
|
aschleg/hypy
|
d5b8451dcd24b803bbf2eebc46bc3acfd64d8edc
|
[
"MIT"
] | 5 |
2018-06-17T12:44:59.000Z
|
2021-09-28T01:10:26.000Z
|
tests/test_internal.py
|
aschleg/hypy
|
d5b8451dcd24b803bbf2eebc46bc3acfd64d8edc
|
[
"MIT"
] | 8 |
2018-08-19T14:28:44.000Z
|
2021-12-20T18:52:07.000Z
|
import pytest
import numpy as np
import pandas as pd
from hypothetical._lib import _build_des_mat
| 39.876404 | 112 | 0.412511 |
bc633f72ddfead99679ba43f47af451833e0fa30
| 3,563 |
py
|
Python
|
download.py
|
JamesWang007/Open3D-PointNet
|
402847ceef8d364672ca7d81e0afebcb445cceb5
|
[
"MIT"
] | 120 |
2019-04-06T16:04:01.000Z
|
2021-07-22T17:07:51.000Z
|
test/Open3D-PointNet-master/download.py
|
AhsanulIslam/Thesis_Computer_Vision
|
c308cce15146a33a3e474790b0f9535ee9e41eb7
|
[
"MIT"
] | null | null | null |
test/Open3D-PointNet-master/download.py
|
AhsanulIslam/Thesis_Computer_Vision
|
c308cce15146a33a3e474790b0f9535ee9e41eb7
|
[
"MIT"
] | 25 |
2019-04-08T09:39:47.000Z
|
2021-05-12T15:39:56.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2017 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
"""Download big files from Google Drive."""
import shutil
import sys
import requests
import os
import time
import urllib.request
import zipfile
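# The helpers this script relies on are elided from this excerpt; the sketch
# below shows the usual pattern for fetching large Google Drive files with
# `requests` (the function name and token handling are illustrative
# assumptions, not the original implementation).
def _download_from_google_drive(file_id, destination, chunk_size=32768):
    url = "https://docs.google.com/uc?export=download"
    session = requests.Session()
    response = session.get(url, params={"id": file_id}, stream=True)
    # Large files trigger a virus-scan interstitial; Google signals it via a
    # "download_warning" cookie whose value must be echoed back as a token.
    token = next((v for k, v in response.cookies.items()
                  if k.startswith("download_warning")), None)
    if token:
        response = session.get(url, params={"id": file_id, "confirm": token},
                               stream=True)
    with open(destination, "wb") as f:
        for chunk in response.iter_content(chunk_size):
            if chunk:  # skip keep-alive chunks
                f.write(chunk)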
if __name__ == '__main__':
download_contents()
| 31.8125 | 115 | 0.646646 |
bc63b363d6718bb79d14c412bc96475ee3170b28
| 763 |
py
|
Python
|
ls12/demo5.py
|
cklwblove/python-100-days-source-code
|
5d66c7708047f0d7bac0ce05d21834bbbfa6ccf1
|
[
"MIT"
] | null | null | null |
ls12/demo5.py
|
cklwblove/python-100-days-source-code
|
5d66c7708047f0d7bac0ce05d21834bbbfa6ccf1
|
[
"MIT"
] | null | null | null |
ls12/demo5.py
|
cklwblove/python-100-days-source-code
|
5d66c7708047f0d7bac0ce05d21834bbbfa6ccf1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
"""
import time
import tkinter
import tkinter.messagebox
if __name__ == '__main__':
main()
| 20.078947 | 67 | 0.621232 |
bc652014fdf4755fbb2d576c8ff7469edba046ae
| 3,250 |
py
|
Python
|
hangupsbot/sinks/gitlab/simplepush.py
|
mygreentour/hangoutsbot
|
9ea2da10f546e6f1dd06c8240187049501c5452a
|
[
"Unlicense"
] | null | null | null |
hangupsbot/sinks/gitlab/simplepush.py
|
mygreentour/hangoutsbot
|
9ea2da10f546e6f1dd06c8240187049501c5452a
|
[
"Unlicense"
] | null | null | null |
hangupsbot/sinks/gitlab/simplepush.py
|
mygreentour/hangoutsbot
|
9ea2da10f546e6f1dd06c8240187049501c5452a
|
[
"Unlicense"
] | null | null | null |
"""
GitLab webhook receiver - see http://doc.gitlab.com/ee/web_hooks/web_hooks.html
"""
import asyncio
import json
import logging
from sinks.base_bot_request_handler import AsyncRequestHandler
logger = logging.getLogger(__name__)
try:
import dateutil.parser
except ImportError:
logger.error("missing module python_dateutil: pip3 install python_dateutil")
raise
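# dateutil is presumably needed to parse the ISO-8601 timestamps that GitLab
# includes in webhook payloads, e.g. (illustrative):
#   dateutil.parser.parse("2015-04-09T13:40:12+02:00")  # -> tz-aware datetime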
| 37.790698 | 99 | 0.577538 |
bc6537e769f6b3ef7aa9a2e3afa098d8b075693f
| 1,220 |
py
|
Python
|
evsim/assessor.py
|
cbchoi/nppsim
|
4d096f9d2fdb5ebf3e3e83be7b1974bfc92554c1
|
[
"MIT"
] | 3 |
2020-01-21T13:06:37.000Z
|
2021-03-01T23:35:20.000Z
|
evsim/assessor.py
|
cbchoi/pohangsim
|
e978ff39ec94413ae44129510c56acb134770298
|
[
"MIT"
] | null | null | null |
evsim/assessor.py
|
cbchoi/pohangsim
|
e978ff39ec94413ae44129510c56acb134770298
|
[
"MIT"
] | null | null | null |
from evsim.system_simulator import SystemSimulator
from evsim.behavior_model_executor import BehaviorModelExecutor
from evsim.system_message import SysMessage
from evsim.definition import *
import os
import subprocess as sp
| 34.857143 | 126 | 0.657377 |
bc65af7557f0841ee2695968775683b6f5578bc6
| 19,786 |
py
|
Python
|
tei_entity_enricher/interface/postprocessing/gnd_connector.py
|
NEISSproject/TEIEntityEnricher
|
09a4a932b30886e50965959935dc803b36063e36
|
[
"Apache-2.0"
] | null | null | null |
tei_entity_enricher/interface/postprocessing/gnd_connector.py
|
NEISSproject/TEIEntityEnricher
|
09a4a932b30886e50965959935dc803b36063e36
|
[
"Apache-2.0"
] | null | null | null |
tei_entity_enricher/interface/postprocessing/gnd_connector.py
|
NEISSproject/TEIEntityEnricher
|
09a4a932b30886e50965959935dc803b36063e36
|
[
"Apache-2.0"
] | 1 |
2021-04-27T13:55:29.000Z
|
2021-04-27T13:55:29.000Z
|
import os
from typing import Union, List
from tei_entity_enricher.interface.postprocessing.io import FileReader, FileWriter
from tei_entity_enricher.util.helper import local_save_path, makedir_if_necessary
from tei_entity_enricher.util.exceptions import FileNotFound
| 58.712166 | 347 | 0.564237 |
bc669999ddb0624f8b1b18f8677a007603269fec
| 122 |
py
|
Python
|
neslter/workflow/__init__.py
|
WHOIGit/nes-lter-ims
|
d4cc96c10da56ca33286af84d669625b67170522
|
[
"MIT"
] | 3 |
2019-01-24T16:32:50.000Z
|
2021-11-05T02:18:12.000Z
|
neslter/workflow/__init__.py
|
WHOIGit/nes-lter-ims
|
d4cc96c10da56ca33286af84d669625b67170522
|
[
"MIT"
] | 45 |
2019-05-23T15:15:32.000Z
|
2022-03-15T14:09:20.000Z
|
neslter/workflow/__init__.py
|
WHOIGit/nes-lter-ims
|
d4cc96c10da56ca33286af84d669625b67170522
|
[
"MIT"
] | null | null | null |
import logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
logger.level = logging.DEBUG
| 24.4 | 40 | 0.819672 |
bc66bbff24da2cc4aab8ede584053c2dba3e5cf5
| 440 |
py
|
Python
|
inference/_archive/render_section.py
|
emitch/SEAMLeSS
|
cae21c67316ed36529fdc2e470a105a9f847975c
|
[
"MIT"
] | 4 |
2018-12-17T18:45:57.000Z
|
2021-04-29T16:30:42.000Z
|
inference/_archive/render_section.py
|
emitch/SEAMLeSS
|
cae21c67316ed36529fdc2e470a105a9f847975c
|
[
"MIT"
] | 19 |
2019-01-02T19:09:12.000Z
|
2020-12-14T18:50:47.000Z
|
inference/_archive/render_section.py
|
emitch/SEAMLeSS
|
cae21c67316ed36529fdc2e470a105a9f847975c
|
[
"MIT"
] | 2 |
2020-03-18T01:24:03.000Z
|
2022-01-06T06:19:58.000Z
|
from args import get_argparser, parse_args, get_aligner, get_bbox
if __name__ == '__main__':
parser = get_argparser()
args = parse_args(parser)
a = get_aligner(args)
bbox = get_bbox(args)
for z in range(args.bbox_start[2], args.bbox_stop[2]):
print('Rendering z={0}'.format(z))
render(a, bbox, z)
| 24.444444 | 66 | 0.697727 |
bc6934a711c5b2c64314e9faedf3a6f0838f298a
| 52,806 |
py
|
Python
|
venv/lib/python3.9/site-packages/google/cloud/spanner_admin_instance_v1/gapic/instance_admin_client.py
|
qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3
|
630dcef73e6a258b6e9a52f934e2dd912ce741f8
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.9/site-packages/google/cloud/spanner_admin_instance_v1/gapic/instance_admin_client.py
|
qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3
|
630dcef73e6a258b6e9a52f934e2dd912ce741f8
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.9/site-packages/google/cloud/spanner_admin_instance_v1/gapic/instance_admin_client.py
|
qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3
|
630dcef73e6a258b6e9a52f934e2dd912ce741f8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.spanner.admin.instance.v1 InstanceAdmin API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.cloud.spanner_admin_instance_v1.gapic import enums
from google.cloud.spanner_admin_instance_v1.gapic import instance_admin_client_config
from google.cloud.spanner_admin_instance_v1.gapic.transports import (
instance_admin_grpc_transport,
)
from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2
from google.cloud.spanner_admin_instance_v1.proto import spanner_instance_admin_pb2_grpc
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import options_pb2
from google.iam.v1 import policy_pb2
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-spanner").version
# Service calls
def create_instance(
self,
parent,
instance_id,
instance,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates an instance and begins preparing it to begin serving. The
returned ``long-running operation`` can be used to track the progress of
preparing the new instance. The instance name is assigned by the caller.
If the named instance already exists, ``CreateInstance`` returns
``ALREADY_EXISTS``.
Immediately upon completion of this request:
- The instance is readable via the API, with all requested attributes
but no allocated resources. Its state is ``CREATING``.
Until completion of the returned operation:
- Cancelling the operation renders the instance immediately unreadable
via the API.
- The instance can be deleted.
- All other attempts to modify the instance are rejected.
Upon completion of the returned operation:
- Billing for all successfully-allocated resources begins (some types
may have lower than the requested levels).
- Databases can be created in the instance.
- The instance's allocated resource levels are readable via the API.
- The instance's state becomes ``READY``.
The returned ``long-running operation`` will have a name of the format
``<instance_name>/operations/<operation_id>`` and can be used to track
creation of the instance. The ``metadata`` field type is
``CreateInstanceMetadata``. The ``response`` field type is ``Instance``,
if successful.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `instance_id`:
>>> instance_id = ''
>>>
>>> # TODO: Initialize `instance`:
>>> instance = {}
>>>
>>> response = client.create_instance(parent, instance_id, instance)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Required. The name of the project in which to create the instance.
Values are of the form ``projects/<project>``.
instance_id (str): Required. The ID of the instance to create. Valid identifiers are of
the form ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64
characters in length.
instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to create. The name may be omitted, but if
specified must be ``<parent>/instances/<instance_id>``.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.operation.Operation` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_instance" not in self._inner_api_calls:
self._inner_api_calls[
"create_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_instance,
default_retry=self._method_configs["CreateInstance"].retry,
default_timeout=self._method_configs["CreateInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.CreateInstanceRequest(
parent=parent, instance_id=instance_id, instance=instance
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["create_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
spanner_instance_admin_pb2.Instance,
metadata_type=spanner_instance_admin_pb2.CreateInstanceMetadata,
)
def update_instance(
self,
instance,
field_mask,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Updates an instance, and begins allocating or releasing resources as
requested. The returned ``long-running operation`` can be used to track
the progress of updating the instance. If the named instance does not
exist, returns ``NOT_FOUND``.
Immediately upon completion of this request:
- For resource types for which a decrease in the instance's allocation
has been requested, billing is based on the newly-requested level.
Until completion of the returned operation:
- Cancelling the operation sets its metadata's ``cancel_time``, and
begins restoring resources to their pre-request values. The operation
is guaranteed to succeed at undoing all resource changes, after which
point it terminates with a ``CANCELLED`` status.
- All other attempts to modify the instance are rejected.
- Reading the instance via the API continues to give the pre-request
resource levels.
Upon completion of the returned operation:
- Billing begins for all successfully-allocated resources (some types
may have lower than the requested levels).
- All newly-reserved resources are available for serving the instance's
tables.
- The instance's new resource levels are readable via the API.
The returned ``long-running operation`` will have a name of the format
``<instance_name>/operations/<operation_id>`` and can be used to track
the instance modification. The ``metadata`` field type is
``UpdateInstanceMetadata``. The ``response`` field type is ``Instance``,
if successful.
Authorization requires ``spanner.instances.update`` permission on
resource ``name``.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> # TODO: Initialize `instance`:
>>> instance = {}
>>>
>>> # TODO: Initialize `field_mask`:
>>> field_mask = {}
>>>
>>> response = client.update_instance(instance, field_mask)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
instance (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Instance]): Required. The instance to update, which must always include the
instance name. Otherwise, only fields mentioned in ``field_mask`` need
be included.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.Instance`
field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): Required. A mask specifying which fields in ``Instance`` should be
updated. The field mask must always be specified; this prevents any
future fields in ``Instance`` from being erased accidentally by clients
that do not know about them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.operation.Operation` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "update_instance" not in self._inner_api_calls:
self._inner_api_calls[
"update_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_instance,
default_retry=self._method_configs["UpdateInstance"].retry,
default_timeout=self._method_configs["UpdateInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.UpdateInstanceRequest(
instance=instance, field_mask=field_mask
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("instance.name", instance.name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
operation = self._inner_api_calls["update_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
spanner_instance_admin_pb2.Instance,
metadata_type=spanner_instance_admin_pb2.UpdateInstanceMetadata,
)
def list_instance_configs(
self,
parent,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists the supported instance configurations for a given project.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_instance_configs(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_instance_configs(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Required. The name of the project for which a list of supported
instance configurations is requested. Values are of the form
``projects/<project>``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.page_iterator.PageIterator` instance.
An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instances.
You can also iterate over the pages of the response
using its `pages` property.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_instance_configs" not in self._inner_api_calls:
self._inner_api_calls[
"list_instance_configs"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_instance_configs,
default_retry=self._method_configs["ListInstanceConfigs"].retry,
default_timeout=self._method_configs["ListInstanceConfigs"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.ListInstanceConfigsRequest(
parent=parent, page_size=page_size
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_instance_configs"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="instance_configs",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def get_instance_config(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets information about a particular instance configuration.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> name = client.instance_config_path('[PROJECT]', '[INSTANCE_CONFIG]')
>>>
>>> response = client.get_instance_config(name)
Args:
name (str): Required. The name of the requested instance configuration. Values
are of the form ``projects/<project>/instanceConfigs/<config>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.InstanceConfig` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_instance_config" not in self._inner_api_calls:
self._inner_api_calls[
"get_instance_config"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_instance_config,
default_retry=self._method_configs["GetInstanceConfig"].retry,
default_timeout=self._method_configs["GetInstanceConfig"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.GetInstanceConfigRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_instance_config"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_instances(
self,
parent,
page_size=None,
filter_=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists all instances in the given project.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_instances(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_instances(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Required. The name of the project for which a list of instances is
requested. Values are of the form ``projects/<project>``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
filter_ (str): An expression for filtering the results of the request. Filter rules
are case insensitive. The fields eligible for filtering are:
- ``name``
- ``display_name``
- ``labels.key`` where key is the name of a label
Some examples of using filters are:
- ``name:*`` --> The instance has a name.
- ``name:Howl`` --> The instance's name contains the string "howl".
- ``name:HOWL`` --> Equivalent to above.
- ``NAME:howl`` --> Equivalent to above.
- ``labels.env:*`` --> The instance has the label "env".
- ``labels.env:dev`` --> The instance has the label "env" and the value
of the label contains the string "dev".
- ``name:howl labels.env:dev`` --> The instance's name contains "howl"
and it has the label "env" with its value containing "dev".
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.page_iterator.PageIterator` instance.
An iterable of :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instances.
You can also iterate over the pages of the response
using its `pages` property.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "list_instances" not in self._inner_api_calls:
self._inner_api_calls[
"list_instances"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_instances,
default_retry=self._method_configs["ListInstances"].retry,
default_timeout=self._method_configs["ListInstances"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.ListInstancesRequest(
parent=parent, page_size=page_size, filter=filter_
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_instances"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="instances",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def get_instance(
self,
name,
field_mask=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets information about a particular instance.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> name = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> response = client.get_instance(name)
Args:
name (str): Required. The name of the requested instance. Values are of the form
``projects/<project>/instances/<instance>``.
field_mask (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.FieldMask]): If field_mask is present, specifies the subset of ``Instance``
fields that should be returned. If absent, all ``Instance`` fields are
returned.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.Instance` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_instance" not in self._inner_api_calls:
self._inner_api_calls[
"get_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_instance,
default_retry=self._method_configs["GetInstance"].retry,
default_timeout=self._method_configs["GetInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.GetInstanceRequest(
name=name, field_mask=field_mask
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def delete_instance(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes an instance.
Immediately upon completion of the request:
- Billing ceases for all of the instance's reserved resources.
Soon afterward:
- The instance and *all of its databases* immediately and irrevocably
disappear from the API. All data in the databases is permanently
deleted.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> name = client.instance_path('[PROJECT]', '[INSTANCE]')
>>>
>>> client.delete_instance(name)
Args:
name (str): Required. The name of the instance to be deleted. Values are of the
form ``projects/<project>/instances/<instance>``
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "delete_instance" not in self._inner_api_calls:
self._inner_api_calls[
"delete_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_instance,
default_retry=self._method_configs["DeleteInstance"].retry,
default_timeout=self._method_configs["DeleteInstance"].timeout,
client_info=self._client_info,
)
request = spanner_instance_admin_pb2.DeleteInstanceRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
self._inner_api_calls["delete_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def set_iam_policy(
self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sets the access control policy on an instance resource. Replaces any
existing policy.
Authorization requires ``spanner.instances.setIamPolicy`` on
``resource``.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> # TODO: Initialize `resource`:
>>> resource = ''
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this field.
policy (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "set_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"set_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs["SetIamPolicy"].retry,
default_timeout=self._method_configs["SetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["set_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_iam_policy(
self,
resource,
options_=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets the access control policy for an instance resource. Returns an
empty policy if an instance exists but does not have a policy set.
Authorization requires ``spanner.instances.getIamPolicy`` on
``resource``.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> # TODO: Initialize `resource`:
>>> resource = ''
>>>
>>> response = client.get_iam_policy(resource)
Args:
resource (str): REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this field.
options_ (Union[dict, ~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to
``GetIamPolicy``. This field is only used by Cloud IAM.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.spanner_admin_instance_v1.types.GetPolicyOptions`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"get_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_iam_policy,
default_retry=self._method_configs["GetIamPolicy"].retry,
default_timeout=self._method_configs["GetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.GetIamPolicyRequest(
resource=resource, options=options_
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def test_iam_permissions(
self,
resource,
permissions,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Returns permissions that the caller has on the specified instance
resource.
Attempting this RPC on a non-existent Cloud Spanner instance resource
will result in a NOT_FOUND error if the user has
``spanner.instances.list`` permission on the containing Google Cloud
Project. Otherwise returns an empty set of permissions.
Example:
>>> from google.cloud import spanner_admin_instance_v1
>>>
>>> client = spanner_admin_instance_v1.InstanceAdminClient()
>>>
>>> # TODO: Initialize `resource`:
>>> resource = ''
>>>
>>> # TODO: Initialize `permissions`:
>>> permissions = []
>>>
>>> response = client.test_iam_permissions(resource, permissions)
Args:
resource (str): REQUIRED: The resource for which the policy detail is being requested.
See the operation documentation for the appropriate value for this field.
permissions (list[str]): The set of permissions to check for the ``resource``. Permissions
with wildcards (such as '*' or 'storage.*') are not allowed. For more
information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.spanner_admin_instance_v1.types.TestIamPermissionsResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "test_iam_permissions" not in self._inner_api_calls:
self._inner_api_calls[
"test_iam_permissions"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.test_iam_permissions,
default_retry=self._method_configs["TestIamPermissions"].retry,
default_timeout=self._method_configs["TestIamPermissions"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["test_iam_permissions"](
request, retry=retry, timeout=timeout, metadata=metadata
)
| 43.142157 | 165 | 0.613926 |
bc69b9f3ab057490f4ec7854149028c2c310ae9c
| 31,196 |
py
|
Python
|
src/ScaleHD/__backend.py
|
helloabunai/ScaleHD
|
b48c1a1ed742bdbda0a4cd42555d1e12d2e3024d
|
[
"MIT"
] | 3 |
2017-07-03T19:45:13.000Z
|
2020-05-12T16:56:19.000Z
|
src/ScaleHD/__backend.py
|
helloabunai/ScaleHD
|
b48c1a1ed742bdbda0a4cd42555d1e12d2e3024d
|
[
"MIT"
] | 1 |
2019-06-21T14:49:50.000Z
|
2019-06-24T08:24:37.000Z
|
src/ScaleHD/__backend.py
|
helloabunai/ScaleHD
|
b48c1a1ed742bdbda0a4cd42555d1e12d2e3024d
|
[
"MIT"
] | 2 |
2017-06-05T21:56:36.000Z
|
2021-03-22T20:34:13.000Z
|
#!/usr/bin/python
__version__ = '1.0'
__author__ = '[email protected]'
##
## Imports
import string
import os
import errno
import shutil
import sys
import glob
import datetime
import subprocess
import logging as log
import numpy as np
import csv
from io import StringIO
import PyPDF2
from sklearn import preprocessing
from collections import defaultdict
from xml.etree import cElementTree
from lxml import etree
from reportlab.pdfgen import canvas
def parse_boolean(boolean_value):
"""
Given a string (boolean_value), returns a boolean value representing the string contents.
For example, a string with 'true', 't', 'y' or 'yes' will yield True.
"""
	boolean_value = boolean_value.lower() in ('yes', 'y', 'true', 't', '1')
return boolean_value
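## Illustrative behaviour, following the membership test above:
## parse_boolean('Yes') -> True; parse_boolean('T') -> True; parse_boolean('0') -> False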
def empty_string_check(string, raise_exception=True):
"""
Simple check to see if the string provided by parameter string is empty. False indicates the string is NOT empty.
Parameter raise_exception determines if a ValueError exception should be raised if the string is empty.
If raise_exception is False and the string is empty, True is returned.
"""
if string != '':
return False
if raise_exception:
raise ValueError("Empty string detected!")
return True
def sanitise_inputs(parsed_arguments):
"""
	Validates inputs via filesystem_exists_check and check_input_files;
	if either returns False, the path is invalid or unsupported files are
	present, so the caller should quit.
"""
trigger = False
##
## Jobname prefix validity check
if parsed_arguments.jobname:
for character in parsed_arguments.jobname:
			if character == ' ' or character == '/':
log.error('{}{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified Job Name has invalid characters: "', character, '"'))
trigger = True
##
## Config mode check
if parsed_arguments.config:
if not filesystem_exists_check(parsed_arguments.config[0]):
log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file could not be found.'))
trigger = True
for xmlfile in parsed_arguments.config:
if not check_input_files('.xml',xmlfile):
log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Specified config file is not an XML file.'))
trigger = True
return trigger
def extract_data(input_data_directory):
target_files = glob.glob(os.path.join(input_data_directory, '*'))
for extract_target in target_files:
if extract_target.lower().endswith(('.fq.gz', '.fastq.gz')):
log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Detected compressed input data. Extracting!'))
break
for extract_target in target_files:
unzipd = subprocess.Popen(['gzip', '-q', '-f', '-d', extract_target], stderr=subprocess.PIPE)
unzipd.wait()
return True
def sequence_pairings(data_path, instance_rundir):
##
## Get input files from data path
## Sort so that ordering isn't screwy on linux
input_files = glob.glob(os.path.join(data_path, '*'))
sorted_input = sorted(input_files)
sequence_pairs = []
file_count = len(sorted_input)
	if file_count % 2 != 0:
log.error('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'I/O: Non-even number of input files specified. Cannot continue without pairing!'))
sys.exit(2)
##
## Optimise so code isn't recycled
for i in range(0, len(sorted_input), 2):
file_pair = {}
forward_data = sorted_input[i]
reverse_data = sorted_input[i+1]
##
## Check forward ends with R1
forward_data_name = sorted_input[i].split('/')[-1].split('.')[0]
if not forward_data_name.endswith('_R1'):
log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Forward input file does not end in _R1. ', forward_data))
sys.exit(2)
##
## Check reverse ends with R2
reverse_data_name = sorted_input[i+1].split('/')[-1].split('.')[0]
if not reverse_data_name.endswith('_R2'):
log.error('{}{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'I/O: Reverse input file does not end in _R2. ', reverse_data))
sys.exit(2)
##
## Make Stage outputs for use in everywhere else in pipeline
sample_root = '_'.join(forward_data_name.split('_')[:-1])
instance_path = os.path.join(instance_rundir)
seq_qc_path = os.path.join(instance_rundir, sample_root, 'SeqQC')
align_path = os.path.join(instance_rundir, sample_root, 'Align')
predict_path = os.path.join(instance_rundir, sample_root, 'Predict')
file_pair[sample_root] = [forward_data, reverse_data, instance_path, seq_qc_path, align_path, predict_path]
sequence_pairs.append(file_pair)
return sequence_pairs
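## Illustrative pairing (sample names hypothetical): inputs sampleA_R1.fastq and
## sampleA_R2.fastq yield one entry of the form
## {'sampleA': [fw_path, rv_path, instance_path, seqqc_path, align_path, predict_path]}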
def filesystem_exists_check(path, raise_exception=True):
"""
Checks to see if the path, specified by parameter path, exists. Can be either a directory or file.
If the path exists, True is returned. If the path does not exist, and raise_exception is set to True,
an IOError is raised - else False is returned.
"""
if os.path.lexists(path):
return True
if raise_exception:
log.error('{}{}{}{}'.format(Colour.red,'shd__ ',Colour.end,'Specified input path could not be found.'))
return False
def check_input_files(input_format, input_file):
if input_file.endswith(input_format):
return True
return False
def initialise_libraries(instance_params):
trigger = False
##
## Subfunction for recycling code
## Calls UNIX type for checking binaries present
## Changed from WHICH as apparently type functions over different shells/config files
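	## The nested helper itself is elided from this excerpt; below is a minimal
	## sketch of what the comments above describe (an assumption, not the
	## original implementation):
	def type_func(binary):
		## Probe the shell builtin 'type'; raise NameError (caught by the
		## callers below) when the binary is absent from PATH.
		proc = subprocess.Popen('type {}'.format(binary), shell=True,
			stdout=subprocess.PIPE, stderr=subprocess.PIPE)
		proc.communicate()
		if proc.returncode != 0:
			raise NameError('Required binary not found: {}'.format(binary))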
##
## To determine which binaries to check for
## AttributeError in the situation where instance_params origin differs
## try for -c style, except AttributeError for -b style
try:
quality_control = instance_params.config_dict['instance_flags']['@quality_control']
alignment = instance_params.config_dict['instance_flags']['@sequence_alignment']
genotyping = instance_params.config_dict['instance_flags']['@genotype_prediction']
snp_calling = instance_params.config_dict['instance_flags']['@snp_calling']
except AttributeError:
quality_control = instance_params['quality_control']
alignment = instance_params['sequence_alignment']
genotyping = instance_params['genotype_prediction']
snp_calling = instance_params['snp_calling']
if quality_control == 'True':
try:type_func('java')
except NameError: trigger=True
try:type_func('fastqc')
except NameError: trigger=True
try:type_func('cutadapt')
except NameError: trigger=True
if alignment == 'True':
try:type_func('seqtk')
except NameError: trigger=True
try:type_func('bwa')
except NameError: trigger=True
try:type_func('samtools')
except NameError: trigger=True
try:type_func('generatr')
except NameError: trigger=True
if genotyping == 'True':
try:type_func('samtools')
except NameError: trigger=True
try:type_func('generatr')
except NameError: trigger=True
if snp_calling == 'True':
try: type_func('picard')
except NameError: trigger=True
try: type_func('freebayes')
except NameError: trigger=True
return trigger
def sanitise_outputs(jobname, output_argument):
run_dir = ''
output_root = output_argument[0]
if jobname:
target_output = os.path.join(output_root, jobname)
if not os.path.exists(target_output):
log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating Output with prefix: ', jobname))
run_dir = os.path.join(output_root, jobname)
mkdir_p(run_dir)
else:
purge_choice = ''
while True:
purge_choice = input('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Job folder already exists. Delete existing folder? Y/N: '))
if not (purge_choice.lower() == 'y') and not (purge_choice.lower() == 'n'):
log.info('{}{}{}{}'.format(Colour.red, 'shd__ ', Colour.end, 'Invalid input. Please input Y or N.'))
continue
else:
break
if purge_choice.lower() == 'y':
log.info('{}{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Clearing pre-existing Jobname Prefix: ', jobname))
run_dir = os.path.join(output_root, jobname)
if os.path.exists(run_dir):
shutil.rmtree(run_dir, ignore_errors=True)
mkdir_p(run_dir)
else:
raise Exception('User chose not to delete pre-existing Job folder. Cannot write output.')
else:
## Ensures root output is a real directory
## Generates folder name based on date (for run ident)
date = datetime.date.today().strftime('%d-%m-%Y')
walltime = datetime.datetime.now().strftime('%H%M%S')
today = date + '-' + walltime
## If the user specified root doesn't exist, make it
## Then make the run directory for datetime
if not os.path.exists(output_root):
log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating output root... '))
mkdir_p(output_root)
run_dir = os.path.join(output_root, 'ScaleHDRun_'+today)
log.info('{}{}{}{}'.format(Colour.bold, 'shd__ ', Colour.end, 'Creating instance run directory.. '))
mkdir_p(run_dir)
## Inform user it's all gonna be okaaaayyyy
log.info('{}{}{}{}'.format(Colour.green, 'shd__ ', Colour.end, 'Output directories OK!'))
return run_dir
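## Illustrative result: with no jobname, a run on 14 March 2024 at 10:30:00
## produces an output directory named 'ScaleHDRun_14-03-2024-103000'.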
def replace_fqfile(mutate_list, target_fqfile, altered_path):
if target_fqfile in mutate_list:
loc = mutate_list.index(target_fqfile)
mutate_list[loc] = altered_path
return mutate_list
def scrape_summary_data(stage, input_report_file):
##
## If the argument input_report_file is from trimming..
if stage == 'trim':
with open(input_report_file, 'r') as trpf:
trim_lines = trpf.readlines()
##
## Determine buffer size to slice from above array
scraping_buffer = 8
if '-q' in trim_lines[1]:
scraping_buffer += 1
##
## Get Anchor
summary_start = 0
for i in range(0, len(trim_lines)):
if '== Summary ==' in trim_lines[i]:
summary_start = i
##
## Slice and close
summary_data = trim_lines[summary_start:summary_start + scraping_buffer]
trpf.close()
return summary_data[2:]
##
## If the argument input_report_file is from alignment..
if stage == 'align':
with open(input_report_file, 'r') as alnrpf:
align_lines = alnrpf.readlines()
alnrpf.close()
##
## No ranges required, only skip first line
return align_lines[1:]
##
## No need to tidy up report for genotyping
## since we already have the data from our own objects
if stage == 'gtype':
pass
def generate_atypical_xml(label, allele_object, index_path, direction):
"""
:param allele_object:
:param index_path:
:return:
"""
##TODO docstring
atypical_path = os.path.join(index_path, '{}{}_{}.xml'.format(direction, label, allele_object.get_reflabel()))
fp_flank = 'GCGACCCTGGAAAAGCTGATGAAGGCCTTCGAGTCCCTCAAGTCCTTC'
cagstart = ''; cagend = ''
intv = allele_object.get_intervening()
ccgstart = ''; ccgend = ''
ccglen = allele_object.get_ccg()
cctlen = allele_object.get_cct()
tp_flank = 'CAGCTTCCTCAGCCGCCGCCGCAGGCACAGCCGCTGCT'
if direction == 'fw':
cagstart = '1'; cagend = '200'
ccgstart = '1'; ccgend = '20'
if direction == 'rv':
cagstart = '100'; cagend = '100'
ccgstart = '1'; ccgend = '20'
##
## Create XML
data_root = etree.Element('data')
loci_root = etree.Element('loci', label=allele_object.get_reflabel()); data_root.append(loci_root)
##
## Loci Nodes
fp_input = etree.Element('input', type='fiveprime', flank=fp_flank)
cag_region = etree.Element('input', type='repeat_region', order='1', unit='CAG', start=cagstart, end=cagend)
intervening = etree.Element('input', type='intervening', sequence=intv, prior='1')
ccg_region = etree.Element('input', type='repeat_region', order='2', unit='CCG', start=ccgstart, end=ccgend)
cct_region = etree.Element('input', type='repeat_region', order='3', unit='CCT', start=str(cctlen), end=str(cctlen))
tp_input = etree.Element('input', type='threeprime', flank=tp_flank)
for node in [fp_input, cag_region, intervening, ccg_region, cct_region, tp_input]:
loci_root.append(node)
	s = etree.tostring(data_root, pretty_print=True)
	with open(atypical_path, 'w') as xmlfi:
		xmlfi.write(s.decode())
return atypical_path
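
# Illustrative call (hypothetical arguments; any object exposing the four
# getters named in the docstring will do for allele_object):
#
#   xml_path = generate_atypical_xml('atypical', allele, '/run/index', 'fw')
#
# This writes e.g. /run/index/fwatypical_<reflabel>.xml describing the two
# flanks, the CAG/CCG/CCT repeat regions and the intervening sequence.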
avg_line_length: 38.277301 | max_line_length: 155 | alphanum_fraction: 0.702526

hexsha: bc6acbe604cb779e768957863e598871559e6e15
size: 3966
ext: py
lang: Python
max_stars_repo_path: tests/test_models/test_backbones/test_encoder_decoders/test_deepfill_encoder.py
max_stars_repo_name: Jian137/mmediting-1
max_stars_repo_head_hexsha: e1ac6c93441ec96696d0b530f040b91b809015b6
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: 1884
max_stars_repo_stars_event_min_datetime: 2020-07-09T18:53:43.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-31T12:06:18.000Z
max_issues_repo_path: tests/test_models/test_backbones/test_encoder_decoders/test_deepfill_encoder.py
max_issues_repo_name: Jian137/mmediting-1
max_issues_repo_head_hexsha: e1ac6c93441ec96696d0b530f040b91b809015b6
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: 622
max_issues_repo_issues_event_min_datetime: 2020-07-09T18:52:27.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-31T14:41:09.000Z
max_forks_repo_path: tests/test_models/test_backbones/test_encoder_decoders/test_deepfill_encoder.py
max_forks_repo_name: Jian137/mmediting-1
max_forks_repo_head_hexsha: e1ac6c93441ec96696d0b530f040b91b809015b6
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: 361
max_forks_repo_forks_event_min_datetime: 2020-07-09T19:21:47.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-31T09:58:27.000Z
# Copyright (c) OpenMMLab. All rights reserved.
import torch
from mmedit.models.backbones import ContextualAttentionNeck, DeepFillEncoder
from mmedit.models.common import SimpleGatedConvModule
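

# The test bodies are truncated in this excerpt. A minimal smoke-test sketch
# follows; it is an assumption, not the original test: the 5-channel input
# follows DeepFill's masked-image convention, and the dict output with an
# 'out' key is how mmedit encoders typically report features.
def test_deepfill_encoder_smoke():
    encoder = DeepFillEncoder()
    img = torch.randn(2, 5, 256, 256)
    outputs = encoder(img)
    assert isinstance(outputs, dict)
    assert 'out' in outputs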
avg_line_length: 35.72973 | max_line_length: 76 | alphanum_fraction: 0.620524

hexsha: bc6c634a8700c04a493d0272caa915d4e9038c51
size: 580
ext: py
lang: Python
max_stars_repo_path: mvp/migrations/0004_auto_20201127_0649.py
max_stars_repo_name: Wastecoinng/mvp_beta
max_stars_repo_head_hexsha: 2faa4b9eeac99b2c284bafad955b90f9951991fc
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: mvp/migrations/0004_auto_20201127_0649.py
max_issues_repo_name: Wastecoinng/mvp_beta
max_issues_repo_head_hexsha: 2faa4b9eeac99b2c284bafad955b90f9951991fc
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: mvp/migrations/0004_auto_20201127_0649.py
max_forks_repo_name: Wastecoinng/mvp_beta
max_forks_repo_head_hexsha: 2faa4b9eeac99b2c284bafad955b90f9951991fc
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
# Generated by Django 2.2.13 on 2020-11-27 05:49
from django.db import migrations, models
avg_line_length: 24.166667 | max_line_length: 105 | alphanum_fraction: 0.591379

hexsha: bc6daca01b612effa21edcc2bb7e569ddcd2750f
size: 751
ext: py
lang: Python
max_stars_repo_path: adminapp/migrations/0012_auto_20210714_1155.py
max_stars_repo_name: mofresh27/MuseumExperience-Group2-Python-BE-1
max_stars_repo_head_hexsha: d6ca7aceeddfcfdefdf112ab5e40cf74d6b472ce
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: adminapp/migrations/0012_auto_20210714_1155.py
max_issues_repo_name: mofresh27/MuseumExperience-Group2-Python-BE-1
max_issues_repo_head_hexsha: d6ca7aceeddfcfdefdf112ab5e40cf74d6b472ce
max_issues_repo_licenses: ["MIT"]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2021-07-19T14:27:28.000Z
max_issues_repo_issues_event_max_datetime: 2021-07-19T14:27:28.000Z
max_forks_repo_path: adminapp/migrations/0012_auto_20210714_1155.py
max_forks_repo_name: mofresh27/MuseumExperience-Group2-Python-BE-1
max_forks_repo_head_hexsha: d6ca7aceeddfcfdefdf112ab5e40cf74d6b472ce
max_forks_repo_licenses: ["MIT"]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2021-07-14T21:56:46.000Z
max_forks_repo_forks_event_max_datetime: 2021-07-15T16:11:41.000Z
# Generated by Django 3.2.4 on 2021-07-14 11:55
from django.db import migrations, models
import uuid
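

# The Migration class itself falls outside this excerpt. For reference, a
# Django migration of this shape continues roughly as follows; the dependency
# name, model and field below are hypothetical placeholders, not the originals:
#
# class Migration(migrations.Migration):
#
#     dependencies = [
#         ('adminapp', '0011_previous_migration'),
#     ]
#
#     operations = [
#         migrations.AlterField(
#             model_name='somemodel',
#             name='uuid',
#             field=models.UUIDField(default=uuid.uuid4, editable=False),
#         ),
#     ]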
avg_line_length: 25.033333 | max_line_length: 78 | alphanum_fraction: 0.565912

hexsha: bc6de8ef28a6c9ca4fc7727dee2d21bb765f85a1
size: 1585
ext: py
lang: Python
max_stars_repo_path: scripts/json_parse.py
max_stars_repo_name: andrewsimonds14/Capstone
max_stars_repo_head_hexsha: 5ae56b9be40846e9993a8f23aaa8e1ef92cd9ea3
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: scripts/json_parse.py
max_issues_repo_name: andrewsimonds14/Capstone
max_issues_repo_head_hexsha: 5ae56b9be40846e9993a8f23aaa8e1ef92cd9ea3
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: scripts/json_parse.py
max_forks_repo_name: andrewsimonds14/Capstone
max_forks_repo_head_hexsha: 5ae56b9be40846e9993a8f23aaa8e1ef92cd9ea3
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
import json
import os
import nibabel as nib
import csv
from operator import itemgetter
# PATH TO PREPROCESSED DATA
raw_data_path = '/home/lab/nnUNet_data/nnUNet_raw_data_base/nnUNet_raw_data/Task500_BrainMets'
pixdim_ind = [1,2,3] # Indexes at which the voxel size [x,y,z] is stored
# PATH TO JSON FILE
with open('/home/lab/nnUNet_data/RESULTS_FOLDER/nnUNet/3d_fullres/Task500_BrainMets/nnUNetTrainerV2__nnUNetPlansv2.1/fold_4/validation_raw/summary.json') as file:
data = json.load(file)
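
# Shape of the summary.json this script expects (abridged; the field names are
# inferred from the accesses below, the values are illustrative):
# {
#   "results": {
#     "all": [
#       {
#         "reference": "/.../BrainMets_001.nii.gz",
#         "1": {"Dice": 0.87, ...}
#       },
#       ...
#     ]
#   }
# }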
with open('json_parsed.csv', mode='w') as csv_file:
csv_writer = csv.writer(csv_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
csv_writer.writerow(['Case Number', 'Dice Score', 'Voxel Size-X', 'Voxel Size-Y', 'Voxel Size-Z'])
for img in data['results']['all']:
# Get dice score on image
dice = img['1']['Dice']
# Get nifti data on image
img_filename = (os.path.basename(img['reference']).split('.'))[0]
img_ni = nib.load(raw_data_path + '/imagesTr/' + img_filename + '_0000.nii.gz')
label_ni = nib.load(raw_data_path + '/labelsTr/' + img_filename + '.nii.gz')
voxel_size = itemgetter(*pixdim_ind)(img_ni.header["pixdim"])
        # Get tumor dimensions (not yet implemented; label_ni is loaded above for this purpose)
        # tumor_size =
        # Get case number corresponding to image
        case_number = img_filename.split('_')[1]
        # Write to csv file, reusing the writer created before the loop
        csv_writer.writerow([case_number, dice, voxel_size[0], voxel_size[1], voxel_size[2]])
avg_line_length: 36.860465 | max_line_length: 162 | alphanum_fraction: 0.683281

hexsha: bc6e15840fb47699b6ed6ae5254ac356715fcfad
size: 2794
ext: py
lang: Python
max_stars_repo_path: tests/gen_test.py
max_stars_repo_name: tinylambda/tornadio2
max_stars_repo_head_hexsha: 7b112e2e207bd7500288b42896f9970c16e623ad
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tests/gen_test.py
max_issues_repo_name: tinylambda/tornadio2
max_issues_repo_head_hexsha: 7b112e2e207bd7500288b42896f9970c16e623ad
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/gen_test.py
max_forks_repo_name: tinylambda/tornadio2
max_forks_repo_head_hexsha: 7b112e2e207bd7500288b42896f9970c16e623ad
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
# -*- coding: utf-8 -*-
"""
tornadio2.tests.gen
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by the Serge S. Koval, see AUTHORS for more details.
:license: Apache, see LICENSE for more details.
"""
from collections import deque
from nose.tools import eq_
from tornadio2 import gen
_queue = None
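

# The queue helpers and Dummy classes exercised by the tests below sit outside
# this excerpt. What follows is a minimal sketch of what they need to look
# like -- a reconstruction assuming tornadio2's gen.sync_engine / gen.engine /
# gen.Task API, not the original code.

def init_environment():
    # Reset the fake callback queue before each test.
    global _queue
    _queue = deque()


def run_sync(value, callback):
    # Complete the task immediately (fully synchronous path).
    callback(value)


def queue_async(value, callback):
    # Defer completion by parking the callback on the queue.
    _queue.append((callback, value))


def run_async():
    # Drain queued callbacks in FIFO order.
    while _queue:
        callback, value = _queue.popleft()
        callback(value)


def run_async_oor():
    # Drain queued callbacks in LIFO (out-of-order) order.
    while _queue:
        callback, value = _queue.pop()
        callback(value)


class Dummy(object):
    def __init__(self, queue_type):
        self.queue_type = queue_type
        self.v = None

    @gen.sync_engine
    def test(self, value):
        self.v = yield gen.Task(self.queue_type, value)


class DummyList(object):
    def __init__(self, queue_type):
        self.queue_type = queue_type
        self.v = []

    @gen.sync_engine
    def test(self, value):
        self.v.append((yield gen.Task(self.queue_type, value)))


class DummyListOutOfOrder(object):
    def __init__(self, queue_type):
        self.queue_type = queue_type
        self.v = []

    @gen.engine
    def test(self, value):
        self.v.append((yield gen.Task(self.queue_type, value)))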
def test():
init_environment()
dummy = Dummy(run_sync)
dummy.test('test')
eq_(dummy.v, 'test')
def test_async():
init_environment()
dummy = Dummy(queue_async)
dummy.test('test')
run_async()
# Verify value
eq_(dummy.v, 'test')
def test_sync_queue():
init_environment()
dummy = DummyList(queue_async)
dummy.test('1')
dummy.test('2')
dummy.test('3')
run_async()
# Verify value
eq_(dummy.v, ['1', '2', '3'])
def test_sync_queue_oor():
init_environment()
dummy = DummyList(queue_async)
dummy.test('1')
dummy.test('2')
dummy.test('3')
run_async_oor()
# Verify value
eq_(dummy.v, ['1', '2', '3'])
def test_async_queue_oor():
init_environment()
dummy = DummyListOutOfOrder(queue_async)
dummy.test('1')
dummy.test('2')
dummy.test('3')
run_async_oor()
# Verify value
eq_(dummy.v, ['3', '2', '1'])
avg_line_length: 17.910256 | max_line_length: 77 | alphanum_fraction: 0.598067

hexsha: bc6e2a6ace5b77db9c88569ae6f6456c11dc1f48
size: 21122
ext: py
lang: Python
max_stars_repo_path: DeepBrainSeg/tumor/Tester.py
max_stars_repo_name: JordanMicahBennett/DeepBrainSeg
max_stars_repo_head_hexsha: 659dd439d20d4c024fe337874eadb90deffc40a4
max_stars_repo_licenses: ["MIT"]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2021-01-01T18:06:50.000Z
max_stars_repo_stars_event_max_datetime: 2021-01-01T18:06:50.000Z
max_issues_repo_path: DeepBrainSeg/tumor/Tester.py
max_issues_repo_name: JordanMicahBennett/DeepBrainSeg
max_issues_repo_head_hexsha: 659dd439d20d4c024fe337874eadb90deffc40a4
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: DeepBrainSeg/tumor/Tester.py
max_forks_repo_name: JordanMicahBennett/DeepBrainSeg
max_forks_repo_head_hexsha: 659dd439d20d4c024fe337874eadb90deffc40a4
max_forks_repo_licenses: ["MIT"]
max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2021-01-01T18:06:52.000Z
max_forks_repo_forks_event_max_datetime: 2021-01-01T18:06:52.000Z
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# author: Avinash Kori
# contact: [email protected]
import torch
import SimpleITK as sitk
import numpy as np
import nibabel as nib
from torch.autograd import Variable
from skimage.transform import resize
from torchvision import transforms
from time import gmtime, strftime
from tqdm import tqdm
import pdb
import os
from ..helpers.helper import *
from os.path import expanduser
home = expanduser("~")
#========================================================================================
# Prediction functions
# Path to the ANTs binaries used during preprocessing (install location assumed)
bin_path = '/opt/ANTs/bin/'
# ========================================================================================
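
# Example driver: deepSeg (defined earlier in the full module) is pointed at a
# standard BraTS case folder, which is expected to contain the four MR
# modalities (t1, t1ce, t2, flair) as NIfTI volumes.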
if __name__ == '__main__':
ext = deepSeg(True)
ext.get_segmentation_brats('../../sample_volume/Brats18_CBICA_AVG_1/')
avg_line_length: 46.937778 | max_line_length: 168 | alphanum_fraction: 0.548717

hexsha: bc6e8fa55969e186c06ce2946db2244dfbf09a10
size: 7334
ext: py
lang: Python
max_stars_repo_path: statsmodels/discrete/tests/test_conditional.py
max_stars_repo_name: porcpine1967/statsmodels
max_stars_repo_head_hexsha: db4900056d80732ffff2733454fac88781ced8d2
max_stars_repo_licenses: ["BSD-3-Clause"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: statsmodels/discrete/tests/test_conditional.py
max_issues_repo_name: porcpine1967/statsmodels
max_issues_repo_head_hexsha: db4900056d80732ffff2733454fac88781ced8d2
max_issues_repo_licenses: ["BSD-3-Clause"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: statsmodels/discrete/tests/test_conditional.py
max_forks_repo_name: porcpine1967/statsmodels
max_forks_repo_head_hexsha: db4900056d80732ffff2733454fac88781ced8d2
max_forks_repo_licenses: ["BSD-3-Clause"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
import numpy as np
from statsmodels.discrete.conditional_models import (
ConditionalLogit, ConditionalPoisson)
from statsmodels.tools.numdiff import approx_fprime
from numpy.testing import assert_allclose
import pandas as pd
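

# The original test functions are longer than this excerpt. A minimal,
# hypothetical smoke test (not from the source file) showing how the imports
# above fit together; the data is synthetic:
def test_conditional_logit_smoke():
    np.random.seed(0)
    groups = np.repeat(np.arange(10), 5)
    x = np.random.normal(size=(50, 2))
    y = (np.random.uniform(size=50) < 0.5).astype(int)
    model = ConditionalLogit(y, x, groups=groups)
    result = model.fit()
    # One fitted coefficient per column of x.
    assert_allclose(len(result.params), 2)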
avg_line_length: 30.558333 | max_line_length: 78 | alphanum_fraction: 0.587947