hexsha
stringlengths
40
40
size
int64
5
2.06M
ext
stringclasses
11 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
3
251
max_stars_repo_name
stringlengths
4
130
max_stars_repo_head_hexsha
stringlengths
40
78
max_stars_repo_licenses
sequencelengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
3
251
max_issues_repo_name
stringlengths
4
130
max_issues_repo_head_hexsha
stringlengths
40
78
max_issues_repo_licenses
sequencelengths
1
10
max_issues_count
int64
1
116k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
3
251
max_forks_repo_name
stringlengths
4
130
max_forks_repo_head_hexsha
stringlengths
40
78
max_forks_repo_licenses
sequencelengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
1
1.05M
avg_line_length
float64
1
1.02M
max_line_length
int64
3
1.04M
alphanum_fraction
float64
0
1
9c245a520078fb55db53d97b8e520bef999698c6
9,538
py
Python
api/base/settings/defaults.py
mattclark/osf.io
7a362ceb6af3393d3d0423aafef336ee13277303
[ "Apache-2.0" ]
null
null
null
api/base/settings/defaults.py
mattclark/osf.io
7a362ceb6af3393d3d0423aafef336ee13277303
[ "Apache-2.0" ]
null
null
null
api/base/settings/defaults.py
mattclark/osf.io
7a362ceb6af3393d3d0423aafef336ee13277303
[ "Apache-2.0" ]
null
null
null
""" Django settings for api project. Generated by 'django-admin startproject' using Django 1.8. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ import os from urlparse import urlparse from website import settings as osf_settings BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ DATABASES = { 'default': { 'CONN_MAX_AGE': 0, 'ENGINE': 'osf.db.backends.postgresql', # django.db.backends.postgresql 'NAME': os.environ.get('OSF_DB_NAME', 'osf'), 'USER': os.environ.get('OSF_DB_USER', 'postgres'), 'PASSWORD': os.environ.get('OSF_DB_PASSWORD', ''), 'HOST': os.environ.get('OSF_DB_HOST', '127.0.0.1'), 'PORT': os.environ.get('OSF_DB_PORT', '5432'), 'ATOMIC_REQUESTS': True, 'TEST': { 'SERIALIZE': False, }, }, } DATABASE_ROUTERS = ['osf.db.router.PostgreSQLFailoverRouter', ] PASSWORD_HASHERS = [ 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', 'django.contrib.auth.hashers.BCryptPasswordHasher', ] AUTH_USER_MODEL = 'osf.OSFUser' # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = osf_settings.SECRET_KEY AUTHENTICATION_BACKENDS = ( 'api.base.authentication.backends.ODMBackend', 'guardian.backends.ObjectPermissionBackend', ) # SECURITY WARNING: don't run with debug turned on in production! DEV_MODE = osf_settings.DEV_MODE DEBUG = osf_settings.DEBUG_MODE DEBUG_PROPAGATE_EXCEPTIONS = True # session: SESSION_COOKIE_NAME = 'api' SESSION_COOKIE_SECURE = osf_settings.SECURE_MODE SESSION_COOKIE_HTTPONLY = osf_settings.SESSION_COOKIE_HTTPONLY # csrf: CSRF_COOKIE_NAME = 'api-csrf' CSRF_COOKIE_SECURE = osf_settings.SECURE_MODE CSRF_COOKIE_HTTPONLY = osf_settings.SECURE_MODE ALLOWED_HOSTS = [ '.osf.io', ] # Application definition INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.messages', 'django.contrib.sessions', 'django.contrib.staticfiles', 'django.contrib.admin', # 3rd party 'django_celery_beat', 'django_celery_results', 'rest_framework', 'corsheaders', 'raven.contrib.django.raven_compat', 'django_extensions', 'guardian', 'storages', 'waffle', 'elasticsearch_metrics', # OSF 'osf', # Addons 'addons.osfstorage', 'addons.bitbucket', 'addons.box', 'addons.dataverse', 'addons.dropbox', 'addons.figshare', 'addons.forward', 'addons.github', 'addons.gitlab', 'addons.googledrive', 'addons.mendeley', 'addons.onedrive', 'addons.owncloud', 'addons.s3', 'addons.twofactor', 'addons.wiki', 'addons.zotero', ) # local development using https if osf_settings.SECURE_MODE and DEBUG: INSTALLED_APPS += ('sslserver',) # TODO: Are there more granular ways to configure reporting specifically related to the API? RAVEN_CONFIG = { 'tags': {'App': 'api'}, 'dsn': osf_settings.SENTRY_DSN, 'release': osf_settings.VERSION, } BULK_SETTINGS = { 'DEFAULT_BULK_LIMIT': 100, } MAX_PAGE_SIZE = 100 REST_FRAMEWORK = { 'PAGE_SIZE': 10, 'DEFAULT_RENDERER_CLASSES': ( 'api.base.renderers.JSONAPIRenderer', 'api.base.renderers.JSONRendererWithESISupport', 'api.base.renderers.BrowsableAPIRendererNoForms', ), 'DEFAULT_PARSER_CLASSES': ( 'api.base.parsers.JSONAPIParser', 'api.base.parsers.JSONAPIParserForRegularJSON', 'rest_framework.parsers.FormParser', 'rest_framework.parsers.MultiPartParser', ), 'EXCEPTION_HANDLER': 'api.base.exceptions.json_api_exception_handler', 'DEFAULT_CONTENT_NEGOTIATION_CLASS': 'api.base.content_negotiation.JSONAPIContentNegotiation', 'DEFAULT_VERSIONING_CLASS': 'api.base.versioning.BaseVersioning', 'DEFAULT_VERSION': '2.0', 'ALLOWED_VERSIONS': ( '2.0', '2.1', '2.2', '2.3', '2.4', '2.5', '2.6', '2.7', '2.8', '2.9', '2.10', '2.11', '2.12', '2.13', '2.14', '2.15', '2.16', '2.17', ), 'DEFAULT_FILTER_BACKENDS': ('api.base.filters.OSFOrderingFilter',), 'DEFAULT_PAGINATION_CLASS': 'api.base.pagination.JSONAPIPagination', 'ORDERING_PARAM': 'sort', 'DEFAULT_AUTHENTICATION_CLASSES': ( # Custom auth classes 'api.base.authentication.drf.OSFBasicAuthentication', 'api.base.authentication.drf.OSFSessionAuthentication', 'api.base.authentication.drf.OSFCASAuthentication', ), 'DEFAULT_THROTTLE_CLASSES': ( 'rest_framework.throttling.UserRateThrottle', 'api.base.throttling.NonCookieAuthThrottle', ), 'DEFAULT_THROTTLE_RATES': { 'user': '10000/day', 'non-cookie-auth': '100/hour', 'add-contributor': '10/second', 'create-guid': '1000/hour', 'root-anon-throttle': '1000/hour', 'test-user': '2/hour', 'test-anon': '1/hour', 'send-email': '2/minute', }, } # Settings related to CORS Headers addon: allow API to receive authenticated requests from OSF # CORS plugin only matches based on "netloc" part of URL, so as workaround we add that to the list CORS_ORIGIN_ALLOW_ALL = False CORS_ORIGIN_WHITELIST = ( urlparse(osf_settings.DOMAIN).netloc, osf_settings.DOMAIN, ) # This needs to remain True to allow cross origin requests that are in CORS_ORIGIN_WHITELIST to # use cookies. CORS_ALLOW_CREDENTIALS = True # Set dynamically on app init ORIGINS_WHITELIST = () MIDDLEWARE = ( 'api.base.middleware.DjangoGlobalMiddleware', 'api.base.middleware.CeleryTaskMiddleware', 'api.base.middleware.PostcommitTaskMiddleware', # A profiling middleware. ONLY FOR DEV USE # Uncomment and add "prof" to url params to recieve a profile for that url # 'api.base.middleware.ProfileMiddleware', # 'django.contrib.sessions.middleware.SessionMiddleware', 'api.base.middleware.CorsMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', # 'django.contrib.auth.middleware.AuthenticationMiddleware', # 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', # 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', 'waffle.middleware.WaffleMiddleware', ) TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, }, ] ROOT_URLCONF = 'api.base.urls' WSGI_APPLICATION = 'api.base.wsgi.application' LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # https://django-storages.readthedocs.io/en/latest/backends/gcloud.html if os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', False): # Required to interact with Google Cloud Storage DEFAULT_FILE_STORAGE = 'api.base.storage.RequestlessURLGoogleCloudStorage' GS_BUCKET_NAME = os.environ.get('GS_BUCKET_NAME', 'cos-osf-stage-cdn-us') GS_FILE_OVERWRITE = os.environ.get('GS_FILE_OVERWRITE', False) elif osf_settings.DEV_MODE or osf_settings.DEBUG_MODE: DEFAULT_FILE_STORAGE = 'api.base.storage.DevFileSystemStorage' # https://docs.djangoproject.com/en/1.8/howto/static-files/ STATIC_ROOT = os.path.join(BASE_DIR, 'static/vendor') API_BASE = 'v2/' API_PRIVATE_BASE = '_/' STATIC_URL = '/static/' NODE_CATEGORY_MAP = osf_settings.NODE_CATEGORY_MAP DEBUG_TRANSACTIONS = DEBUG JWT_SECRET = 'osf_api_cas_login_jwt_secret_32b' JWE_SECRET = 'osf_api_cas_login_jwe_secret_32b' ENABLE_VARNISH = osf_settings.ENABLE_VARNISH ENABLE_ESI = osf_settings.ENABLE_ESI VARNISH_SERVERS = osf_settings.VARNISH_SERVERS ESI_MEDIA_TYPES = osf_settings.ESI_MEDIA_TYPES ADDONS_FOLDER_CONFIGURABLE = ['box', 'dropbox', 's3', 'googledrive', 'figshare', 'owncloud', 'onedrive'] ADDONS_OAUTH = ADDONS_FOLDER_CONFIGURABLE + ['dataverse', 'github', 'bitbucket', 'gitlab', 'mendeley', 'zotero', 'forward'] BYPASS_THROTTLE_TOKEN = 'test-token' OSF_SHELL_USER_IMPORTS = None # Settings for use in the admin OSF_URL = 'https://osf.io' SELECT_FOR_UPDATE_ENABLED = True # Disable anonymous user permissions in django-guardian ANONYMOUS_USER_NAME = None # If set to True, automated tests with extra queries will fail. NPLUSONE_RAISE = False # salt used for generating hashids HASHIDS_SALT = 'pinkhimalayan' # django-elasticsearch-metrics ELASTICSEARCH_DSL = { 'default': { 'hosts': os.environ.get('ELASTIC6_URI', '127.0.0.1:9201'), 'retry_on_timeout': True, }, } # Store yearly indices for time-series metrics ELASTICSEARCH_METRICS_DATE_FORMAT = '%Y' WAFFLE_CACHE_NAME = 'waffle_cache' STORAGE_USAGE_CACHE_NAME = 'storage_usage' CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, STORAGE_USAGE_CACHE_NAME: { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'osf_cache_table', }, WAFFLE_CACHE_NAME: { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }
28.990881
123
0.698784
9c24aa3c9d37482f32e1338312a0a18d1b445f4a
66
py
Python
tests/__init__.py
GTedHa/gblackboard
61c13ca69113019b8fc691acaa1953751f517347
[ "MIT" ]
null
null
null
tests/__init__.py
GTedHa/gblackboard
61c13ca69113019b8fc691acaa1953751f517347
[ "MIT" ]
3
2019-02-02T21:17:49.000Z
2021-11-15T17:48:12.000Z
tests/__init__.py
GTedHa/gblackboard
61c13ca69113019b8fc691acaa1953751f517347
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Unit test package for gblackboard."""
16.5
40
0.590909
9c25aa6fdaf3676c5515d5ada4dd3bbb5e192b55
25,225
py
Python
src/fabricflow/fibc/api/fibcapis_pb2_grpc.py
RudSmith/beluganos
7a3f8524e1d9d9313d96476c783a96096180654c
[ "Apache-2.0" ]
119
2017-10-05T11:37:37.000Z
2022-02-23T05:01:54.000Z
src/fabricflow/fibc/api/fibcapis_pb2_grpc.py
RudSmith/beluganos
7a3f8524e1d9d9313d96476c783a96096180654c
[ "Apache-2.0" ]
5
2017-10-23T00:49:09.000Z
2020-08-17T14:58:16.000Z
src/fabricflow/fibc/api/fibcapis_pb2_grpc.py
RudSmith/beluganos
7a3f8524e1d9d9313d96476c783a96096180654c
[ "Apache-2.0" ]
17
2017-10-21T12:33:44.000Z
2022-02-03T10:55:18.000Z
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc import fibcapi_pb2 as fibcapi__pb2 import fibcapis_pb2 as fibcapis__pb2 def add_FIBCApApiServicer_to_server(servicer, server): rpc_method_handlers = { 'Monitor': grpc.unary_stream_rpc_method_handler( servicer.Monitor, request_deserializer=fibcapis__pb2.ApMonitorRequest.FromString, response_serializer=fibcapis__pb2.ApMonitorReply.SerializeToString, ), 'GetPortStats': grpc.unary_stream_rpc_method_handler( servicer.GetPortStats, request_deserializer=fibcapis__pb2.ApGetPortStatsRequest.FromString, response_serializer=fibcapi__pb2.FFPortStats.SerializeToString, ), 'ModPortStats': grpc.unary_unary_rpc_method_handler( servicer.ModPortStats, request_deserializer=fibcapis__pb2.ApModPortStatsRequest.FromString, response_serializer=fibcapis__pb2.ApModPortStatsReply.SerializeToString, ), 'GetPortEntries': grpc.unary_stream_rpc_method_handler( servicer.GetPortEntries, request_deserializer=fibcapis__pb2.ApGetPortEntriesRequest.FromString, response_serializer=fibcapis__pb2.DbPortEntry.SerializeToString, ), 'GetIDEntries': grpc.unary_stream_rpc_method_handler( servicer.GetIDEntries, request_deserializer=fibcapis__pb2.ApGetIdEntriesRequest.FromString, response_serializer=fibcapis__pb2.DbIdEntry.SerializeToString, ), 'GetDpEntries': grpc.unary_stream_rpc_method_handler( servicer.GetDpEntries, request_deserializer=fibcapis__pb2.ApGetDpEntriesRequest.FromString, response_serializer=fibcapis__pb2.DbDpEntry.SerializeToString, ), 'AddPortEntry': grpc.unary_unary_rpc_method_handler( servicer.AddPortEntry, request_deserializer=fibcapis__pb2.DbPortEntry.FromString, response_serializer=fibcapis__pb2.ApAddPortEntryReply.SerializeToString, ), 'AddIDEntry': grpc.unary_unary_rpc_method_handler( servicer.AddIDEntry, request_deserializer=fibcapis__pb2.DbIdEntry.FromString, response_serializer=fibcapis__pb2.ApAddIdEntryReply.SerializeToString, ), 'DelPortEntry': grpc.unary_unary_rpc_method_handler( servicer.DelPortEntry, request_deserializer=fibcapis__pb2.DbPortKey.FromString, response_serializer=fibcapis__pb2.ApDelPortEntryReply.SerializeToString, ), 'DelIDEntry': grpc.unary_unary_rpc_method_handler( servicer.DelIDEntry, request_deserializer=fibcapis__pb2.DbIdEntry.FromString, response_serializer=fibcapis__pb2.ApDelIdEntryReply.SerializeToString, ), 'GetStats': grpc.unary_stream_rpc_method_handler( servicer.GetStats, request_deserializer=fibcapis__pb2.ApGetStatsRequest.FromString, response_serializer=fibcapis__pb2.StatsEntry.SerializeToString, ), 'RunOAM': grpc.unary_unary_rpc_method_handler( servicer.RunOAM, request_deserializer=fibcapi__pb2.OAM.Request.FromString, response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'fibcapi.FIBCApApi', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) def add_FIBCVmApiServicer_to_server(servicer, server): rpc_method_handlers = { 'SendHello': grpc.unary_unary_rpc_method_handler( servicer.SendHello, request_deserializer=fibcapi__pb2.Hello.FromString, response_serializer=fibcapis__pb2.HelloReply.SerializeToString, ), 'SendPortConfig': grpc.unary_unary_rpc_method_handler( servicer.SendPortConfig, request_deserializer=fibcapi__pb2.PortConfig.FromString, response_serializer=fibcapis__pb2.PortConfigReply.SerializeToString, ), 'SendFlowMod': grpc.unary_unary_rpc_method_handler( servicer.SendFlowMod, request_deserializer=fibcapi__pb2.FlowMod.FromString, response_serializer=fibcapis__pb2.FlowModReply.SerializeToString, ), 'SendGroupMod': grpc.unary_unary_rpc_method_handler( servicer.SendGroupMod, request_deserializer=fibcapi__pb2.GroupMod.FromString, response_serializer=fibcapis__pb2.GroupModReply.SerializeToString, ), 'SendOAMReply': grpc.unary_unary_rpc_method_handler( servicer.SendOAMReply, request_deserializer=fibcapis__pb2.OAMReply.FromString, response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString, ), 'Monitor': grpc.unary_stream_rpc_method_handler( servicer.Monitor, request_deserializer=fibcapis__pb2.VmMonitorRequest.FromString, response_serializer=fibcapis__pb2.VmMonitorReply.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'fibcapi.FIBCVmApi', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) def add_FIBCVsApiServicer_to_server(servicer, server): rpc_method_handlers = { 'SendHello': grpc.unary_unary_rpc_method_handler( servicer.SendHello, request_deserializer=fibcapi__pb2.FFHello.FromString, response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString, ), 'SendFFPacket': grpc.unary_unary_rpc_method_handler( servicer.SendFFPacket, request_deserializer=fibcapi__pb2.FFPacket.FromString, response_serializer=fibcapis__pb2.FFPacketReply.SerializeToString, ), 'SendPacketIn': grpc.unary_unary_rpc_method_handler( servicer.SendPacketIn, request_deserializer=fibcapi__pb2.FFPacketIn.FromString, response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString, ), 'SendOAMReply': grpc.unary_unary_rpc_method_handler( servicer.SendOAMReply, request_deserializer=fibcapis__pb2.OAMReply.FromString, response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString, ), 'Monitor': grpc.unary_stream_rpc_method_handler( servicer.Monitor, request_deserializer=fibcapis__pb2.VsMonitorRequest.FromString, response_serializer=fibcapis__pb2.VsMonitorReply.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'fibcapi.FIBCVsApi', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) def add_FIBCDpApiServicer_to_server(servicer, server): rpc_method_handlers = { 'SendHello': grpc.unary_unary_rpc_method_handler( servicer.SendHello, request_deserializer=fibcapi__pb2.FFHello.FromString, response_serializer=fibcapis__pb2.FFHelloReply.SerializeToString, ), 'SendPacketIn': grpc.unary_unary_rpc_method_handler( servicer.SendPacketIn, request_deserializer=fibcapi__pb2.FFPacketIn.FromString, response_serializer=fibcapis__pb2.FFPacketInReply.SerializeToString, ), 'SendPortStatus': grpc.unary_unary_rpc_method_handler( servicer.SendPortStatus, request_deserializer=fibcapi__pb2.FFPortStatus.FromString, response_serializer=fibcapis__pb2.FFPortStatusReply.SerializeToString, ), 'SendL2AddrStatus': grpc.unary_unary_rpc_method_handler( servicer.SendL2AddrStatus, request_deserializer=fibcapi__pb2.FFL2AddrStatus.FromString, response_serializer=fibcapis__pb2.L2AddrStatusReply.SerializeToString, ), 'SendMultipartReply': grpc.unary_unary_rpc_method_handler( servicer.SendMultipartReply, request_deserializer=fibcapis__pb2.DpMultipartReply.FromString, response_serializer=fibcapis__pb2.DpMultipartReplyAck.SerializeToString, ), 'SendOAMReply': grpc.unary_unary_rpc_method_handler( servicer.SendOAMReply, request_deserializer=fibcapis__pb2.OAMReply.FromString, response_serializer=fibcapis__pb2.OAMReplyAck.SerializeToString, ), 'Monitor': grpc.unary_stream_rpc_method_handler( servicer.Monitor, request_deserializer=fibcapis__pb2.DpMonitorRequest.FromString, response_serializer=fibcapis__pb2.DpMonitorReply.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'fibcapi.FIBCDpApi', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,))
40.949675
83
0.745966
9c2605f1809d64546c20b13c622af83a1ba7e6e8
210
py
Python
cymbology/identifiers/__init__.py
pmart123/security_id
95087be9525ab8d2fd47baa93f83aaa30e76bb54
[ "BSD-2-Clause" ]
12
2015-09-15T17:17:39.000Z
2015-09-16T18:18:52.000Z
cymbology/identifiers/__init__.py
MartinThoma/cymbology
95087be9525ab8d2fd47baa93f83aaa30e76bb54
[ "BSD-2-Clause" ]
5
2017-09-15T21:22:07.000Z
2021-08-19T09:15:59.000Z
cymbology/identifiers/__init__.py
pmart123/security_id
95087be9525ab8d2fd47baa93f83aaa30e76bb54
[ "BSD-2-Clause" ]
1
2021-08-19T09:12:59.000Z
2021-08-19T09:12:59.000Z
from cymbology.identifiers.sedol import Sedol from cymbology.identifiers.cusip import Cusip, cusip_from_isin from cymbology.identifiers.isin import Isin __all__ = ('Sedol', 'Cusip', 'cusip_from_isin', 'Isin')
35
62
0.804762
9c26cf2339aa7a4ef06216de7bd0bf3332068b1a
948
py
Python
api/src/error_report/models.py
Noahffiliation/corpus-christi
c69ec88784de7d2e5acde3012926f307b43e38b3
[ "MIT" ]
35
2018-11-29T20:06:52.000Z
2021-04-12T19:01:42.000Z
api/src/error_report/models.py
Noahffiliation/corpus-christi
c69ec88784de7d2e5acde3012926f307b43e38b3
[ "MIT" ]
529
2018-12-31T23:51:25.000Z
2022-02-26T10:42:29.000Z
api/src/error_report/models.py
Noahffiliation/corpus-christi
c69ec88784de7d2e5acde3012926f307b43e38b3
[ "MIT" ]
10
2018-12-04T16:17:00.000Z
2021-04-07T00:47:52.000Z
from marshmallow import Schema, fields from marshmallow.validate import Range, Length from sqlalchemy import Column, Integer, Boolean, DateTime from ..db import Base from ..shared.models import StringTypes # ---- Error-report
29.625
77
0.728903
9c26d711887f84da99433b770df53c3bffc460c4
1,067
py
Python
Python/Vowel-Substring/solution.py
arpitran/HackerRank_solutions
a3a77c858edd3955ea38530916db9051b1aa93f9
[ "MIT" ]
null
null
null
Python/Vowel-Substring/solution.py
arpitran/HackerRank_solutions
a3a77c858edd3955ea38530916db9051b1aa93f9
[ "MIT" ]
null
null
null
Python/Vowel-Substring/solution.py
arpitran/HackerRank_solutions
a3a77c858edd3955ea38530916db9051b1aa93f9
[ "MIT" ]
null
null
null
#!/bin/python3 import math import os import random import re import sys # # Complete the 'findSubstring' function below. # # The function is expected to return a STRING. # The function accepts following parameters: # 1. STRING s # 2. INTEGER k #
20.519231
118
0.626992
9c2757bc39980fb41a4822e37ad9596b865f8c2a
24
py
Python
nima/models/productos/constants.py
erichav/NIMA
6ca845047e2d1764f07af76bfbbed9f1a82bc10f
[ "MIT" ]
null
null
null
nima/models/productos/constants.py
erichav/NIMA
6ca845047e2d1764f07af76bfbbed9f1a82bc10f
[ "MIT" ]
null
null
null
nima/models/productos/constants.py
erichav/NIMA
6ca845047e2d1764f07af76bfbbed9f1a82bc10f
[ "MIT" ]
1
2018-11-18T03:58:53.000Z
2018-11-18T03:58:53.000Z
COLLECTION = 'productos'
24
24
0.791667
9c28065db1d863fb5b79db27e4909a5e2d4c5505
4,501
py
Python
deepchem/metrics/score_function.py
hsjang001205/deepchem
02fce35729826b1ef12a1cfa6519b491510217be
[ "MIT" ]
1
2020-08-19T17:25:27.000Z
2020-08-19T17:25:27.000Z
deepchem/metrics/score_function.py
swpper/deepchem
510b9bf1805bc5a472c1a519700e6b128e06c651
[ "MIT" ]
1
2020-09-22T18:42:21.000Z
2020-09-22T18:42:21.000Z
deepchem/metrics/score_function.py
swpper/deepchem
510b9bf1805bc5a472c1a519700e6b128e06c651
[ "MIT" ]
1
2020-10-06T13:31:21.000Z
2020-10-06T13:31:21.000Z
"""Evaluation metrics.""" import numpy as np from sklearn.metrics import matthews_corrcoef # noqa from sklearn.metrics import recall_score # noqa from sklearn.metrics import cohen_kappa_score from sklearn.metrics import r2_score # noqa from sklearn.metrics import mean_squared_error from sklearn.metrics import mean_absolute_error from sklearn.metrics import precision_score # noqa from sklearn.metrics import precision_recall_curve from sklearn.metrics import auc from sklearn.metrics import jaccard_score from sklearn.metrics import f1_score from sklearn.metrics import roc_auc_score # noqa from sklearn.metrics import accuracy_score # noqa from sklearn.metrics import balanced_accuracy_score # noqa from scipy.stats import pearsonr # kappa_score is an alias for `sklearn.metrics.cohen_kappa_score` kappa_score = cohen_kappa_score def pearson_r2_score(y: np.ndarray, y_pred: np.ndarray) -> float: """Computes Pearson R^2 (square of Pearson correlation). Parameters ---------- y: np.ndarray ground truth array y_pred: np.ndarray predicted array Returns ------- float The Pearson-R^2 score. """ return pearsonr(y, y_pred)[0]**2 def jaccard_index(y: np.ndarray, y_pred: np.ndarray) -> float: """Computes Jaccard Index which is the Intersection Over Union metric which is commonly used in image segmentation tasks. DEPRECATED: WILL BE REMOVED IN A FUTURE VERSION OF DEEEPCHEM. USE `jaccard_score` instead. Parameters ---------- y: np.ndarray ground truth array y_pred: np.ndarray predicted array Returns ------- score: float The jaccard index. A number between 0 and 1. """ return jaccard_score(y, y_pred) def pixel_error(y: np.ndarray, y_pred: np.ndarray) -> float: """An error metric in case y, y_pred are images. Defined as 1 - the maximal F-score of pixel similarity, or squared Euclidean distance between the original and the result labels. Parameters ---------- y: np.ndarray ground truth array y_pred: np.ndarray predicted array Returns ------- score: float The pixel-error. A number between 0 and 1. """ return 1 - f1_score(y, y_pred) def prc_auc_score(y: np.ndarray, y_pred: np.ndarray) -> float: """Compute area under precision-recall curve Parameters ---------- y: np.ndarray A numpy array of shape `(N, n_classes)` or `(N,)` with true labels y_pred: np.ndarray Of shape `(N, n_classes)` with class probabilities. Returns ------- float The area under the precision-recall curve. A number between 0 and 1. """ precision, recall, _ = precision_recall_curve(y[:, 1], y_pred[:, 1]) return auc(recall, precision) def rms_score(y_true: np.ndarray, y_pred: np.ndarray) -> float: """Computes RMS error.""" return np.sqrt(mean_squared_error(y_true, y_pred)) def mae_score(y_true: np.ndarray, y_pred: np.ndarray) -> float: """Computes MAE.""" return mean_absolute_error(y_true, y_pred) def bedroc_score(y_true: np.ndarray, y_pred: np.ndarray, alpha: float = 20.0): """Compute BEDROC metric. BEDROC metric implemented according to Truchon and Bayley that modifies the ROC score by allowing for a factor of early recognition. Please confirm details from [1]_. Parameters ---------- y_true: np.ndarray Binary class labels. 1 for positive class, 0 otherwise y_pred: np.ndarray Predicted labels alpha: float, default 20.0 Early recognition parameter Returns ------- float Value in [0, 1] that indicates the degree of early recognition Notes ----- This function requires RDKit to be installed. References ---------- .. [1] Truchon et al. "Evaluating virtual screening methods: good and bad metrics for the early recognition problem." Journal of chemical information and modeling 47.2 (2007): 488-508. """ try: from rdkit.ML.Scoring.Scoring import CalcBEDROC except ModuleNotFoundError: raise ValueError("This function requires RDKit to be installed.") # validation assert len(y_true) == len(y_pred), 'Number of examples do not match' assert np.array_equal( np.unique(y_true).astype(int), [0, 1]), ('Class labels must be binary: %s' % np.unique(y_true)) yt = np.asarray(y_true) yp = np.asarray(y_pred) yt = yt.flatten() yp = yp[:, 1].flatten() # Index 1 because one_hot predictions scores = list(zip(yt, yp)) scores = sorted(scores, key=lambda pair: pair[1], reverse=True) return CalcBEDROC(scores, 0, alpha)
27.278788
92
0.709842
9c2938d99163d6ef8085c36d2b63a4a8fe4a49b8
117,896
py
Python
hvm/chains/base.py
hyperevo/py-helios-node
ff417fe3fe90f85c9f95b3d8a5f0dd4c80532ee8
[ "MIT" ]
null
null
null
hvm/chains/base.py
hyperevo/py-helios-node
ff417fe3fe90f85c9f95b3d8a5f0dd4c80532ee8
[ "MIT" ]
null
null
null
hvm/chains/base.py
hyperevo/py-helios-node
ff417fe3fe90f85c9f95b3d8a5f0dd4c80532ee8
[ "MIT" ]
null
null
null
from __future__ import absolute_import import operator from collections import deque import functools from abc import ( ABCMeta, abstractmethod ) import rlp_cython as rlp import time import math from uuid import UUID from typing import ( # noqa: F401 Any, Optional, Callable, cast, Dict, Generator, Iterator, Tuple, Type, TYPE_CHECKING, Union, List, Iterable, ) import logging from itertools import groupby from hvm.rlp.receipts import Receipt from hvm.types import Timestamp from eth_typing import ( Address, BlockNumber, Hash32, ) from eth_utils import ( to_tuple, to_set, ) from hvm.db.backends.base import BaseDB from hvm.db.backends.memory import MemoryDB from hvm.db.chain import ( BaseChainDB, ChainDB, ) from hvm.db.journal import ( JournalDB, ) from hvm.db.read_only import ReadOnlyDB from hvm.constants import ( BLOCK_GAS_LIMIT, BLANK_ROOT_HASH, NUMBER_OF_HEAD_HASH_TO_SAVE, TIME_BETWEEN_HEAD_HASH_SAVE, GENESIS_PARENT_HASH, ) from hvm.db.trie import make_trie_root_and_nodes from hvm import constants from hvm.estimators import ( get_gas_estimator, ) from hvm.exceptions import ( HeaderNotFound, TransactionNotFound, ValidationError, VMNotFound, BlockOnWrongChain, CanonicalHeadNotFound, CannotCalculateStake, NotEnoughTimeBetweenBlocks, ReceivableTransactionNotFound, TriedImportingGenesisBlock, JournalDbNotActivated, ReplacingBlocksNotAllowed, UnprocessedBlockNotAllowed, AppendHistoricalRootHashTooOld, HistoricalNetworkTPCMissing, HistoricalMinGasPriceError, UnprocessedBlockChildIsProcessed, ParentNotFound, NoChronologicalBlocks, RewardProofSenderBlockMissing, InvalidHeadRootTimestamp, RewardAmountRoundsToZero, TriedDeletingGenesisBlock, NoGenesisBlockPresent) from eth_keys.exceptions import ( BadSignature, ) from hvm.utils.blocks import reorganize_chronological_block_list_for_correct_chronological_order_at_index from hvm.validation import ( validate_block_number, validate_uint256, validate_word, validate_vm_configuration, validate_canonical_address, validate_is_queue_block, validate_centisecond_timestamp, ) from hvm.rlp.blocks import ( BaseBlock, BaseQueueBlock, ) from hvm.rlp.headers import ( BlockHeader, HeaderParams, ) from hvm.rlp.transactions import ( BaseTransaction, BaseReceiveTransaction ) from hvm.utils.db import ( apply_state_dict, ) from hvm.utils.datatypes import ( Configurable, ) from hvm.utils.headers import ( compute_gas_limit_bounds, ) from hvm.utils.hexadecimal import ( encode_hex, decode_hex ) from hvm.utils.rlp import ( ensure_imported_block_unchanged, ) from hvm.db.chain_head import ChainHeadDB from hvm.db.consensus import ConsensusDB from eth_keys import keys from eth_keys.datatypes import( BaseKey, PublicKey, PrivateKey ) from hvm.utils.numeric import ( effecient_diff, are_items_in_list_equal, ) from sortedcontainers import ( SortedList, SortedDict, ) from hvm.rlp.consensus import NodeStakingScore, PeerNodeHealth from hvm.rlp.accounts import TransactionKey if TYPE_CHECKING: from hvm.vm.base import BaseVM # noqa: F401 from functools import partial import asyncio # Mapping from address to account state. # 'balance', 'nonce' -> int # 'code' -> bytes # 'storage' -> Dict[int, int] AccountState = Dict[Address, Dict[str, Union[int, bytes, Dict[int, int]]]] # # Global Record and discard API # def enable_read_only_db(self) -> None: if not isinstance(self.db, ReadOnlyDB): self.base_db = self.db self.db = ReadOnlyDB(self.base_db) self.reinitialize() # # Helpers # # # Chain API # def get_chain_at_block_parent(self, block: BaseBlock) -> BaseChain: """ Returns a `Chain` instance with the given block's parent at the chain head. """ try: parent_header = self.get_block_header_by_hash(block.header.parent_hash) except HeaderNotFound: raise ValidationError("Parent ({0}) of block {1} not found".format( block.header.parent_hash, block.header.hash )) init_header = self.create_header_from_parent(parent_header) return type(self)(self.chaindb.db, self.wallet_address, self.private_key, init_header) # # VM API # def get_vm(self, header: BlockHeader=None, timestamp: Timestamp = None) -> 'BaseVM': """ Returns the VM instance for the given block timestamp. Or if timestamp is given, gets the vm for that timestamp """ if header is not None and timestamp is not None: raise ValueError("Cannot specify header and timestamp for get_vm(). Only one is allowed.") if header is None or header == self.header: header = self.header if timestamp is not None: header = header.copy(timestamp = timestamp) vm_class = self.get_vm_class_for_block_timestamp(header.timestamp) return vm_class(header=header, chaindb=self.chaindb, network_id=self.network_id) else: vm_class = self.get_vm_class_for_block_timestamp(header.timestamp) return vm_class(header=header, chaindb=self.chaindb, network_id=self.network_id) # # Header API # def create_header_from_parent(self, parent_header, **header_params): """ Passthrough helper to the VM class of the block descending from the given header. """ return self.get_vm_class_for_block_timestamp().create_header_from_parent(parent_header, **header_params) def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeader: """ Returns the requested block header as specified by block hash. Raises BlockNotFound if there's no block header with the given hash in the db. """ validate_word(block_hash, title="Block Hash") return self.chaindb.get_block_header_by_hash(block_hash) def get_canonical_head(self, chain_address = None): """ Returns the block header at the canonical chain head. Raises CanonicalHeadNotFound if there's no head defined for the canonical chain. """ if chain_address is not None: return self.chaindb.get_canonical_head(chain_address) else: return self.chaindb.get_canonical_head(self.wallet_address) # # Block API # def get_block_by_header(self, block_header: BlockHeader) -> BaseBlock: """ Returns the requested block as specified by the block header. """ block_class = self.get_vm_class_for_block_timestamp(block_header.timestamp).get_block_class() send_transactions = self.chaindb.get_block_transactions(block_header, block_class.transaction_class) receive_transactions = self.chaindb.get_block_receive_transactions(block_header,block_class.receive_transaction_class) reward_bundle = self.chaindb.get_reward_bundle(block_header.reward_hash, block_class.reward_bundle_class) output_block = block_class(block_header, send_transactions, receive_transactions, reward_bundle) return output_block def get_block(self) -> BaseBlock: """ Returns the current TIP block. """ return self.get_vm().block def get_queue_block(self) -> BaseBlock: """ Returns the current TIP block. """ return self.get_vm().queue_block # def get_block_by_hash(self, block_hash: Hash32) -> BaseBlock: # """ # Returns the requested block as specified by block hash. # """ # validate_word(block_hash, title="Block Hash") # block_header = self.get_block_header_by_hash(block_hash) # return self.get_block_by_header(block_header) # def get_canonical_block_by_number(self, block_number: BlockNumber) -> BaseBlock: # """ # Returns the block with the given number in the canonical chain. # # Raises BlockNotFound if there's no block with the given number in the # canonical chain. # """ # validate_uint256(block_number, title="Block Number") # return self.get_block_by_hash(self.chaindb.get_canonical_block_hash(block_number)) # # def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32: # """ # Returns the block hash with the given number in the canonical chain. # # Raises BlockNotFound if there's no block with the given number in the # canonical chain. # """ # return self.chaindb.get_canonical_block_hash(block_number) # # Blockchain Database API # # else: # self.chain_head_db.add_block_hash_to_timestamp_without_propogating_to_present(self.wallet_address, block_header.hash, window_for_this_block) # # Queueblock API # def sign_queue_block(self, *args: Any, **kwargs: Any) -> BaseQueueBlock: """ Passthrough helper to the current VM class. """ return self.get_vm().sign_queue_block(*args, **kwargs) def sign_header(self, *args: Any, **kwargs: Any) -> BlockHeader: """ Passthrough helper to the current VM class. """ return self.get_vm().sign_header(*args, **kwargs) # # Transaction API # def get_canonical_transaction(self, transaction_hash: Hash32) -> BaseTransaction: """ Returns the requested transaction as specified by the transaction hash from the canonical chain. Raises TransactionNotFound if no transaction with the specified hash is found in the main chain. """ (block_hash, index, is_receive) = self.chaindb.get_transaction_index(transaction_hash) block_header = self.get_block_header_by_hash(block_hash) VM = self.get_vm_class_for_block_timestamp(block_header.timestamp) if is_receive == False: transaction = self.chaindb.get_transaction_by_index_and_block_hash( block_hash, index, VM.get_transaction_class(), ) else: transaction = self.chaindb.get_receive_transaction_by_index_and_block_hash( block_hash, index, VM.get_receive_transaction_class(), ) if transaction.hash == transaction_hash: return transaction else: raise TransactionNotFound("Found transaction {} instead of {} in block {} at {}".format( encode_hex(transaction.hash), encode_hex(transaction_hash), block_hash, index, )) def create_transaction(self, *args: Any, **kwargs: Any) -> BaseTransaction: """ Passthrough helper to the current VM class. """ return self.get_vm().create_transaction(*args, **kwargs) def create_receive_transaction(self, *args: Any, **kwargs: Any) -> BaseReceiveTransaction: """ Passthrough helper to the current VM class. """ return self.get_vm().create_receive_transaction(*args, **kwargs) # # Chronological Chain api # def get_block_hashes_that_are_new_for_this_historical_root_hash_timestamp(self, historical_root_hash_timestamp: Timestamp) -> List[Tuple[Timestamp, Hash32]]: ''' This is a time consuming function that gets all of the blocks that are new in this root hash that didn't exist in the base root hash. :param timestamp: :return: ''' block_window_start = historical_root_hash_timestamp - TIME_BETWEEN_HEAD_HASH_SAVE base_root_hash = self.chain_head_db.get_historical_root_hash(block_window_start) new_root_hash = self.chain_head_db.get_historical_root_hash(historical_root_hash_timestamp) if base_root_hash == new_root_hash: return None if base_root_hash is None or new_root_hash is None: raise InvalidHeadRootTimestamp( "Could not load block hashes for this historical_root_hash_timestamp because we don't have a root hash for this window or the previous window.") base_head_block_hashes = set(self.chain_head_db.get_head_block_hashes(base_root_hash)) new_head_block_hashes = set(self.chain_head_db.get_head_block_hashes(new_root_hash)) diff_head_block_hashes = new_head_block_hashes - base_head_block_hashes chronological_block_hash_timestamps = [] # now we have to run down each chain until we get to a block that is older than block_window_start for head_block_hash in diff_head_block_hashes: header = self.chaindb.get_block_header_by_hash(head_block_hash) chronological_block_hash_timestamps.append([header.timestamp, head_block_hash]) while True: if header.parent_hash == GENESIS_PARENT_HASH: break try: header = self.chaindb.get_block_header_by_hash(header.parent_hash) except HeaderNotFound: break if header.timestamp < block_window_start: break chronological_block_hash_timestamps.append([header.timestamp, header.hash]) assert len(chronological_block_hash_timestamps) > 0 chronological_block_hash_timestamps.sort() return chronological_block_hash_timestamps # def initialize_historical_root_hashes_and_chronological_blocks(self) -> None: # ''' # This function rebuilds all historical root hashes, and chronological blocks, from the blockchain database. It starts with the saved root hash and works backwards. # This function needs to be run from chain because it requires chain_head_db and chaindb. # :return: # ''' # # self.chain_head_db.load_saved_root_hash() # current_window = self.chain_head_db.current_window # earliest_root_hash = self.chain_head_db.earliest_window # #TIME_BETWEEN_HEAD_HASH_SAVE # # # 1) iterate down the root hash times # # 2) create new chain_head_db with memorydb # # 3) go through each chain and any blocks newer than the timestamp, save to chronological window. # # 4) when you reach a block less than the timestamp, set it as chain head in the new memory based chain_head_db # # 5) get the root hash # # 6) set this root hash in the real chain_head_db at the correct timestamp. # # # A chronological block window holds all of the blocks starting at its timestamp, going to timestamp + TIME_BETWEEN_HEAD_HASH_SAVE # # A historical root hash is the root hash at the given timestamp, so it includes all blocks earlier than that timestamp. # # # us a journaldb so that it doesnt write changes to the database. # temp_chain_head_db = self.get_chain_head_db_class()(MemoryDB()) # #temp_chain_head_db = self.get_chain_head_db_class().load_from_saved_root_hash(JournalDB(self.db)) # for current_timestamp in range(current_window, earliest_root_hash-TIME_BETWEEN_HEAD_HASH_SAVE, -TIME_BETWEEN_HEAD_HASH_SAVE): # self.logger.debug("Rebuilding chronological block window {}".format(current_timestamp)) # if current_timestamp < self.genesis_block_timestamp: # break # # if current_timestamp == current_window: # head_block_hashes = self.chain_head_db.get_head_block_hashes_list() # else: # head_block_hashes = temp_chain_head_db.get_head_block_hashes_list() # # # iterate over all chains # for head_block_hash in head_block_hashes: # current_block_hash = head_block_hash # # now iterate over blocks in chain # while True: # current_header = self.chaindb.get_block_header_by_hash(current_block_hash) # if current_header.timestamp >= current_timestamp: # # add it to chronological block window in the real chain head db # self.chain_head_db.add_block_hash_to_chronological_window(current_header.hash, current_header.timestamp) # else: # # The block is older than the timestamp. Set it as the chain head block hash in our temp chain head db # temp_chain_head_db.set_chain_head_hash(current_header.chain_address, current_header.hash) # break # if current_header.parent_hash == GENESIS_PARENT_HASH: # # we reached the end of the chain # temp_chain_head_db.delete_chain_head_hash(current_header.chain_address) # break # # set the current block to the parent so we move down the chain # current_block_hash = current_header.parent_hash # # # Now that we have gone through all chains, and removed any blocks newer than this timestamp, the root hash in the # # temp chain head db is the correct one for this historical root hash timestamp. # self.chain_head_db.save_single_historical_root_hash(temp_chain_head_db.root_hash, Timestamp(current_timestamp)) def initialize_historical_root_hashes_and_chronological_blocks(self) -> None: ''' This function rebuilds all historical root hashes, and chronological blocks, from the blockchain database. It starts with the saved root hash and works backwards. This function needs to be run from chain because it requires chain_head_db and chaindb. :return: ''' self.chain_head_db.load_saved_root_hash() current_window = self.chain_head_db.current_window earliest_root_hash = self.chain_head_db.earliest_window #TIME_BETWEEN_HEAD_HASH_SAVE # the saved # 1) iterate down the root hash times # 2) create new chain_head_db with memorydb # 3) go through each chain and any blocks newer than the timestamp, save to chronological window. # 4) when you reach a block less than the timestamp, set it as chain head in the new memory based chain_head_db # 5) get the root hash # 6) set this root hash in the real chain_head_db at the correct timestamp. # A chronological block window holds all of the blocks starting at its timestamp, going to timestamp + TIME_BETWEEN_HEAD_HASH_SAVE # A historical root hash is the root hash at the given timestamp, so it includes all blocks earlier than that timestamp. self.logger.debug("Rebuilding chronological block windows") # us a journaldb so that it doesnt write changes to the database. temp_chain_head_db = self.get_chain_head_db_class()(MemoryDB()) #temp_chain_head_db = self.get_chain_head_db_class().load_from_saved_root_hash(JournalDB(self.db)) for current_timestamp in range(current_window, earliest_root_hash-TIME_BETWEEN_HEAD_HASH_SAVE, -TIME_BETWEEN_HEAD_HASH_SAVE): if current_timestamp < self.genesis_block_timestamp: break head_block_hashes = self.chain_head_db.get_head_block_hashes_list() # iterate over all chains for head_block_hash in head_block_hashes: current_block_hash = head_block_hash # now iterate over blocks in chain while True: current_header = self.chaindb.get_block_header_by_hash(current_block_hash) if current_header.timestamp >= current_timestamp: # add it to chronological block window in the real chain head db self.chain_head_db.add_block_hash_to_chronological_window(current_header.hash, current_header.timestamp) else: # The block is older than the timestamp. Set it as the chain head block hash in our temp chain head db self.chain_head_db.set_chain_head_hash(current_header.chain_address, current_header.hash) break if current_header.parent_hash == GENESIS_PARENT_HASH: # we reached the end of the chain self.chain_head_db.delete_chain_head_hash(current_header.chain_address) break # set the current block to the parent so we move down the chain current_block_hash = current_header.parent_hash # Now that we have gone through all chains, and removed any blocks newer than this timestamp, the root hash in the # temp chain head db is the correct one for this historical root hash timestamp. self.chain_head_db.save_single_historical_root_hash(self.chain_head_db.root_hash, Timestamp(current_timestamp)) self.chain_head_db.persist() # finally, lets load the saved root hash again so we are up to date. self.chain_head_db.load_saved_root_hash() # # Execution API # def estimate_gas(self, transaction: BaseTransaction, at_header: BlockHeader=None) -> int: """ Returns an estimation of the amount of gas the given transaction will use if executed on top of the block specified by the given header. """ if at_header is None: at_header = self.get_canonical_head() with self.get_vm(at_header).state_in_temp_block() as state: return self.gas_estimator(state, transaction) # # Reverting block functions # def purge_unprocessed_block(self, block_hash, purge_children_too = True): ''' Deletes all unprocessed block lookups, and unprocessed children lookups for this block and all children blocks. Todo: delete saved block header, and saved transaction tries for each block as well ''' self.logger.debug("purging unprocessed block") if purge_children_too: self.logger.debug("purging unprocessed children") if self.chaindb.has_unprocessed_children(block_hash): self.logger.debug("HAS UNPROCESSED CHILDREN BLOCKS") children_block_hashes = self.chaindb.get_block_children(block_hash) if children_block_hashes != None: for child_block_hash in children_block_hashes: #this includes the child in this actual chain as well as children from send transactions. if not self.chaindb.is_block_unprocessed(child_block_hash): raise UnprocessedBlockChildIsProcessed("In process of deleting children of unprocessed block, and found one that is processed. This should never happen") else: self.purge_unprocessed_block(child_block_hash) try: block = self.get_block_by_hash(block_hash) chain = encode_hex(block.header.chain_address) self.logger.debug("deleting unprocessed child block number {} on chain {}".format(block.number, chain)) self.chaindb.remove_block_from_unprocessed(block) except HeaderNotFound: pass from hvm.utils.profile import profile def _import_block(self, block: BaseBlock, perform_validation: bool=True, save_block_head_hash_timestamp = True, allow_unprocessed = True, ensure_block_unchanged: bool = True, microblock_origin: bool = False) -> BaseBlock: """ Imports a complete block. """ self.logger.debug("importing block {} with number {}".format(block.__repr__(), block.number)) self.validate_time_from_genesis_block(block) if isinstance(block, self.get_vm(timestamp = block.header.timestamp).get_queue_block_class()): # If it was a queueblock, then the header will have changed after importing perform_validation = False ensure_block_unchanged = False queue_block = True else: queue_block = False if not self.chaindb.is_block_unprocessed(block.header.parent_hash): #this part checks to make sure the parent exists try: vm = self.get_vm(timestamp = block.header.timestamp) self.logger.debug("importing block with vm {}".format(vm.__repr__())) if queue_block: imported_block = vm.import_block(block, private_key = self.private_key) else: imported_block = vm.import_block(block) # Validate the imported block. if ensure_block_unchanged: if microblock_origin: # this started out as a microblock. So we only ensure the microblock fields are unchanged. self.logger.debug('ensuring block unchanged. microblock correction') corrected_micro_block = block.copy(header = block.header.copy( receipt_root = imported_block.header.receipt_root, bloom = imported_block.header.bloom, gas_limit = imported_block.header.gas_limit, gas_used = imported_block.header.gas_used, account_hash = imported_block.header.account_hash, account_balance = imported_block.header.account_balance, )) ensure_imported_block_unchanged(imported_block, corrected_micro_block) else: self.logger.debug('ensuring block unchanged') ensure_imported_block_unchanged(imported_block, block) else: self.logger.debug('Not checking block for changes.') if perform_validation: self.validate_block(imported_block) #self.chain_head_db.set_chain_head_hash(self.wallet_address, imported_block.header.hash) if save_block_head_hash_timestamp: self.chain_head_db.add_block_hash_to_chronological_window(imported_block.header.hash, imported_block.header.timestamp) self.save_chain_head_hash_to_trie_for_time_period(imported_block.header) self.chain_head_db.set_chain_head_hash(imported_block.header.chain_address, imported_block.header.hash) self.chain_head_db.persist(True) self.chaindb.persist_block(imported_block) vm.state.account_db.persist(save_account_hash = True, wallet_address = self.wallet_address) #here we must delete the unprocessed lookup before importing children #because the children cannot be imported if their chain parent is unprocessed. #but we cannot delete the lookup for unprocessed children yet. self.chaindb.remove_block_from_unprocessed(imported_block) # Add chronological consistency lookups self.save_block_chronological_consistency_lookups(imported_block) try: self.header = self.create_header_from_parent(self.get_canonical_head()) except CanonicalHeadNotFound: self.header = self.get_vm_class_for_block_timestamp().create_genesis_block(self.wallet_address).header self.queue_block = None self.logger.debug( 'IMPORTED_BLOCK: number %s | hash %s', imported_block.number, encode_hex(imported_block.hash), ) # Make sure our wallet address hasn't magically changed if self.wallet_address != imported_block.header.chain_address: raise ValidationError("Attempted to import a block onto the wrong chain.") return_block = imported_block except ReceivableTransactionNotFound as e: if not allow_unprocessed: raise UnprocessedBlockNotAllowed() self.logger.debug("Saving block as unprocessed because of ReceivableTransactionNotFound error: {}".format(e)) return_block = self.save_block_as_unprocessed(block) if self.raise_errors: raise e except RewardProofSenderBlockMissing as e: if not allow_unprocessed: raise UnprocessedBlockNotAllowed() self.logger.debug("Saving block as unprocessed because of RewardProofSenderBlockMissing error: {}".format(e)) return_block = self.save_block_as_unprocessed(block) else: if not allow_unprocessed: raise UnprocessedBlockNotAllowed() self.logger.debug("Saving block as unprocessed because parent on this chain is unprocessed") return_block = self.save_block_as_unprocessed(block) return return_block def save_block_chronological_consistency_lookups(self, block: BaseBlock) -> None: ''' We need to require that the proof sender chain doesn't add a block after their claimed chain_head_hash, and the timestamp of this block being imported. :param block: :return: ''' block_header = block.header reward_bundle = self.chaindb.get_reward_bundle(block_header.reward_hash, block.reward_bundle_class) chronological_consistency_key = [block_header.timestamp, block_header.hash] for proof in reward_bundle.reward_type_2.proof: # timestamp, block hash of block responsible sender_chain_header = self.chaindb.get_block_header_by_hash(proof.head_hash_of_sender_chain) # The chronological consistency restrictions are placed on the block on top of the one giving the proof. block_number_with_restrictions = sender_chain_header.block_number + 1 self.logger.debug("saving chronological consistency lookup for chain {}, block {}, timestamp {}".format(encode_hex(sender_chain_header.chain_address), block_number_with_restrictions, block_header.timestamp)) self.chaindb.add_block_consistency_key(sender_chain_header.chain_address, block_number_with_restrictions, chronological_consistency_key) # # Chronologically consistent blockchain db API # def check_block_chronological_consistency(self, block: BaseBlock) -> List[Hash32]: ''' Checks to see if the block breaks any chronological consistency. If it does, it will return a list of blocks that need to be reverted for this block to be imported returns list of block hashes that have to be reverted :param block: :return: ''' consistency_keys = self.chaindb.get_block_chronological_consistency_keys(block.header.chain_address, block.header.block_number) block_hashes_to_revert = list() for consistency_key in consistency_keys: if consistency_key[0] > block.header.timestamp: block_hashes_to_revert.append(consistency_key[1]) return block_hashes_to_revert # # Validation API # def validate_block(self, block: BaseBlock) -> None: """ Performs validation on a block that is either being mined or imported. Since block validation (specifically the uncle validation must have access to the ancestor blocks, this validation must occur at the Chain level. """ self.validate_gaslimit(block.header) def validate_gaslimit(self, header: BlockHeader) -> None: """ Validate the gas limit on the given header. """ #parent_header = self.get_block_header_by_hash(header.parent_hash) #low_bound, high_bound = compute_gas_limit_bounds(parent_header) #if header.gas_limit < low_bound: # raise ValidationError( # "The gas limit on block {0} is too low: {1}. It must be at least {2}".format( # encode_hex(header.hash), header.gas_limit, low_bound)) if header.gas_limit > BLOCK_GAS_LIMIT: raise ValidationError( "The gas limit on block {0} is too high: {1}. It must be at most {2}".format( encode_hex(header.hash), header.gas_limit, BLOCK_GAS_LIMIT)) def validate_block_specification(self, block) -> bool: ''' This validates everything we can without looking at the blockchain database. It doesnt need to assume that we have the block that sent the transactions. This that this can check: block signature send transaction signatures receive transaction signatures - dont need to check this. it doesnt add any security signatures of send transaction within receive transactions send transaction root matches transactions receive transaction root matches transactions ''' if not isinstance(block, self.get_vm(timestamp = block.header.timestamp).get_block_class()): self.logger.debug("converting block to correct class") block = self.get_vm(timestamp = block.header.timestamp).convert_block_to_correct_class(block) block.header.check_signature_validity() for transaction in block.transactions: transaction.validate() for transaction in block.receive_transactions: transaction.validate() send_tx_root_hash, _ = make_trie_root_and_nodes(block.transactions) if block.header.transaction_root != send_tx_root_hash: raise ValidationError("Block has invalid transaction root") receive_tx_root_hash, _ = make_trie_root_and_nodes(block.receive_transactions) if block.header.receive_transaction_root != receive_tx_root_hash: raise ValidationError("Block has invalid receive transaction root") return True # # Stake API # # gets the stake for the timestamp corresponding to teh chronological block window, so it is all blocks for the next 1000 seconds. def get_new_block_hash_to_test_peer_node_health(self) -> Hash32: ''' returns one of the newest blocks we have seen. :return: ''' before_this_timestamp = int(time.time()) - 60 # ask the peer for a block that was received at before 1 minute ago current_historical_window = int(time.time() / TIME_BETWEEN_HEAD_HASH_SAVE) * TIME_BETWEEN_HEAD_HASH_SAVE for timestamp in range(current_historical_window, current_historical_window-NUMBER_OF_HEAD_HASH_TO_SAVE*TIME_BETWEEN_HEAD_HASH_SAVE, -1* TIME_BETWEEN_HEAD_HASH_SAVE): chronological_window = self.chain_head_db.load_chronological_block_window(timestamp) if chronological_window is not None: chronological_window.sort(key=lambda x: -1*x[0]) for timestamp_hash in chronological_window: if timestamp_hash[0] < before_this_timestamp: return timestamp_hash[1] #if we get to here then we don't have any blocks within all chronological block windows... raise NoChronologicalBlocks() # # Min Block Gas API used for throttling the network # def re_initialize_historical_minimum_gas_price_at_genesis(self) -> None: ''' re-initializes system with last set min gas price and net tpc cap ''' hist_min_gas_price = self.chaindb.load_historical_minimum_gas_price() hist_tpc_cap = self.chaindb.load_historical_network_tpc_capability() hist_tx_per_centisecond = self.chaindb.load_historical_tx_per_centisecond() if hist_min_gas_price is not None: init_min_gas_price = hist_min_gas_price[-1][1] else: init_min_gas_price = 1 if hist_tpc_cap is not None: init_tpc_cap = hist_tpc_cap[-1][1] else: init_tpc_cap = self.get_local_tpc_cap() if hist_tx_per_centisecond is not None: init_tpc = hist_tx_per_centisecond[-1][1] else: init_tpc = None self.chaindb.initialize_historical_minimum_gas_price_at_genesis(init_min_gas_price, init_tpc_cap, init_tpc) def _update_tpc_from_chronological(self, new_hist_tpc_dict): ''' returns True if they are all the same as what we already had in the database, otherwise it returns False ''' if not isinstance(new_hist_tpc_dict, dict): raise ValidationError("Expected a dict. Didn't get a dict.") hist_tpc = self.chaindb.load_historical_tx_per_centisecond() difference_found = False if hist_tpc is None: hist_tpc = list(new_hist_tpc_dict.items()) else: hist_tpc_dict = dict(hist_tpc) for timestamp, tpc in new_hist_tpc_dict.items(): if timestamp not in hist_tpc_dict or hist_tpc_dict[timestamp] != tpc: #if tpc != 0: difference_found = True hist_tpc_dict[timestamp] = tpc hist_tpc = list(hist_tpc_dict.items()) #print(hist_tpc) #save it to db self.chaindb.save_historical_tx_per_centisecond(hist_tpc, de_sparse = False) return not difference_found # # Consensus DB passthrough's that depend on block timestamp #
46.989239
285
0.672601
9c29bc02bf13f97d4663f0060faece281922045c
3,113
py
Python
integreat_cms/api/v3/regions.py
Integreat/cms-django
ab0a89576ae901f4b30aa8e9c65ff43c44654a80
[ "Apache-2.0" ]
21
2018-10-26T20:10:45.000Z
2020-10-22T09:41:46.000Z
integreat_cms/api/v3/regions.py
Integreat/cms-django
ab0a89576ae901f4b30aa8e9c65ff43c44654a80
[ "Apache-2.0" ]
392
2018-10-25T08:34:07.000Z
2020-11-19T08:20:30.000Z
integreat_cms/api/v3/regions.py
digitalfabrik/integreat-cms
ab0a89576ae901f4b30aa8e9c65ff43c44654a80
[ "Apache-2.0" ]
23
2019-03-06T17:11:35.000Z
2020-10-16T04:36:41.000Z
""" This module includes functions related to the regions API endpoint. """ from django.http import JsonResponse from ...cms.models import Region from ...cms.constants import region_status from ..decorators import json_response def transform_region(region): """ Function to create a JSON from a single region object, including information if region is live/active. :param region: The region object which should be converted :type region: ~integreat_cms.cms.models.regions.region.Region :return: data necessary for API :rtype: dict """ return { "id": region.id, "name": region.full_name, "path": region.slug, "live": region.status == region_status.ACTIVE, "prefix": region.prefix, "name_without_prefix": region.name, "plz": region.postal_code, "extras": region.offers.exists(), "events": region.events_enabled, "pois": region.locations_enabled, "push_notifications": region.push_notifications_enabled, "longitude": region.longitude, "latitude": region.latitude, "bounding_box": region.bounding_box.api_representation, "aliases": region.aliases, "tunews": region.tunews_enabled, } def transform_region_by_status(region): """ Function to create a JSON from a single "active" region object. :param region: The region object which should be converted :type region: ~integreat_cms.cms.models.regions.region.Region :return: data necessary for API :rtype: dict """ result = transform_region(region) # Remove status del result["live"] return result
28.559633
106
0.673305
9c2a4913dd37bcfdaee2efb5a4e62c145d6170b0
10,964
py
Python
cli/src/ansible/AnsibleVarsGenerator.py
romsok24/epiphany
f058984939561fc8d51288765976118ae12e6c32
[ "Apache-2.0" ]
null
null
null
cli/src/ansible/AnsibleVarsGenerator.py
romsok24/epiphany
f058984939561fc8d51288765976118ae12e6c32
[ "Apache-2.0" ]
null
null
null
cli/src/ansible/AnsibleVarsGenerator.py
romsok24/epiphany
f058984939561fc8d51288765976118ae12e6c32
[ "Apache-2.0" ]
null
null
null
import copy import os from cli.src.Config import Config from cli.src.helpers.build_io import (get_ansible_path, get_ansible_path_for_build, get_ansible_vault_path) from cli.src.helpers.data_loader import (load_all_schema_objs_from_directory, load_schema_obj, types) from cli.src.helpers.doc_list_helpers import (ExpectedSingleResultException, select_first, select_single) from cli.src.helpers.naming_helpers import to_feature_name, to_role_name from cli.src.helpers.ObjDict import ObjDict from cli.src.helpers.yaml_helpers import dump from cli.src.schema.DefaultMerger import DefaultMerger from cli.src.Step import Step from cli.version import VERSION
46.854701
124
0.671105
9c2b5cff1157d02c2e830e9f89af05add8167ae9
247,996
py
Python
Python/4 kyu/Snail/test_snail.py
newtonsspawn/codewars_challenges
62b20d4e729c8ba79eac7cae6a179af57abd45d4
[ "MIT" ]
3
2020-05-29T23:29:35.000Z
2021-08-12T03:16:44.000Z
Python/4 kyu/Snail/test_snail.py
newtonsspawn/codewars_challenges
62b20d4e729c8ba79eac7cae6a179af57abd45d4
[ "MIT" ]
null
null
null
Python/4 kyu/Snail/test_snail.py
newtonsspawn/codewars_challenges
62b20d4e729c8ba79eac7cae6a179af57abd45d4
[ "MIT" ]
3
2020-05-22T12:14:55.000Z
2021-04-15T12:52:42.000Z
from unittest import TestCase from snail import snail
69.447214
80
0.361711
9c2c850b8212d47e83a1fb645622cfcbef2e844f
7,385
py
Python
python/tink/jwt/_raw_jwt.py
cuonglm/tink
df5fa42e45b4d43aac6c3506ceba2956b79a62b8
[ "Apache-2.0" ]
null
null
null
python/tink/jwt/_raw_jwt.py
cuonglm/tink
df5fa42e45b4d43aac6c3506ceba2956b79a62b8
[ "Apache-2.0" ]
null
null
null
python/tink/jwt/_raw_jwt.py
cuonglm/tink
df5fa42e45b4d43aac6c3506ceba2956b79a62b8
[ "Apache-2.0" ]
null
null
null
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and """The raw JSON Web Token (JWT).""" import copy import datetime import json from typing import cast, Mapping, Set, List, Dict, Optional, Text, Union, Any from tink import core from tink.jwt import _jwt_error from tink.jwt import _jwt_format _REGISTERED_NAMES = frozenset({'iss', 'sub', 'jti', 'aud', 'exp', 'nbf', 'iat'}) _MAX_TIMESTAMP_VALUE = 253402300799 # 31 Dec 9999, 23:59:59 GMT Claim = Union[None, bool, int, float, Text, List[Any], Dict[Text, Any]]
34.189815
80
0.677319
9c2cb9849ca550c888fd888e8fc11648dd0f1d72
2,501
py
Python
plenum/test/view_change/test_no_instance_change_before_node_is_ready.py
evernym/indy-plenum
dc390caa16c0b15dcc549d557ede6f64c0c1b842
[ "Apache-2.0" ]
null
null
null
plenum/test/view_change/test_no_instance_change_before_node_is_ready.py
evernym/indy-plenum
dc390caa16c0b15dcc549d557ede6f64c0c1b842
[ "Apache-2.0" ]
null
null
null
plenum/test/view_change/test_no_instance_change_before_node_is_ready.py
evernym/indy-plenum
dc390caa16c0b15dcc549d557ede6f64c0c1b842
[ "Apache-2.0" ]
2
2017-12-13T21:14:54.000Z
2021-06-06T15:48:03.000Z
import pytest from plenum.server.view_change.view_changer import ViewChanger from stp_core.common.log import getlogger from plenum.test.pool_transactions.helper import start_not_added_node, add_started_node logger = getlogger() def test_no_instance_change_on_primary_disconnection_for_not_ready_node( looper, txnPoolNodeSet, tdir, tconf, allPluginsPath, sdk_pool_handle, sdk_wallet_steward): """ Test steps: 1. create a new node, but don't add it to the pool (so not send NODE txn), so that the node is not ready. 2. wait for more than VIEW_CHANGE_TIMEOUT (a timeout for initial check for disconnected primary) 3. make sure no InstanceChange sent by the new node 4. add the node to the pool (send NODE txn) and make sure that the node is ready now. 5. wait for more than VIEW_CHANGE_TIMEOUT (a timeout for initial check for disconnected primary) 6. make sure no InstanceChange sent by the new node """ # 1. create a new node, but don't add it to the pool (so not send NODE txn), so that the node is not ready. sigseed, bls_key, new_node, node_ha, client_ha = \ start_not_added_node(looper, tdir, tconf, allPluginsPath, "TestTheta") # 2. wait for more than VIEW_CHANGE_TIMEOUT (a timeout for initial check for disconnected primary) looper.runFor(tconf.VIEW_CHANGE_TIMEOUT + 2) # 3. make sure no InstanceChange sent by the new node assert 0 == new_node.view_changer.spylog.count(ViewChanger.sendInstanceChange.__name__) logger.info("Start added node {}".format(new_node)) # 4. add the node to the pool (send NODE txn) and make sure that the node is ready now. add_started_node(looper, new_node, node_ha, client_ha, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_steward, bls_key) # 5. wait for more than VIEW_CHANGE_TIMEOUT (a timeout for initial check for disconnected primary) looper.runFor(tconf.VIEW_CHANGE_TIMEOUT + 2) # 6. make sure no InstanceChange sent by the new node assert 0 == new_node.view_changer.spylog.count(ViewChanger.sendInstanceChange.__name__)
41
111
0.692123
9c2cc3afd9b6c1b92e54b37f0abed285dd347905
288
py
Python
src/cache/requests_cache_abstract.py
tomaszkingukrol/rest-api-cache-proxy
50738f168f36d285b9a924d9f9d106a65b5617c8
[ "Apache-2.0" ]
null
null
null
src/cache/requests_cache_abstract.py
tomaszkingukrol/rest-api-cache-proxy
50738f168f36d285b9a924d9f9d106a65b5617c8
[ "Apache-2.0" ]
null
null
null
src/cache/requests_cache_abstract.py
tomaszkingukrol/rest-api-cache-proxy
50738f168f36d285b9a924d9f9d106a65b5617c8
[ "Apache-2.0" ]
null
null
null
from abc import ABC, abstractclassmethod from model.response import ResponseModel
22.153846
67
0.743056
9c2cdfaea02de247b5a0a427743330312fb34eb8
16,904
py
Python
dialogue-engine/test/programytest/config/file/test_json.py
cotobadesign/cotoba-agent-oss
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
[ "MIT" ]
104
2020-03-30T09:40:00.000Z
2022-03-06T22:34:25.000Z
dialogue-engine/test/programytest/config/file/test_json.py
cotobadesign/cotoba-agent-oss
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
[ "MIT" ]
25
2020-06-12T01:36:35.000Z
2022-02-19T07:30:44.000Z
dialogue-engine/test/programytest/config/file/test_json.py
cotobadesign/cotoba-agent-oss
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
[ "MIT" ]
10
2020-04-02T23:43:56.000Z
2021-05-14T13:47:01.000Z
""" Copyright (c) 2020 COTOBA DESIGN, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import os from programy.config.file.json_file import JSONConfigurationFile from programy.clients.events.console.config import ConsoleConfiguration from programy.utils.substitutions.substitues import Substitutions from programytest.config.file.base_file_tests import ConfigurationBaseFileTests
31.245841
129
0.562885
9c2d9e0a79b8d15e42eda3577f2435526ea67e86
1,688
py
Python
searching/jump_search.py
magnusrodseth/data-structures-and-algorithms
45dfdc0859683d5c76b82b87f415e2c0cdbc15e8
[ "MIT" ]
null
null
null
searching/jump_search.py
magnusrodseth/data-structures-and-algorithms
45dfdc0859683d5c76b82b87f415e2c0cdbc15e8
[ "MIT" ]
null
null
null
searching/jump_search.py
magnusrodseth/data-structures-and-algorithms
45dfdc0859683d5c76b82b87f415e2c0cdbc15e8
[ "MIT" ]
null
null
null
import math from typing import List def jump_search(array: List[int], value: int) -> int: """ Performs a jump search on a list of integers. :param array: is the array to search. :param value: is the value to search. :return: the index of the value, or -1 if it doesn't exist.' """ if len(array) == 0: return -1 block_size = get_block_size(array) # Pointers for traversing the array start_pointer = 0 next_pointer = block_size while (start_pointer < len(array)) and (array[next_pointer - 1] < value): start_pointer = next_pointer next_pointer += block_size # Prevent next from going out of bounds if next_pointer > len(array): next_pointer = len(array) # Linear search through the relevant block for i in range(start_pointer, next_pointer): if array[i] == value: return i return -1 def get_block_size(array: List[int]) -> int: """ Gets the block size of an array for jump search. The block size is the square root of the length of the array. We then calculate the absolute value of this block size, because we're using the value as index pointer, and negative values do not make sense here. This value is then floored to act as index pointer in the array. :param array: is the array to search. :return: the block size to be used in jump search. """ return math.floor(abs(math.sqrt(len(array)))) if __name__ == '__main__': # Array must be sorted in order for binary search to work array = [3, 5, 6, 9, 11, 18, 20, 21, 24, 30] print(array) index = jump_search(array, 31) print(index)
28.610169
93
0.650474
9c2e7f928242ca7ff6d49135cef52a68297eec05
3,484
py
Python
pincer/objects/message/sticker.py
mjneff2/Pincer
a11bc3e4bad319fdf927d913c58c933576ec7c99
[ "MIT" ]
null
null
null
pincer/objects/message/sticker.py
mjneff2/Pincer
a11bc3e4bad319fdf927d913c58c933576ec7c99
[ "MIT" ]
null
null
null
pincer/objects/message/sticker.py
mjneff2/Pincer
a11bc3e4bad319fdf927d913c58c933576ec7c99
[ "MIT" ]
null
null
null
# Copyright Pincer 2021-Present # Full MIT License can be found in `LICENSE` at the project root. from __future__ import annotations from dataclasses import dataclass from enum import IntEnum from typing import List, Optional, TYPE_CHECKING from ...utils.api_object import APIObject from ...utils.types import MISSING if TYPE_CHECKING: from ..user import User from ...utils import APINullable, Snowflake
21.506173
72
0.661022
9c2f576718895138632a90d08831ce15358276a1
147
py
Python
app/core/utils.py
yayunl/llfselfhelp
55994003ad51e1664a733c84ed6afcb9f28b6499
[ "MIT" ]
null
null
null
app/core/utils.py
yayunl/llfselfhelp
55994003ad51e1664a733c84ed6afcb9f28b6499
[ "MIT" ]
4
2021-04-08T21:37:08.000Z
2021-09-22T19:31:17.000Z
app/core/utils.py
yayunl/llfselfhelp
55994003ad51e1664a733c84ed6afcb9f28b6499
[ "MIT" ]
null
null
null
from django.views.generic import \ UpdateView as BaseUpdateView
18.375
41
0.782313
9c2faa49ef48fc93a9aff0f5610c889ba1ee0f3a
3,219
py
Python
demo/test_bug_3d.py
zhanwj/multi-task-pytorch
7d57645ec8be0ca0c258cfa99fb788e3cd37f106
[ "MIT" ]
2
2019-06-11T16:16:11.000Z
2020-07-21T10:34:40.000Z
demo/test_bug_3d.py
zhanwj/multi-task-pytorch
7d57645ec8be0ca0c258cfa99fb788e3cd37f106
[ "MIT" ]
null
null
null
demo/test_bug_3d.py
zhanwj/multi-task-pytorch
7d57645ec8be0ca0c258cfa99fb788e3cd37f106
[ "MIT" ]
2
2019-05-21T11:07:29.000Z
2019-06-11T16:17:02.000Z
import torch import lib.modeling.resnet as resnet import lib.modeling.semseg_heads as snet import torch.nn as nn import torch.optim as optim import utils.resnet_weights_helper as resnet_utils from torch.autograd import Variable from roi_data.loader import RoiDataLoader, MinibatchSampler, collate_minibatch, collate_minibatch_semseg from datasets.roidb import combined_roidb_for_training, combined_roidb_for_training_semseg import os import numpy as np import nn as mynn import cv2 from modeling.model_builder_3DSD import Generalized_3DSD from modeling.model_builder_PSP3D import DispSeg from core.config import cfg, cfg_from_file, cfg_from_list, assert_and_infer_cfg #load net cfg_file = 'e2e_segdisp-R-50_3Dpool_1x.yaml' cfg_from_file(cfg_file) print (cfg.SEM) print (cfg.DISP) #cfg_from_list(cfg_file) #assert_and_infer_cfg() devices_ids=[5] os.environ["CUDA_VISIBLE_DEVICES"] = ','.join([str(ids) for ids in devices_ids]) torch.backends.cudnn.benchmark=True #torch.cuda.set_device(3) len_gpus = len(devices_ids) batch_size = 2 * len_gpus #net = mynn.DataParallel(load_net().to('cuda'), minibatch=True) net = mynn.DataParallel(DispSeg().to('cuda'), minibatch=True) optimizer = optim.SGD(net.parameters(), lr=0.000875, momentum=0.9) criterion = nn.NLLLoss(ignore_index=255) #dataloader= dataloader(batch_size, len_gpus) for i in range(10): #for i, inputs in zip(range(1000), dataloader): inputs = dataloader(batch_size, len_gpus) for key in inputs: inputs[key] = torch.chunk(inputs[key], chunks=len_gpus, dim=0) optimizer.zero_grad() loss=net(**inputs) optimizer.step() for k in loss['losses'].keys(): print (loss['losses'][k].item())
38.321429
113
0.695247
9c2fcf2ba9545bbf4f412026ea905a2899fef624
2,511
py
Python
regenesis/modelgen.py
crijke/regenesis
e53a0c6302aa458ff9ae95f573d5594351e5434c
[ "MIT" ]
16
2015-04-09T14:40:53.000Z
2021-07-13T15:03:35.000Z
regenesis/modelgen.py
crijke/regenesis
e53a0c6302aa458ff9ae95f573d5594351e5434c
[ "MIT" ]
1
2018-06-25T07:51:18.000Z
2018-06-25T07:51:18.000Z
regenesis/modelgen.py
crijke/regenesis
e53a0c6302aa458ff9ae95f573d5594351e5434c
[ "MIT" ]
3
2015-12-20T18:24:21.000Z
2018-06-24T16:57:25.000Z
import json from regenesis.queries import get_cubes, get_all_dimensions, get_dimensions from pprint import pprint if __name__ == '__main__': with open('model.json', 'wb') as fh: model = generate_model() json.dump(model, fh, indent=2)
30.253012
78
0.502589
9c304e47e988cc3ac6451c94e5e66110773b8469
2,909
py
Python
tests/components/evil_genius_labs/test_light.py
liangleslie/core
cc807b4d597daaaadc92df4a93c6e30da4f570c6
[ "Apache-2.0" ]
30,023
2016-04-13T10:17:53.000Z
2020-03-02T12:56:31.000Z
tests/components/evil_genius_labs/test_light.py
liangleslie/core
cc807b4d597daaaadc92df4a93c6e30da4f570c6
[ "Apache-2.0" ]
24,710
2016-04-13T08:27:26.000Z
2020-03-02T12:59:13.000Z
tests/components/evil_genius_labs/test_light.py
liangleslie/core
cc807b4d597daaaadc92df4a93c6e30da4f570c6
[ "Apache-2.0" ]
11,956
2016-04-13T18:42:31.000Z
2020-03-02T09:32:12.000Z
"""Test Evil Genius Labs light.""" from unittest.mock import patch import pytest from homeassistant.components.light import ( ATTR_COLOR_MODE, ATTR_SUPPORTED_COLOR_MODES, ColorMode, LightEntityFeature, ) from homeassistant.const import ATTR_SUPPORTED_FEATURES
33.056818
86
0.645583
9c30953c84b77a7d66d7d91568a3c0c17191380f
4,410
py
Python
python_on_whales/download_binaries.py
joshbode/python-on-whales
4d5b8b4c5c6dc3ac0af5713e4fe5a72788f44cda
[ "MIT" ]
null
null
null
python_on_whales/download_binaries.py
joshbode/python-on-whales
4d5b8b4c5c6dc3ac0af5713e4fe5a72788f44cda
[ "MIT" ]
null
null
null
python_on_whales/download_binaries.py
joshbode/python-on-whales
4d5b8b4c5c6dc3ac0af5713e4fe5a72788f44cda
[ "MIT" ]
null
null
null
import platform import shutil import tempfile import warnings from pathlib import Path import requests from tqdm import tqdm DOCKER_VERSION = "20.10.5" BUILDX_VERSION = "0.5.1" CACHE_DIR = Path.home() / ".cache" / "python-on-whales" TEMPLATE_CLI = ( "https://download.docker.com/{os}/static/stable/{arch}/docker-{version}.tgz" ) WINDOWS_CLI_URL = "https://github.com/StefanScherer/docker-cli-builder/releases/download/{version}/docker.exe"
34.186047
110
0.664172
9c336d5aaf0a0461822389d24ee86c2449c67183
4,415
py
Python
reinvent-2019/connected-photo-booth/lambda_code/Cerebro_GetQRCode.py
chriscoombs/aws-builders-fair-projects
eee405931030b833fa8c51e906c73d09ce051bcd
[ "Apache-2.0" ]
null
null
null
reinvent-2019/connected-photo-booth/lambda_code/Cerebro_GetQRCode.py
chriscoombs/aws-builders-fair-projects
eee405931030b833fa8c51e906c73d09ce051bcd
[ "Apache-2.0" ]
null
null
null
reinvent-2019/connected-photo-booth/lambda_code/Cerebro_GetQRCode.py
chriscoombs/aws-builders-fair-projects
eee405931030b833fa8c51e906c73d09ce051bcd
[ "Apache-2.0" ]
null
null
null
import boto3 import json import os import logging from contextlib import closing from boto3.dynamodb.conditions import Key, Attr from botocore.exceptions import ClientError from random import shuffle import time import pyqrcode import png __BUCKET_NAME__ = "project-cerebro" dynamo = boto3.client('dynamodb') logger = None print("In initialize fn ...") logger = logging.getLogger() if int(os.environ['DEBUG_MODE']): logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) logger.info("Initialize: Just a test") logger.debug("Initialize: debug a test") def create_presigned_url(bucket_name, object_name, expiration=3600): """Generate a presigned URL to share an S3 object :param bucket_name: string :param object_name: string :param expiration: Time in seconds for the presigned URL to remain valid :return: Presigned URL as string. If error, returns None. """ # Generate a presigned URL for the S3 object s3_client = boto3.client('s3') try: response = s3_client.generate_presigned_url('get_object', Params={'Bucket': bucket_name, 'Key': object_name}, ExpiresIn=expiration) except ClientError as e: logging.error(e) return None # The response contains the presigned URL return response # input parameters are: # 1. image ID # output parameters are: # 1. generated QRCode # workflow: # 1. first get the image_id # 2. confirm this exists in s3 # 3. generate a presigned URL with this s3 path # 4. create a QR Code image with this url embedded # 5. return the QR code stored in S3 temp.
30.034014
115
0.622877
9c34eed08700cdd8cf4f29ef427c58eea1da7c35
4,241
py
Python
dependencies/svgwrite/tests/test_drawing.py
charlesmchen/typefacet
8c6db26d0c599ece16f3704696811275120a4044
[ "Apache-2.0" ]
21
2015-01-16T05:10:02.000Z
2021-06-11T20:48:15.000Z
dependencies/svgwrite/tests/test_drawing.py
charlesmchen/typefacet
8c6db26d0c599ece16f3704696811275120a4044
[ "Apache-2.0" ]
1
2019-09-09T12:10:27.000Z
2020-05-22T10:12:14.000Z
dependencies/svgwrite/tests/test_drawing.py
charlesmchen/typefacet
8c6db26d0c599ece16f3704696811275120a4044
[ "Apache-2.0" ]
2
2015-05-03T04:51:08.000Z
2018-08-24T08:28:53.000Z
#!/usr/bin/env python #coding:utf-8 # Author: mozman --<[email protected]> # Purpose: test drawing module # Created: 11.09.2010 # Copyright (C) 2010, Manfred Moitzi # License: GPLv3 from __future__ import unicode_literals import os import unittest from io import StringIO from svgwrite.drawing import Drawing from svgwrite.container import Group if __name__=='__main__': unittest.main()
38.908257
110
0.557887
9c36070009525ecb4d0b9ecb8aa020fd7b1f9bca
1,480
py
Python
src/cms/views/error_handler/error_handler.py
digitalfabrik/coldaid-backend
b769510570d5921e30876565263813c0362994e2
[ "Apache-2.0" ]
4
2019-12-05T16:45:17.000Z
2020-05-09T07:26:34.000Z
src/cms/views/error_handler/error_handler.py
digitalfabrik/coldaid-backend
b769510570d5921e30876565263813c0362994e2
[ "Apache-2.0" ]
56
2019-12-05T12:31:37.000Z
2021-01-07T15:47:45.000Z
src/cms/views/error_handler/error_handler.py
digitalfabrik/coldaid-backend
b769510570d5921e30876565263813c0362994e2
[ "Apache-2.0" ]
2
2019-12-11T09:52:26.000Z
2020-05-09T07:26:38.000Z
from django.shortcuts import render from django.utils.translation import ugettext as _ # pylint: disable=unused-argument
37
78
0.686486
9c36da1c5a18d69672ff02d87de44158f45e8811
738
py
Python
examples/ex3/app/models.py
trym-inc/django-msg
0b306524515a8fb4840d1a2ef8cf20901b64bc11
[ "MIT" ]
7
2018-02-28T19:03:48.000Z
2020-12-21T01:15:34.000Z
examples/ex3/app/models.py
trym-inc/django-msg
0b306524515a8fb4840d1a2ef8cf20901b64bc11
[ "MIT" ]
null
null
null
examples/ex3/app/models.py
trym-inc/django-msg
0b306524515a8fb4840d1a2ef8cf20901b64bc11
[ "MIT" ]
null
null
null
from typing import NamedTuple from django.contrib.auth.models import AbstractUser from django.db import models from msg.models import Msg
28.384615
65
0.612466
9c3934843ce267b0dc897db0634f69b0dfaade62
280
py
Python
Data_Structures/2d_array_ds.py
csixteen/HackerRank
3ef6fa48599341f481b9e266c69df2d449a7b313
[ "MIT" ]
4
2018-04-19T20:32:54.000Z
2020-04-21T12:28:00.000Z
Data_Structures/2d_array_ds.py
csixteen/HackerRank
3ef6fa48599341f481b9e266c69df2d449a7b313
[ "MIT" ]
null
null
null
Data_Structures/2d_array_ds.py
csixteen/HackerRank
3ef6fa48599341f481b9e266c69df2d449a7b313
[ "MIT" ]
null
null
null
matrix = [list(map(int, input().split())) for _ in range(6)] max_sum = None for i in range(4): for j in range(4): s = sum(matrix[i][j:j+3]) + matrix[i+1][j+1] + sum(matrix[i+2][j:j+3]) if max_sum is None or s > max_sum: max_sum = s print(max_sum)
28
78
0.557143
9c3a00aad13ad525c3f1adcd91ff20ba8d288a5b
6,558
py
Python
tfx/examples/chicago_taxi_pipeline/serving/chicago_taxi_client.py
pingsutw/tfx
bf0d1d74e3f6ea429989fc7b80b82bea08077857
[ "Apache-2.0" ]
1
2021-07-21T15:54:20.000Z
2021-07-21T15:54:20.000Z
tfx/examples/chicago_taxi_pipeline/serving/chicago_taxi_client.py
pingsutw/tfx
bf0d1d74e3f6ea429989fc7b80b82bea08077857
[ "Apache-2.0" ]
1
2020-08-28T09:59:13.000Z
2020-08-28T09:59:13.000Z
tfx/examples/chicago_taxi_pipeline/serving/chicago_taxi_client.py
pingsutw/tfx
bf0d1d74e3f6ea429989fc7b80b82bea08077857
[ "Apache-2.0" ]
1
2020-11-06T11:44:33.000Z
2020-11-06T11:44:33.000Z
# Lint as: python2, python3 # Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A client for the chicago_taxi demo.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import base64 import json import os import subprocess import tempfile import requests from tensorflow_transform import coders as tft_coders from tensorflow_transform.tf_metadata import dataset_schema from tensorflow_transform.tf_metadata import schema_utils from google.protobuf import text_format from tensorflow.python.lib.io import file_io # pylint: disable=g-direct-tensorflow-import from tensorflow.python.platform import app # pylint: disable=g-direct-tensorflow-import from tensorflow_metadata.proto.v0 import schema_pb2 from tfx.utils import io_utils _LOCAL_INFERENCE_TIMEOUT_SECONDS = 5.0 _LABEL_KEY = 'tips' # Tf.Transform considers these features as "raw" def _make_csv_coder(schema, column_names): """Return a coder for tf.transform to read csv files.""" raw_feature_spec = _get_raw_feature_spec(schema) parsing_schema = dataset_schema.from_feature_spec(raw_feature_spec) return tft_coders.CsvCoder(column_names, parsing_schema) def _read_schema(path): """Reads a schema from the provided location. Args: path: The location of the file holding a serialized Schema proto. Returns: An instance of Schema or None if the input argument is None """ result = schema_pb2.Schema() contents = file_io.read_file_to_string(path) text_format.Parse(contents, result) return result def _do_local_inference(host, port, serialized_examples): """Performs inference on a model hosted by the host:port server.""" json_examples = [] for serialized_example in serialized_examples: # The encoding follows the guidelines in: # https://www.tensorflow.org/tfx/serving/api_rest example_bytes = base64.b64encode(serialized_example).decode('utf-8') predict_request = '{ "b64": "%s" }' % example_bytes json_examples.append(predict_request) json_request = '{ "instances": [' + ','.join(map(str, json_examples)) + ']}' server_url = 'http://' + host + ':' + port + '/v1/models/chicago_taxi:predict' response = requests.post( server_url, data=json_request, timeout=_LOCAL_INFERENCE_TIMEOUT_SECONDS) response.raise_for_status() prediction = response.json() print(json.dumps(prediction, indent=4)) def _do_aiplatform_inference(model, version, serialized_examples): """Performs inference on the model:version in AI Platform.""" working_dir = tempfile.mkdtemp() instances_file = os.path.join(working_dir, 'test.json') json_examples = [] for serialized_example in serialized_examples: # The encoding follows the example in: # https://github.com/GoogleCloudPlatform/training-data-analyst/blob/master/quests/tpu/invoke_model.py json_examples.append('{ "inputs": { "b64": "%s" } }' % base64.b64encode(serialized_example).decode('utf-8')) file_io.write_string_to_file(instances_file, '\n'.join(json_examples)) gcloud_command = [ 'gcloud', 'ai-platform', 'predict', '--model', model, '--version', version, '--json-instances', instances_file ] print(subprocess.check_output(gcloud_command)) def _do_inference(model_handle, examples_file, num_examples, schema): """Sends requests to the model and prints the results. Args: model_handle: handle to the model. This can be either "aiplatform:model:version" or "host:port" examples_file: path to csv file containing examples, with the first line assumed to have the column headers num_examples: number of requests to send to the server schema: a Schema describing the input data Returns: Response from model server """ filtered_features = [ feature for feature in schema.feature if feature.name != _LABEL_KEY ] del schema.feature[:] schema.feature.extend(filtered_features) column_names = io_utils.load_csv_column_names(examples_file) csv_coder = _make_csv_coder(schema, column_names) proto_coder = _make_proto_coder(schema) input_file = open(examples_file, 'r') input_file.readline() # skip header line serialized_examples = [] for _ in range(num_examples): one_line = input_file.readline() if not one_line: print('End of example file reached') break one_example = csv_coder.decode(one_line) serialized_example = proto_coder.encode(one_example) serialized_examples.append(serialized_example) parsed_model_handle = model_handle.split(':') if parsed_model_handle[0] == 'aiplatform': _do_aiplatform_inference( model=parsed_model_handle[1], version=parsed_model_handle[2], serialized_examples=serialized_examples) else: _do_local_inference( host=parsed_model_handle[0], port=parsed_model_handle[1], serialized_examples=serialized_examples) if __name__ == '__main__': app.run(main)
33.630769
105
0.745502
9c3a1a0942bfa4b3696876f16d5ec82b36b6c9bd
23,156
py
Python
PyVideo/main.py
BlackIQ/Cute
5835e989d661f23b04b6e436589c6e844167522e
[ "Apache-2.0" ]
5
2021-11-21T10:59:47.000Z
2022-01-16T11:57:14.000Z
PyVideo/main.py
BlackIQ/Cute
5835e989d661f23b04b6e436589c6e844167522e
[ "Apache-2.0" ]
null
null
null
PyVideo/main.py
BlackIQ/Cute
5835e989d661f23b04b6e436589c6e844167522e
[ "Apache-2.0" ]
null
null
null
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, Q_ARG, QAbstractItemModel, QFileInfo, qFuzzyCompare, QMetaObject, QModelIndex, QObject, Qt, QThread, QTime, QUrl) from PyQt5.QtGui import QColor, qGray, QImage, QPainter, QPalette from PyQt5.QtMultimedia import (QAbstractVideoBuffer, QMediaContent, QMediaMetaData, QMediaPlayer, QMediaPlaylist, QVideoFrame, QVideoProbe) from PyQt5.QtMultimediaWidgets import QVideoWidget from PyQt5.QtWidgets import (QApplication, QComboBox, QDialog, QFileDialog, QFormLayout, QHBoxLayout, QLabel, QListView, QMessageBox, QPushButton, QSizePolicy, QSlider, QStyle, QToolButton, QVBoxLayout, QWidget) if __name__ == '__main__': import sys app = QApplication(sys.argv) player = Player(sys.argv[1:]) player.show() sys.exit(app.exec_())
35.734568
217
0.634738
9c3a31af53788d8bf47df143a1f5099537838024
1,234
py
Python
tests/snapshot/periodic.py
Uornca/mirheo
162c722ffa27c02e1f5b0d1866816e44c2393f0f
[ "MIT" ]
22
2019-07-17T13:06:41.000Z
2021-12-15T14:45:24.000Z
tests/snapshot/periodic.py
Uornca/mirheo
162c722ffa27c02e1f5b0d1866816e44c2393f0f
[ "MIT" ]
63
2019-06-26T13:30:47.000Z
2021-02-23T10:13:10.000Z
tests/snapshot/periodic.py
Uornca/mirheo
162c722ffa27c02e1f5b0d1866816e44c2393f0f
[ "MIT" ]
9
2019-10-11T07:32:19.000Z
2021-05-17T11:25:35.000Z
#!/usr/bin/env python """Test checkpoint-like periodic snapshots. We test that there are that many folders and that the currentStep changes. """ import mirheo as mir u = mir.Mirheo(nranks=(1, 1, 1), domain=(4, 6, 8), debug_level=3, log_filename='log', no_splash=True, checkpoint_every=10, checkpoint_mode='Incremental', checkpoint_folder='periodic_snapshots/snapshot_', checkpoint_mechanism='Snapshot') pv = mir.ParticleVectors.ParticleVector('pv', mass=1) ic = mir.InitialConditions.Uniform(number_density=2) u.registerParticleVector(pv, ic) dpd = mir.Interactions.Pairwise('dpd', rc=1.0, kind='DPD', a=10.0, gamma=10.0, kBT=1.0, power=0.5) lj = mir.Interactions.Pairwise('lj', rc=1.0, kind='LJ', epsilon=1.25, sigma=0.75) u.registerInteraction(dpd) u.registerInteraction(lj) u.setInteraction(dpd, pv, pv) minimize = mir.Integrators.Minimize('minimize', max_displacement=1. / 1024) u.registerIntegrator(minimize) u.run(45, dt=0.125) # TEST: snapshot.periodic # cd snapshot # rm -rf periodic_snapshots/ # mir.run --runargs "-n 2" ./periodic.py # ls periodic_snapshots | cat > snapshot.out.txt # grep -rH --include=*.json currentStep periodic_snapshots/ | sort >> snapshot.out.txt
34.277778
98
0.71799
9c3aa677e610f9e2bf81b41d5bae0ca83fbbae6f
3,632
py
Python
tools/resource_prefetch_predictor/generate_database.py
xzhan96/chromium.src
1bd0cf3997f947746c0fc5406a2466e7b5f6159e
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
1
2021-01-07T18:51:03.000Z
2021-01-07T18:51:03.000Z
tools/resource_prefetch_predictor/generate_database.py
emilio/chromium.src
1bd0cf3997f947746c0fc5406a2466e7b5f6159e
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
tools/resource_prefetch_predictor/generate_database.py
emilio/chromium.src
1bd0cf3997f947746c0fc5406a2466e7b5f6159e
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
#!/usr/bin/python # # Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Loads a set of web pages several times on a device, and extracts the predictor database. """ import argparse import logging import os import sys _SRC_PATH = os.path.abspath(os.path.join( os.path.dirname(__file__), os.pardir, os.pardir)) sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil')) from devil.android import device_utils sys.path.append(os.path.join(_SRC_PATH, 'build', 'android')) import devil_chromium sys.path.append(os.path.join(_SRC_PATH, 'tools', 'android', 'loading')) import controller from options import OPTIONS import page_track _PAGE_LOAD_TIMEOUT = 20 def _CreateArgumentParser(): """Creates and returns the argument parser.""" parser = argparse.ArgumentParser( description=('Loads a set of web pages several times on a device, and ' 'extracts the predictor database.'), parents=[OPTIONS.GetParentParser()]) parser.add_argument('--device', help='Device ID') parser.add_argument('--urls_filename', help='File containing a list of URLs ' '(one per line). URLs can be repeated.') parser.add_argument('--output_filename', help='File to store the database in.') parser.add_argument('--url_repeat', help=('Number of times each URL in the input ' 'file is loaded.'), default=3) return parser def _FindDevice(device_id): """Returns a device matching |device_id| or the first one if None, or None.""" devices = device_utils.DeviceUtils.HealthyDevices() if device_id is None: return devices[0] matching_devices = [d for d in devices if str(d) == device_id] if not matching_devices: return None return matching_devices[0] def _Setup(device): """Sets up a device and returns an instance of RemoteChromeController.""" chrome_controller = controller.RemoteChromeController(device) device.ForceStop(OPTIONS.ChromePackage().package) chrome_controller.AddChromeArguments( ['--speculative-resource-prefetching=learning']) chrome_controller.ResetBrowserState() return chrome_controller if __name__ == '__main__': main()
32.141593
80
0.704295
9c3ae120bfd666dab5412a24ae65101fc3c9e81d
9,947
py
Python
palm_wrapper/job_submission/domain.py
madeline-scyphers/palm
0ecf9eb49f66b86f284bac9506c9570159aba02b
[ "MIT" ]
null
null
null
palm_wrapper/job_submission/domain.py
madeline-scyphers/palm
0ecf9eb49f66b86f284bac9506c9570159aba02b
[ "MIT" ]
6
2021-12-07T15:59:42.000Z
2021-12-07T16:03:45.000Z
palm_wrapper/job_submission/domain.py
madeline-scyphers/palm
0ecf9eb49f66b86f284bac9506c9570159aba02b
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod from typing import Optional from xml import dom import numpy as np import pandas as pd from .utils import get_factors_rev def setup_domain(cfg): domain_x, domain_y = cfg["domain"]["x"], (round(cfg["domain"]["y"] * cfg["domain"]["urban_ratio"])) plot_footprint, plot_ratio, dx, dy = ( cfg["plot"]["plot_footprint"], cfg["plot"]["plot_ratio"], cfg["domain"]["dx"], cfg["domain"]["dy"], ) plots = calc_plot_sizes( domain_x, domain_y, plot_footprint, cfg["house"]["footprint"], plot_ratio, dx, dy, cfg["domain"]["y"], ) tplot_x, tplot_y, tdomain_x, tdomain_y, trimmed_y = get_best_plot_size(plots, plot_footprint, plot_ratio, dx, dy) house_x, house_y = calc_house_size(tplot_x, tplot_y, cfg["house"]["footprint"], dx, dy) house = House(house_x, house_y, cfg["house"]["height"]) return Domain.from_plot_size(house, cfg, tplot_x, tplot_y, tdomain_x, tdomain_y, trimmed_y, plot_ratio, cfg["domain"]["stack_height"]) if __name__ == "__main__": from .load_wrapper_config import get_wrapper_config config = get_wrapper_config() domain = setup_domain(config) domain
38.405405
138
0.6037
9c3c80f9f134a4c7e10b07a2a070fce993cd44e3
373
py
Python
zad5.py
Alba126/Laba21
ce5735ca223d92287efa64bc3347f4356234b399
[ "MIT" ]
null
null
null
zad5.py
Alba126/Laba21
ce5735ca223d92287efa64bc3347f4356234b399
[ "MIT" ]
null
null
null
zad5.py
Alba126/Laba21
ce5735ca223d92287efa64bc3347f4356234b399
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- config: utf-8 -*- from tkinter import * from random import random root = Tk() root['bg'] = 'white' root.title('crown') img = PhotoImage(file='crown.png') bt1 = Button(image=img, command=on_click) bt1.place(relx=0.5, rely=0.5, anchor=CENTER) root.mainloop()
16.217391
44
0.643432
9c3e76ea8723f85b50595507d895179df16ec7b9
341
py
Python
tests/importer/utils/test_utils.py
HumanCellAtlas/ingest-common
6a230f9606f64cd787b67c143854db36e012a2b7
[ "Apache-2.0" ]
null
null
null
tests/importer/utils/test_utils.py
HumanCellAtlas/ingest-common
6a230f9606f64cd787b67c143854db36e012a2b7
[ "Apache-2.0" ]
null
null
null
tests/importer/utils/test_utils.py
HumanCellAtlas/ingest-common
6a230f9606f64cd787b67c143854db36e012a2b7
[ "Apache-2.0" ]
null
null
null
from openpyxl import Workbook
24.357143
73
0.73607
9c3ee51c4543a5b2653184ca78a98a29af6b98cb
2,114
py
Python
test/test_import_stats.py
WBobby/pytorch
655960460ccca936fa5c06df6bbafd25b5582115
[ "Intel" ]
24
2020-11-02T21:25:12.000Z
2022-03-17T07:20:33.000Z
test/test_import_stats.py
WBobby/pytorch
655960460ccca936fa5c06df6bbafd25b5582115
[ "Intel" ]
1
2019-08-01T00:17:43.000Z
2019-09-12T01:31:53.000Z
test/test_import_stats.py
WBobby/pytorch
655960460ccca936fa5c06df6bbafd25b5582115
[ "Intel" ]
12
2020-11-06T05:00:37.000Z
2022-01-30T19:17:36.000Z
import subprocess import sys import unittest import pathlib from torch.testing._internal.common_utils import TestCase, run_tests, IS_LINUX, IS_IN_CI REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent try: # Just in case PyTorch was not built in 'develop' mode sys.path.append(str(REPO_ROOT)) from tools.stats.scribe import rds_write, register_rds_schema except ImportError: register_rds_schema = None rds_write = None # these tests could eventually be changed to fail if the import/init # time is greater than a certain threshold, but for now we just use them # as a way to track the duration of `import torch` in our ossci-metrics # S3 bucket (see tools/stats/print_test_stats.py) if __name__ == "__main__": if register_rds_schema and IS_IN_CI: register_rds_schema( "import_stats", { "test_name": "string", "peak_memory_bytes": "int", "time_ms": "int", }, ) run_tests()
31.088235
116
0.637181
9c3ffd59fa98b323892e6f69d6dc5851e106b046
1,365
py
Python
post_office/validators.py
fasih/django-post_office
e4086527a48bc0d1e5b8e0dfe9c27ab3a6260224
[ "MIT" ]
661
2015-01-07T09:35:14.000Z
2022-03-24T11:45:33.000Z
post_office/validators.py
fasih/django-post_office
e4086527a48bc0d1e5b8e0dfe9c27ab3a6260224
[ "MIT" ]
267
2015-01-10T22:45:08.000Z
2022-03-31T11:49:52.000Z
post_office/validators.py
fasih/django-post_office
e4086527a48bc0d1e5b8e0dfe9c27ab3a6260224
[ "MIT" ]
238
2015-01-10T22:53:39.000Z
2022-03-24T12:56:16.000Z
from django.core.exceptions import ValidationError from django.core.validators import validate_email from django.template import Template, TemplateSyntaxError, TemplateDoesNotExist from django.utils.encoding import force_str def validate_email_with_name(value): """ Validate email address. Both "Recipient Name <[email protected]>" and "[email protected]" are valid. """ value = force_str(value) recipient = value if '<' in value and '>' in value: start = value.find('<') + 1 end = value.find('>') if start < end: recipient = value[start:end] validate_email(recipient) def validate_comma_separated_emails(value): """ Validate every email address in a comma separated list of emails. """ if not isinstance(value, (tuple, list)): raise ValidationError('Email list must be a list/tuple.') for email in value: try: validate_email_with_name(email) except ValidationError: raise ValidationError('Invalid email: %s' % email, code='invalid') def validate_template_syntax(source): """ Basic Django Template syntax validation. This allows for robuster template authoring. """ try: Template(source) except (TemplateSyntaxError, TemplateDoesNotExist) as err: raise ValidationError(str(err))
28.4375
80
0.677656
9c429be32392440a110878d04d24fb43356f3b77
1,144
py
Python
paperhub/input.py
GiuseppeBaldini/PaperHub
5efdee1a0374c995a6717a4baee2106df808af12
[ "MIT" ]
null
null
null
paperhub/input.py
GiuseppeBaldini/PaperHub
5efdee1a0374c995a6717a4baee2106df808af12
[ "MIT" ]
1
2020-03-27T12:05:14.000Z
2020-03-28T01:10:20.000Z
paperhub/input.py
GiuseppeBaldini/PaperHub
5efdee1a0374c995a6717a4baee2106df808af12
[ "MIT" ]
null
null
null
# Input DOI / URL import re import sys # Pyperclip is not built-in, check and download if needed try: import pyperclip except (ImportError, ModuleNotFoundError): print('Pyperclip module not found. Please download it.') sys.exit(0) # Regex for links link_regex = re.compile(r'''( http[s]?:// (?:[a-zA-Z]| [0-9]| [$-_@.&+]| [!*\(\),]| (?:%[0-9a-fA-F][0-9a-fA-F]))+ )''', re.IGNORECASE | re.VERBOSE) # Get DOI / URL using different methods # Method 1: argument try: input_link = sys.argv[1] # Method 2: clipboard except IndexError: input_link = pyperclip.paste() # Method 3: manual input def regex_check(regex, link): """ Check using regex. If DOI/URL are not in the right format, require manual input until correct or Enter to quit. """ while True: match = re.match(regex, link) if match == None: link = str(input('''Enter valid DOI / URL or press Enter to quit: > ''')) if link == '': exit() else: continue else: return link url = regex_check(link_regex, input_link)
23.346939
85
0.581294
9c42cb2bbf7ba2f9f5bbb8435dcd766270fb6340
6,338
py
Python
main.py
chillum1718/EffcientNetV2
4338652454185db648a6ea5df04528bcafb24ed2
[ "Apache-2.0" ]
null
null
null
main.py
chillum1718/EffcientNetV2
4338652454185db648a6ea5df04528bcafb24ed2
[ "Apache-2.0" ]
null
null
null
main.py
chillum1718/EffcientNetV2
4338652454185db648a6ea5df04528bcafb24ed2
[ "Apache-2.0" ]
null
null
null
import argparse import csv import os import torch import tqdm from torch import distributed from torch.utils import data from torchvision import datasets from torchvision import transforms from nets import nn from utils import util data_dir = os.path.join('..', 'Dataset', 'IMAGENET') if __name__ == '__main__': main()
36.011364
100
0.570369
9c42d1030d5bf12bec44656b0c6d8328e6f4647e
2,897
py
Python
cgbind/esp.py
duartegroup/cgbind
8c2369d4c49e8b008fc3951719d99e0c4f6b6b16
[ "MIT" ]
7
2020-06-08T16:18:56.000Z
2021-01-28T09:59:16.000Z
cgbind/esp.py
duartegroup/cgbind
8c2369d4c49e8b008fc3951719d99e0c4f6b6b16
[ "MIT" ]
null
null
null
cgbind/esp.py
duartegroup/cgbind
8c2369d4c49e8b008fc3951719d99e0c4f6b6b16
[ "MIT" ]
2
2020-11-16T04:52:43.000Z
2021-06-04T05:07:29.000Z
import numpy as np from time import time from cgbind.atoms import get_atomic_number from cgbind.log import logger from cgbind.constants import Constants from cgbind.exceptions import CgbindCritical def get_esp_cube_lines(charges, atoms): """ From a list of charges and a set of xyzs create the electrostatic potential map grid-ed uniformly between the most negative x, y, z values -5 and the largest x, y, z +5 :param charges: (list(float)) :param atoms: (list(autode.atoms.Atom)) :return: (list(str)), (min ESP value, max ESP value) """ logger.info('Calculating the ESP and generating a .cube file') start_time = time() try: from esp_gen import get_cube_lines except ModuleNotFoundError: raise CgbindCritical('esp_gen not available. cgbind must be ' 'installed with the --esp_gen flag') if charges is None: logger.error('Could not generate an .cube file, charges were None') return [], (None, None) coords = np.array([atom.coord for atom in atoms]) charges = np.array(charges) # Get the max and min points from the coordinates max_cart_values = np.max(coords, axis=0) min_cat_values = np.min(coords, axis=0) # The grid needs to be slightly larger than the smallest/largest Cartesian # coordinate # NOTE: All distances from here are in Bohr (a0) i.e. atomic units min_carts = Constants.ang2a0 * (min_cat_values - 5 * np.ones(3)) max_carts = Constants.ang2a0 * (max_cart_values + 5 * np.ones(3)) coords = np.array([Constants.ang2a0 * np.array(coord) for coord in coords]) # Number of voxels will be nx * ny * nz nx, ny, nz = 50, 50, 50 vox_size = max_carts - min_carts rx, ry, rz = vox_size[0] / nx, vox_size[1] / ny, vox_size[2] / nz # Write the .cube file lines cube_file_lines = ['Generated by cgbind\n', 'ESP\n'] n_atoms = len(coords) min_x, min_y, min_z = min_carts cube_file_lines.append(f'{n_atoms:>5d}{min_x:>12f}{min_y:>12f}{min_z:>12f}\n') # n_atoms origin(x y z) cube_file_lines.append(f'{nx:>5d}{rx:>12f}{0.0:>12f}{0.0:>12f}\n') # Number of voxels and their size cube_file_lines.append(f'{ny:>5d}{0.0:>12f}{ry:>12f}{0.0:>12f}\n') cube_file_lines.append(f'{nz:>5d}{0.0:>12f}{0.0:>12f}{rz:>12f}\n') for atom in atoms: x, y, z = atom.coord cube_file_lines.append(f'{get_atomic_number(atom):>5d}{0.0:>12f}' f'{Constants.ang2a0*x:>12f}{Constants.ang2a0*y:>12f}{Constants.ang2a0*z:>12f}\n') # Looping over x, y, z is slow in python so use Cython extension cube_val_lines, min_val, max_val = get_cube_lines(nx, ny, nz, coords, min_carts, charges, vox_size) cube_file_lines += cube_val_lines logger.info(f'ESP generated in {time()-start_time:.3f} s') return cube_file_lines, (min_val, max_val)
38.118421
112
0.661374
9c43dad16fef03fbc908a7aa39b6c4226fc2883c
6,051
py
Python
codes/test_specular.py
mcdenoising/AdvMCDenoise
4ba00098c2d0f50a7dfc1e345b5e50a20768d7e8
[ "MIT" ]
35
2019-11-04T06:49:39.000Z
2022-01-13T07:53:37.000Z
codes/test_specular.py
qbhan/Adversarial_MCdenoising
a99bf312baf2430d750d70a79270aca0720532aa
[ "MIT" ]
1
2019-11-28T22:33:11.000Z
2019-11-28T22:33:11.000Z
codes/test_specular.py
qbhan/Adversarial_MCdenoising
a99bf312baf2430d750d70a79270aca0720532aa
[ "MIT" ]
8
2019-11-08T04:58:08.000Z
2020-11-03T07:49:58.000Z
import os import sys import logging import time import argparse import numpy as np from collections import OrderedDict import scripts.options as option import utils.util as util from data.util import bgr2ycbcr from data import create_dataset, create_dataloader from models import create_model # options parser = argparse.ArgumentParser() parser.add_argument('-opt', type=str, required=True, help='Path to options JSON file.') opt = option.parse(parser.parse_args().opt, is_train=False) util.mkdirs((path for key, path in opt['path'].items() if not key == 'pretrain_model_G')) opt = option.dict_to_nonedict(opt) util.setup_logger(None, opt['path']['log'], 'test.log', level=logging.INFO, screen=True) logger = logging.getLogger('base') logger.info(option.dict2str(opt)) # Create test dataset and dataloader test_loaders = [] for phase, dataset_opt in sorted(opt['datasets'].items()): test_set = create_dataset(dataset_opt) test_loader = create_dataloader(test_set, dataset_opt) logger.info('Number of test images in [{:s}]: {:d}'.format(dataset_opt['name'], len(test_set))) test_loaders.append(test_loader) # Create model model = create_model(opt) for test_loader in test_loaders: test_set_name = test_loader.dataset.opt['name'] logger.info('\nTesting [{:s}]...'.format(test_set_name)) test_start_time = time.time() dataset_dir = os.path.join(opt['path']['results_root'], test_set_name) util.mkdir(dataset_dir) test_results = OrderedDict() test_results['psnr'] = [] test_results['ssim'] = [] test_results['psnr_y'] = [] test_results['ssim_y'] = [] for data in test_loader: need_GT = False if test_loader.dataset.opt['dataroot_GT'] is None else True # need_GT = True model.feed_data_specular(data, need_GT=need_GT) if opt["image_type"] == "exr": y = data["x_offset"] x = data["y_offset"] img_path = data['NOISY_path'][0] img_name = os.path.splitext(os.path.basename(img_path))[0] start = time.time() model.test() # test end = time.time() print("Time elapsed... %f "%(end - start)) visuals = model.get_current_visuals(need_GT=need_GT) denoised_img = util.tensor2img(visuals['DENOISED']) # uint8 noisy_img = util.tensor2img(visuals['NOISY']) gt_img = util.tensor2img(visuals['GT']) # uint8 # save images suffix = opt['suffix'] if suffix ==None: suffix = "" save_DENOISED_img_path = os.path.join(dataset_dir, img_name + suffix + '_1denoised.png') save_NOISY_img_path = os.path.join(dataset_dir, img_name + suffix + '_0noisy.png') save_GT_img_path = os.path.join(dataset_dir, img_name + suffix + '_2gt.png') # calculate PSNR and SSIM if need_GT: # gt_img = util.tensor2img(visuals['GT']) gt_img = gt_img / 255. denoised_img = denoised_img / 255. crop_border = test_loader.dataset.opt['scale'] cropped_denoised_img = denoised_img#[crop_border:-crop_border, crop_border:-crop_border, :] cropped_gt_img = gt_img#[crop_border:-crop_border, crop_border:-crop_border, :] psnr = util.calculate_psnr(cropped_denoised_img * 255, cropped_gt_img * 255) ssim = util.calculate_ssim(cropped_denoised_img * 255, cropped_gt_img * 255) test_results['psnr'].append(psnr) test_results['ssim'].append(ssim) if gt_img.shape[2] == 3: # RGB image denoised_img_y = bgr2ycbcr(denoised_img, only_y=True) gt_img_y = bgr2ycbcr(gt_img, only_y=True) cropped_denoised_img_y = denoised_img_y[crop_border:-crop_border, crop_border:-crop_border] cropped_gt_img_y = gt_img_y[crop_border:-crop_border, crop_border:-crop_border] psnr_y = util.calculate_psnr(cropped_denoised_img_y * 255, cropped_gt_img_y * 255) ssim_y = util.calculate_ssim(cropped_denoised_img_y * 255, cropped_gt_img_y * 255) test_results['psnr_y'].append(psnr_y) test_results['ssim_y'].append(ssim_y) logger.info('{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}; PSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}.'\ .format(img_name, psnr, ssim, psnr_y, ssim_y)) else: logger.info('{:20s} - PSNR: {:.6f} dB; SSIM: {:.6f}.'.format(img_name, psnr, ssim)) else: logger.info(img_name) if opt["image_type"] == "exr": denoised_exr = util.tensor2exr(visuals['DENOISED']) # uint8 noisy_exr = util.tensor2exr(visuals['NOISY']) gt_exr = util.tensor2exr(visuals['GT']) # uint8 save_DENOISED_img_path = os.path.join(dataset_dir, img_name + suffix + '_1denoised.exr') save_NOISY_img_path = os.path.join(dataset_dir, img_name + suffix + '_0noisy.exr') save_GT_img_path = os.path.join(dataset_dir, img_name + suffix + '_2gt.exr') util.saveEXRfromMatrix(save_DENOISED_img_path, denoised_exr, (x, y)) util.saveEXRfromMatrix(save_NOISY_img_path, noisy_exr, (x, y)) util.saveEXRfromMatrix(save_GT_img_path, gt_exr, (x, y)) if need_GT: # metrics # Average PSNR/SSIM results ave_psnr = sum(test_results['psnr']) / len(test_results['psnr']) ave_ssim = sum(test_results['ssim']) / len(test_results['ssim']) logger.info('----Average PSNR/SSIM results for {}----\n\tPSNR: {:.6f} dB; SSIM: {:.6f}\n'\ .format(test_set_name, ave_psnr, ave_ssim)) # if test_results['psnr_y'] and test_results['ssim_y']: # ave_psnr_y = sum(test_results['psnr_y']) / len(test_results['psnr_y']) # ave_ssim_y = sum(test_results['ssim_y']) / len(test_results['ssim_y']) # logger.info('----Y channel, average PSNR/SSIM----\n\tPSNR_Y: {:.6f} dB; SSIM_Y: {:.6f}\n'\ # .format(ave_psnr_y, ave_ssim_y))
44.822222
107
0.637085
9c4403124da36e660f5e49831ef1324004e35d3f
5,403
py
Python
neuralNetwork/layer3/nerualNet.py
zzw0929/deeplearning
d96aadd71838fa60a4c031b13fe475d4839e8a33
[ "Apache-2.0" ]
4
2017-09-04T07:54:33.000Z
2017-09-04T16:55:04.000Z
neuralNetwork/layer3/nerualNet.py
zzw0929/deeplearning
d96aadd71838fa60a4c031b13fe475d4839e8a33
[ "Apache-2.0" ]
null
null
null
neuralNetwork/layer3/nerualNet.py
zzw0929/deeplearning
d96aadd71838fa60a4c031b13fe475d4839e8a33
[ "Apache-2.0" ]
null
null
null
# coding:utf-8 import time import matplotlib.pyplot as plt import numpy as np import sklearn import sklearn.datasets import sklearn.linear_model import matplotlib matplotlib.rcParams['figure.figsize'] = (10.0, 8.0) np.random.seed(0) X, y = sklearn.datasets.make_moons(200, noise=0.20) plt.scatter(X[:,0], X[:,1], s=40, c=y, cmap=plt.cm.Spectral) # plt.show() clf = sklearn.linear_model.LogisticRegressionCV() clf.fit(X, y) # Helper function to plot a decision boundary. # If you don't fully understand this function don't worry, it just generates # the contour plot below. plot_decision_boundary(lambda x: clf.predict(x)) plt.title("Logistic Regression") #plt.show() num_examples = len(X) # training set size nn_input_dim = 2 # input layer dimensionality nn_output_dim = 2 # output layer dimensionality # Gradient descent parameters (I picked these by hand) epsilon = 0.01 # learning rate for gradient descent reg_lambda = 0.01 # regularization strength # Helper function to evaluate the total loss on the dataset # This function learns parameters for the neural network and returns the model. # - nn_hdim: Number of nodes in the hidden layer # - num_passes: Number of passes through the training data for gradient descent # - print_loss: If True, print the loss every 1000 iterations if __name__ == '__main__': #print(y) #print(12121) #print(X) test_1()
34.634615
99
0.644827
9c44390f5cc963fc28a219803d274d7a3995f439
101
py
Python
app/domain/create_db.py
Lifeistrange/flaskweb
6226e9f546d96d5f0a8f11104a37849e8f16ce80
[ "MIT" ]
null
null
null
app/domain/create_db.py
Lifeistrange/flaskweb
6226e9f546d96d5f0a8f11104a37849e8f16ce80
[ "MIT" ]
null
null
null
app/domain/create_db.py
Lifeistrange/flaskweb
6226e9f546d96d5f0a8f11104a37849e8f16ce80
[ "MIT" ]
null
null
null
#!/usr/bin/env python # coding=utf-8 from manage import db import app.domain.model db.create_all()
12.625
23
0.742574
9c4523a703ff0d45d61f298f70ea4dd4f700b946
1,188
py
Python
tljh_repo2docker/tests/utils.py
TimoRoth/tljh-repo2docker
35e7e940266de0490990acc780b64802afe973c1
[ "BSD-3-Clause" ]
46
2020-05-04T19:32:39.000Z
2022-03-25T13:47:41.000Z
tljh_repo2docker/tests/utils.py
TimoRoth/tljh-repo2docker
35e7e940266de0490990acc780b64802afe973c1
[ "BSD-3-Clause" ]
41
2020-04-29T09:58:34.000Z
2022-03-15T21:44:15.000Z
tljh_repo2docker/tests/utils.py
TimoRoth/tljh-repo2docker
35e7e940266de0490990acc780b64802afe973c1
[ "BSD-3-Clause" ]
9
2020-04-29T08:42:12.000Z
2021-11-04T04:01:35.000Z
import asyncio import json from aiodocker import Docker, DockerError from jupyterhub.tests.utils import api_request
25.826087
85
0.574074
9c464b6985d41cae6c644e444882f725004b5bea
657
py
Python
05_ARIADNE_SUBSCRIPTIONS_GRAPHQL/api/resolvers/mutations/__init__.py
CrispenGari/python-flask
3e7896f401920b8dd045d807212ec24b8353a75a
[ "Apache-2.0" ]
2
2021-11-08T07:37:18.000Z
2021-11-13T09:23:46.000Z
05_ARIADNE_SUBSCRIPTIONS_GRAPHQL/api/resolvers/mutations/__init__.py
CrispenGari/Flask
3e7896f401920b8dd045d807212ec24b8353a75a
[ "Apache-2.0" ]
null
null
null
05_ARIADNE_SUBSCRIPTIONS_GRAPHQL/api/resolvers/mutations/__init__.py
CrispenGari/Flask
3e7896f401920b8dd045d807212ec24b8353a75a
[ "Apache-2.0" ]
null
null
null
from api import db from uuid import uuid4 from ariadne import MutationType from api.models import Post from api.store import queues mutation = MutationType()
24.333333
61
0.572298
9c464f3f464935bc0cc2e17b41fede6128938835
1,200
py
Python
async_sched/client/__init__.py
justengel/async_sched
f980722d51d15025522b2265426b0188ff368418
[ "MIT" ]
1
2020-10-19T13:36:20.000Z
2020-10-19T13:36:20.000Z
async_sched/client/__init__.py
justengel/async_sched
f980722d51d15025522b2265426b0188ff368418
[ "MIT" ]
null
null
null
async_sched/client/__init__.py
justengel/async_sched
f980722d51d15025522b2265426b0188ff368418
[ "MIT" ]
null
null
null
from async_sched.client import quit_server as module_quit from async_sched.client import request_schedules as module_request from async_sched.client import run_command as module_run from async_sched.client import schedule_command as module_schedule from async_sched.client import stop_schedule as module_stop from async_sched.client import update_server as module_update from .client import Client, \ quit_server_async, quit_server, update_server_async, update_server, request_schedules_async, \ request_schedules, run_command_async, run_command, schedule_command_async, schedule_command, \ stop_schedule_async, stop_schedule # The other modules in this package exist for the "-m" python flag # `python -m async_sched.client.request_schedules --host "12.0.0.1" --port 8000` __all__ = ['Client', 'quit_server_async', 'quit_server', 'update_server_async', 'update_server', 'request_schedules_async', 'request_schedules', 'run_command_async', 'run_command', 'schedule_command_async', 'schedule_command', 'stop_schedule_async', 'stop_schedule', 'module_quit', 'module_request', 'module_run', 'module_schedule', 'module_stop', 'module_update']
52.173913
113
0.785
9c4665897e0c67f15abd7b343516434de1fce226
545
py
Python
full-stack-angular-ngrx/backend/src/core/interfaces/crud.py
t4d-classes/angular_02212022
152dfa4b14ee84c1c34cef0b852349b250103e3b
[ "MIT" ]
null
null
null
full-stack-angular-ngrx/backend/src/core/interfaces/crud.py
t4d-classes/angular_02212022
152dfa4b14ee84c1c34cef0b852349b250103e3b
[ "MIT" ]
null
null
null
full-stack-angular-ngrx/backend/src/core/interfaces/crud.py
t4d-classes/angular_02212022
152dfa4b14ee84c1c34cef0b852349b250103e3b
[ "MIT" ]
null
null
null
import abc from typing import TypeVar, Generic, List, Dict T = TypeVar('T')
19.464286
55
0.629358
9c47360ad31544c866959d439dec3d10ef843fd1
2,730
py
Python
package/tests/test_init_command.py
MrKriss/stonemason
d78becc9168c2566b31b48c9a951e2823bc98362
[ "MIT" ]
2
2017-11-13T17:40:52.000Z
2021-05-08T15:58:28.000Z
package/tests/test_init_command.py
MrKriss/masonry
d78becc9168c2566b31b48c9a951e2823bc98362
[ "MIT" ]
3
2017-09-03T22:58:37.000Z
2017-09-12T21:45:27.000Z
package/tests/test_init_command.py
MrKriss/stonemason
d78becc9168c2566b31b48c9a951e2823bc98362
[ "MIT" ]
null
null
null
from pathlib import Path import pytest import git import json from conftest import TEST_DIR
26.764706
86
0.642125
9c47b2d078f8bff3deaa6bb4fad2e317c962efca
2,657
py
Python
mistral/mistral/api/controllers/v2/service.py
Toure/openstack_mistral_wip
1c3d028cb7c918de74a3cb018c84d6c5ee42e3f1
[ "Apache-2.0" ]
null
null
null
mistral/mistral/api/controllers/v2/service.py
Toure/openstack_mistral_wip
1c3d028cb7c918de74a3cb018c84d6c5ee42e3f1
[ "Apache-2.0" ]
null
null
null
mistral/mistral/api/controllers/v2/service.py
Toure/openstack_mistral_wip
1c3d028cb7c918de74a3cb018c84d6c5ee42e3f1
[ "Apache-2.0" ]
null
null
null
# Copyright 2015 Huawei Technologies Co., Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_config import cfg from oslo_log import log as logging from pecan import rest import six import tooz.coordination import wsmeext.pecan as wsme_pecan from mistral.api import access_control as acl from mistral.api.controllers.v2 import resources # TODO(rakhmerov): invalid dependency, a REST controller must not depend on # a launch script. from mistral.cmd import launch from mistral import context from mistral import exceptions as exc from mistral.service import coordination from mistral.utils import rest_utils LOG = logging.getLogger(__name__)
35.905405
79
0.686865
9c47e111b35b005c12b173bba50fdccaf56fa07f
742
py
Python
setup.py
jtauber/greek-utils
1da19a5a784c4dac9d205ae1afdc5516ddcae9b4
[ "MIT" ]
13
2016-01-18T00:39:20.000Z
2019-12-13T12:55:28.000Z
setup.py
jtauber/greek-utils
1da19a5a784c4dac9d205ae1afdc5516ddcae9b4
[ "MIT" ]
2
2016-11-01T01:11:22.000Z
2016-12-24T05:33:06.000Z
setup.py
jtauber/greek-utils
1da19a5a784c4dac9d205ae1afdc5516ddcae9b4
[ "MIT" ]
5
2016-01-17T21:28:16.000Z
2018-08-03T22:04:05.000Z
from setuptools import setup setup( name="greek-utils", version="0.2", description="various utilities for processing Ancient Greek", license="MIT", url="http://github.com/jtauber/greek-utils", author="James Tauber", author_email="[email protected]", packages=["greekutils"], classifiers=[ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Text Processing", "Topic :: Text Processing :: Linguistic", "Topic :: Utilities", ], )
30.916667
65
0.598383
9c482fa55469ed7e8a8294ff4e637257f9060775
6,275
py
Python
source/tweet.py
jfilter/foia-bot
11a9e31116dddfcd7bbd17730be3bdb9cec65e27
[ "MIT" ]
null
null
null
source/tweet.py
jfilter/foia-bot
11a9e31116dddfcd7bbd17730be3bdb9cec65e27
[ "MIT" ]
null
null
null
source/tweet.py
jfilter/foia-bot
11a9e31116dddfcd7bbd17730be3bdb9cec65e27
[ "MIT" ]
null
null
null
""" tweet stuff in intervals """ import time import datetime import twitter from markov_chains import german_text from config import config_no, config_yes MAX_TWEET_LENGTH = 280 greeting = ' Sehr geehrte/r Anstragssteller/in.' ending = ' MfG' num_tweets = 3 # if __name__ == '__main__': # main()
33.918919
126
0.579283
9c48342a450b3888ddd355595c9462c4c225a106
2,880
py
Python
account_processing.py
amitjoshi9627/Playong
d54a8db05ae5035e122b8bc8d84c849f25483005
[ "MIT" ]
4
2019-04-22T15:16:45.000Z
2020-01-17T12:57:09.000Z
account_processing.py
amitjoshi9627/Playong
d54a8db05ae5035e122b8bc8d84c849f25483005
[ "MIT" ]
null
null
null
account_processing.py
amitjoshi9627/Playong
d54a8db05ae5035e122b8bc8d84c849f25483005
[ "MIT" ]
null
null
null
from selenium.webdriver import Firefox from selenium.webdriver.firefox.options import Options import getpass import time from selenium.webdriver.common.keys import Keys from selenium.webdriver.common.action_chains import ActionChains from utils import *
32.359551
110
0.682986
9c49c6272ae1b539badcabd74a81163ceda4090b
1,104
py
Python
Mundo 3/teste.py
RafaelSdm/Curso-de-Python
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
[ "MIT" ]
1
2021-03-10T21:53:38.000Z
2021-03-10T21:53:38.000Z
Mundo 3/teste.py
RafaelSdm/Curso-de-Python
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
[ "MIT" ]
null
null
null
Mundo 3/teste.py
RafaelSdm/Curso-de-Python
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
[ "MIT" ]
null
null
null
pessoas = {'nomes': "Rafael","sexo":"macho alfa","idade":19} print(f"o {pessoas['nomes']} que se considera um {pessoas['sexo']} possui {pessoas['idade']}") print(pessoas.keys()) print(pessoas.values()) print(pessoas.items()) for c in pessoas.keys(): print(c) for c in pessoas.values(): print(c) for c, j in pessoas.items(): print(f"o {c} pertence ao {j}") del pessoas['sexo'] print(pessoas) pessoas["sexo"] = "macho alfa" print(pessoas) print("outro codida daqui pra frente \n\n\n\n\n\n") estado1 = {'estado': 'minas gerais', 'cidade':'capela nova' } estado2 = {'estado':'rio de janeiro', 'cidade':"rossinha"} brasil = [] brasil.append(estado1) brasil.append(estado2) print(brasil) print(f"o brasil possui um estado chamado {brasil[0]['estado']} e a prorpia possui uma cidade chamada {brasil[0]['cidade']}") print("-"*45) es = {} br = [] for c in range(0,3): es['estado'] = str(input("informe o seu estado:")) es['cidade'] = str(input("informe a sua cidade:")) br.append(es.copy()) for c in br: for i,j in c.items(): print(f"o campo {i} tem valor {j}")
23
125
0.638587
9c49fd2b9580ad32f0138ff3ca8bca4fa9148e22
526
py
Python
rsa-cipher/makeRsaKeys.py
mumbo-pro/cyrptography-algorithm
8e08c027c361f94c547f8b4ede723401399c93ed
[ "Apache-2.0" ]
1
2021-02-23T09:53:19.000Z
2021-02-23T09:53:19.000Z
rsa-cipher/makeRsaKeys.py
mumbo-pro/cyrptography-algorithm
8e08c027c361f94c547f8b4ede723401399c93ed
[ "Apache-2.0" ]
1
2019-09-18T08:24:05.000Z
2019-09-18T08:24:05.000Z
rsa-cipher/makeRsaKeys.py
mumbo-pro/cyrptography-algorithm
8e08c027c361f94c547f8b4ede723401399c93ed
[ "Apache-2.0" ]
null
null
null
# RSA Key Generator 2. # http://inventwithpython.com/hacking (BSD Licensed) 3. 4. import random, sys, os, rabinMiller, cryptomath The program imports the rabinMiller and cryptomath modules that we created in the last chapter, along with a few others. Chapter 24 Public Key Cryptography and the RSA Cipher 387 makeRsaKeys.py
65.75
188
0.714829
9c4a756656ca930b517891bc50444eed71522301
2,537
py
Python
atlas-outreach-data-tools-framework-1.1/Configurations/PlotConf_TTbarAnalysis.py
Harvard-Neutrino/phys145
c3dc5788128fa2a7db0af0c796cf3afd957bf0ed
[ "CC0-1.0" ]
null
null
null
atlas-outreach-data-tools-framework-1.1/Configurations/PlotConf_TTbarAnalysis.py
Harvard-Neutrino/phys145
c3dc5788128fa2a7db0af0c796cf3afd957bf0ed
[ "CC0-1.0" ]
null
null
null
atlas-outreach-data-tools-framework-1.1/Configurations/PlotConf_TTbarAnalysis.py
Harvard-Neutrino/phys145
c3dc5788128fa2a7db0af0c796cf3afd957bf0ed
[ "CC0-1.0" ]
1
2021-11-30T02:08:12.000Z
2021-11-30T02:08:12.000Z
config = { "Luminosity": 1000, "InputDirectory": "results", "Histograms" : { "WtMass" : {}, "etmiss" : {}, "lep_n" : {}, "lep_pt" : {}, "lep_eta" : {}, "lep_E" : {}, "lep_phi" : {"y_margin" : 0.6}, "lep_charge" : {"y_margin" : 0.6}, "lep_type" : {"y_margin" : 0.5}, "lep_ptconerel30" : {}, "lep_etconerel20" : {}, "lep_d0" : {}, "lep_z0" : {}, "n_jets" : {}, "jet_pt" : {}, "jet_m" : {}, "jet_jvf" : {"y_margin" : 0.4}, "jet_eta" : {}, "jet_MV1" : {"y_margin" : 0.3}, "vxp_z" : {}, "pvxp_n" : {}, }, "Paintables": { "Stack": { "Order" : ["Diboson", "DrellYan", "W", "Z", "stop", "ttbar"], "Processes" : { "Diboson" : { "Color" : "#fa7921", "Contributions" : ["WW", "WZ", "ZZ"]}, "DrellYan": { "Color" : "#5bc0eb", "Contributions" : ["DYeeM08to15", "DYeeM15to40", "DYmumuM08to15", "DYmumuM15to40", "DYtautauM08to15", "DYtautauM15to40"]}, "W": { "Color" : "#e55934", "Contributions" : ["WenuJetsBVeto", "WenuWithB", "WenuNoJetsBVeto", "WmunuJetsBVeto", "WmunuWithB", "WmunuNoJetsBVeto", "WtaunuJetsBVeto", "WtaunuWithB", "WtaunuNoJetsBVeto"]}, "Z": { "Color" : "#086788", "Contributions" : ["Zee", "Zmumu", "Ztautau"]}, "stop": { "Color" : "#fde74c", "Contributions" : ["stop_tchan_top", "stop_tchan_antitop", "stop_schan", "stop_wtchan"]}, "ttbar": { "Color" : "#9bc53d", "Contributions" : ["ttbar_lep", "ttbar_had"]} } }, "data" : { "Contributions": ["data_Egamma", "data_Muons"]} }, "Depictions": { "Order": ["Main", "Data/MC"], "Definitions" : { "Data/MC": { "type" : "Agreement", "Paintables" : ["data", "Stack"] }, "Main": { "type" : "Main", "Paintables": ["Stack", "data"] }, } }, }
32.525641
192
0.358691
9c4b16b905a82a27b27a39983a45cc2293e0e0ce
1,943
py
Python
modules/optimizations/dead_codes.py
OMGhozlan/deobshell
701c8a09f9258442255013605185ed0a7fbac704
[ "MIT" ]
null
null
null
modules/optimizations/dead_codes.py
OMGhozlan/deobshell
701c8a09f9258442255013605185ed0a7fbac704
[ "MIT" ]
null
null
null
modules/optimizations/dead_codes.py
OMGhozlan/deobshell
701c8a09f9258442255013605185ed0a7fbac704
[ "MIT" ]
null
null
null
# coding=utf-8 from ..logger import log_debug from ..utils import parent_map, replace_node, is_prefixed_var, get_used_vars
36.660377
116
0.574884
9c4c40d49329ce6958ed3b498e11172edf73d231
1,433
py
Python
Convert Integer A to Integer B.py
RijuDasgupta9116/LintCode
4629a3857b2c57418b86a3b3a7180ecb15e763e3
[ "Apache-2.0" ]
321
2015-01-04T04:01:44.000Z
2022-03-20T13:21:55.000Z
Convert Integer A to Integer B.py
leifoo/LintCode
2520762a1cfbd486081583136396a2b2cac6e4fb
[ "Apache-2.0" ]
1
2016-01-11T04:29:37.000Z
2016-01-11T04:29:37.000Z
Convert Integer A to Integer B.py
leifoo/LintCode
2520762a1cfbd486081583136396a2b2cac6e4fb
[ "Apache-2.0" ]
114
2015-01-27T06:08:17.000Z
2022-03-23T03:58:11.000Z
""" Determine the number of bits required to convert integer A to integer B Example Given n = 31, m = 14,return 2 (31)10=(11111)2 (14)10=(01110)2 """ __author__ = 'Danyang' if __name__=="__main__": assert Solution().bitSwapRequired(1, -1)==31 assert Solution().bitSwapRequired(31, 14)==2
19.630137
71
0.415213
9c4cf09ffcfa4dd9bf0d914e9750a3f14e039df3
605
py
Python
examples/basic/findQSpark.py
myriadrf/pyLMS7002M
b866deea1f05dba44c9ed1a1a4666352b811b66b
[ "Apache-2.0" ]
46
2016-11-29T05:10:36.000Z
2021-10-31T19:27:46.000Z
examples/basic/findQSpark.py
myriadrf/pyLMS7002M
b866deea1f05dba44c9ed1a1a4666352b811b66b
[ "Apache-2.0" ]
2
2017-04-15T21:36:01.000Z
2017-06-08T09:44:26.000Z
examples/basic/findQSpark.py
myriadrf/pyLMS7002M
b866deea1f05dba44c9ed1a1a4666352b811b66b
[ "Apache-2.0" ]
16
2016-11-28T20:47:55.000Z
2021-04-07T01:48:20.000Z
from pyLMS7002M import * print("Searching for QSpark...") try: QSpark = QSpark() except: print("QSpark not found") exit(1) print("\QSpark info:") QSpark.printInfo() # print the QSpark board info # QSpark.LMS7002_Reset() # reset the LMS7002M lms7002 = QSpark.getLMS7002() # get the LMS7002M object ver, rev, mask = lms7002.chipInfo # get the chip info print("\nLMS7002M info:") print("VER : "+str(ver)) print("REV : "+str(rev)) print("MASK : "+str(mask))
31.842105
80
0.528926
9c4d1d59e8d1a05ab55391042aa571be2ead1705
2,549
py
Python
macaddress/__init__.py
paradxum/django-macaddress
c223dc8c79555d2265789c4d13667036cfbd7bd8
[ "BSD-3-Clause" ]
42
2015-11-23T09:40:36.000Z
2022-03-15T18:15:44.000Z
macaddress/__init__.py
paradxum/django-macaddress
c223dc8c79555d2265789c4d13667036cfbd7bd8
[ "BSD-3-Clause" ]
19
2016-01-08T13:36:23.000Z
2021-05-13T23:57:39.000Z
macaddress/__init__.py
paradxum/django-macaddress
c223dc8c79555d2265789c4d13667036cfbd7bd8
[ "BSD-3-Clause" ]
16
2016-02-04T09:43:12.000Z
2021-04-15T13:27:40.000Z
from django.conf import settings from netaddr import mac_unix, mac_eui48 import importlib import warnings from pkg_resources import get_distribution, DistributionNotFound import os.path try: _dist = get_distribution('django-macaddress') except DistributionNotFound: __version__ = 'Please install this project with setup.py' else: __version__ = _dist.version VERSION = __version__ # synonym
43.20339
124
0.710867
9c4ee3a1833fdef3d1343fa0ed07aabcf8faecca
2,422
py
Python
textmagic/test/message_status_tests.py
dfstrauss/textmagic-sms-api-python
9ab05b461861ac53da651588bef6b0b504653ecd
[ "BSD-3-Clause" ]
2
2017-12-20T11:16:57.000Z
2022-02-22T06:46:19.000Z
textmagic/test/message_status_tests.py
dfstrauss/textmagic-sms-api-python
9ab05b461861ac53da651588bef6b0b504653ecd
[ "BSD-3-Clause" ]
2
2015-06-14T16:06:33.000Z
2017-08-23T11:38:22.000Z
textmagic/test/message_status_tests.py
dfstrauss/textmagic-sms-api-python
9ab05b461861ac53da651588bef6b0b504653ecd
[ "BSD-3-Clause" ]
5
2015-06-12T16:21:17.000Z
2022-02-22T06:46:23.000Z
import time from textmagic.test import ONE_TEST_NUMBER from textmagic.test import THREE_TEST_NUMBERS from textmagic.test import TextMagicTestsBase from textmagic.test import LiveUnsafeTests
36.149254
87
0.660198
9c4ef34765e81a312523257e87f5ab76933d8997
2,245
py
Python
apps/orders/models.py
LinkanDawang/FreshMallDemo
5b8e2d2e8e137f609e8ac1e29ea013bb3ef34edb
[ "Apache-2.0" ]
null
null
null
apps/orders/models.py
LinkanDawang/FreshMallDemo
5b8e2d2e8e137f609e8ac1e29ea013bb3ef34edb
[ "Apache-2.0" ]
5
2020-06-05T18:27:41.000Z
2022-01-13T00:48:03.000Z
apps/orders/models.py
LinkanDawang/dailyfresh
4f0360d5e4eeda4737234942248715b77d9e3b12
[ "Apache-2.0" ]
null
null
null
from django.db import models from utils.models import BaseModel from users.models import User, Address from goods.models import GoodsSKU # Create your models here.
28.782051
104
0.620045
9c4f72bb8eb3058809660eadcee54f1e16cab76f
18,201
py
Python
event/arguments/prepare/event_vocab.py
hunterhector/DDSemantics
883ef1015bd21d9b8575d8000faf3b506a09f21c
[ "Apache-2.0" ]
null
null
null
event/arguments/prepare/event_vocab.py
hunterhector/DDSemantics
883ef1015bd21d9b8575d8000faf3b506a09f21c
[ "Apache-2.0" ]
null
null
null
event/arguments/prepare/event_vocab.py
hunterhector/DDSemantics
883ef1015bd21d9b8575d8000faf3b506a09f21c
[ "Apache-2.0" ]
2
2018-06-24T17:40:31.000Z
2020-07-30T19:19:55.000Z
from collections import defaultdict, Counter import os import gzip import json import pickle from json.decoder import JSONDecodeError import logging from typing import Dict import pdb from event import util from event.arguments.prepare.slot_processor import get_simple_dep, is_propbank_dep logger = logging.getLogger(__name__) def get_arg_rep(self, dep, entity_rep): if dep.startswith("prep"): dep = self.get_vocab_word(dep, "preposition") arg_rep = self.make_arg(entity_rep, dep) return arg_rep def get_pred_rep(self, event): """ Take the predicates, and get the vocab index for it. This will first use the predicate itself, if not found, it will try to use the verb form. :param event: :return: """ pred = self.get_vocab_word(event["predicate"], "predicate") if pred == self.oovs["predicate"]: # Try to see if the verb form help. if "verb_form" in event: pred = self.get_vocab_word(event["verb_form"], "predicate") return self.make_predicate(pred) def get_fe_rep(self, frame_name, fe_role): # return self.make_fe(frame_name, fe_role) return self.get_vocab_word(self.make_fe(frame_name, fe_role), "fe") class EmbbedingVocab: def __init__(self, vocab_file, with_padding=False, extras=None): self.vocab_file = vocab_file self.vocab = {} self.tf = [] self.extras = [] self.pad = "__PADDING__" self.padded = False if with_padding: # Paddings should be at 0. self.padded = True self.vocab[self.pad] = 0 self.tf.append(0) if extras: for name in extras: self.add_extra(name) self.__read_vocab() def add_extra(self, name): """Add extra dimensions into the embedding vocab, used for special tokens. Args: name: Returns: """ if name in self.extras: logger.info( f"Extra {name} already exist in vocabulary " f"at index {self.vocab[name]}" ) return self.vocab[name] else: self.extras.append(name) extra_index = len(self.vocab) self.vocab[name] = extra_index self.tf.append(0) logger.info( f"Adding {name} as extra dimension {extra_index} " f"to {self.vocab_file}" ) return extra_index def create_sentences( doc, event_vocab, output_path, include_frame=False, use_simple_dep=False, prop_arg_only=False, ): if include_frame: print("Adding frames to sentences.") doc_count = 0 event_count = 0 with gzip.open(doc) as data, gzip.open(output_path, "w") as out: for line in data: try: doc_info = json.loads(line) except JSONDecodeError: continue sentence = [] represent_by_id = {} for entity in doc_info["entities"]: eid = entity["entityId"] represent = entity["representEntityHead"] represent_by_id[eid] = represent for event in doc_info["events"]: event_count += 1 sentence.append(event_vocab.get_pred_rep(event)) if include_frame and not event["frame"] == "NA": frame = event_vocab.get_vocab_word(event["frame"], "frame") sentence.append(frame) for arg in event["arguments"]: dep = arg["dep"] if ( arg["argStart"] == event["predicateStart"] and arg["argEnd"] == event["predicateEnd"] ): dep = "root" if use_simple_dep: dep = get_simple_dep(dep) if prop_arg_only and not is_propbank_dep(dep): continue sentence.append( event_vocab.get_arg_rep( dep, event_vocab.get_arg_entity_rep(arg, None) ) ) if include_frame and not arg["feName"] == "NA": fe = event_vocab.get_fe_rep(frame, arg["feName"]) if not fe == event_vocab.oovs["fe"]: sentence.append(fe) if "NA" in sentence: pdb.set_trace() doc_count += 1 out.write(str.encode(" ".join(sentence) + "\n")) if event_count % 1000 == 0: print( "\rCreated sentences for {} documents, " "{} events.".format(doc_count, event_count), end="", ) print( "\rCreated sentences for {} documents, " "{} events.\n".format(doc_count, event_count), end="", ) def write_sentences( sent_out, event_data, event_vocab, include_frame, simple_dep, prop_arg ): if not os.path.exists(sent_out): os.makedirs(sent_out) fname = "sent_with_frames.gz" if include_frame else "sent_pred_only.gz" out = os.path.join(sent_out, fname) if not os.path.exists(out): create_sentences( event_data, event_vocab, out, include_frame=include_frame, use_simple_dep=simple_dep, prop_arg_only=prop_arg, ) else: logger.info(f"Will not overwrite {out}") def main(event_data, vocab_dir, sent_out, prop_arg): if not os.path.exists(vocab_dir): os.makedirs(vocab_dir) event_vocab = TypedEventVocab(vocab_dir, event_data=event_data) logger.info("Done loading vocabulary.") # The 3 boolean are : include_frame,simple_dep, prop_arg if prop_arg: # For propbank style training. logger.info("Creating event sentences in propbank style") # Include frame or not version for propbank, but always use simple dep # and propbank style arguments. write_sentences(sent_out, event_data, event_vocab, False, True, True) write_sentences(sent_out, event_data, event_vocab, True, True, True) else: # For framenet style training. logger.info("Creating event sentences in FrameNet style") # Include frame or not version for framenet, but always use complex dep # and framenet style arguments. write_sentences(sent_out, event_data, event_vocab, True, False, False) write_sentences(sent_out, event_data, event_vocab, False, False, False) if __name__ == "__main__": parser = util.OptionPerLineParser( description="Event Vocabulary.", fromfile_prefix_chars="@" ) parser.add_argument("--vocab_dir", type=str, help="Vocabulary directory.") parser.add_argument("--input_data", type=str, help="Input data.") parser.add_argument("--sent_out", type=str, help="Sentence out dir.") parser.add_argument( "--prop_arg", action="store_true", help="Propbank arg only.", default=False ) util.set_basic_log() args = parser.parse_args() main(args.input_data, args.vocab_dir, args.sent_out, args.prop_arg)
31.598958
86
0.538652
9c4f8c513227a59417c089b50d394e43f128a12b
478
py
Python
20.py
dexinl/kids_math
48f6c37e221bbd2484ad19861c61e5ed7d3aa09e
[ "Apache-2.0" ]
null
null
null
20.py
dexinl/kids_math
48f6c37e221bbd2484ad19861c61e5ed7d3aa09e
[ "Apache-2.0" ]
null
null
null
20.py
dexinl/kids_math
48f6c37e221bbd2484ad19861c61e5ed7d3aa09e
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python import random count = 20 test_set = [] while count: a = random.randrange(3,20) b = random.randrange(3,20) if a > b and a - b > 1: if (b, a-b) not in test_set: test_set.append((b, a-b)) count -= 1 elif b > a and b - a > 1: if (a, b-a) not in test_set: test_set.append((a, b-a)) count -= 1 else: continue for (a,b) in test_set: print " %2d + %2d = " % (a,b)
19.12
37
0.479079
9c51e7ffa104c06ed45deeaa7e32faf7f56f41a1
4,570
py
Python
autovirt/equipment/domain/equipment.py
xlam/autovirt
a19f9237c8b1123ce4f4b8b396dc88122019d4f8
[ "MIT" ]
null
null
null
autovirt/equipment/domain/equipment.py
xlam/autovirt
a19f9237c8b1123ce4f4b8b396dc88122019d4f8
[ "MIT" ]
null
null
null
autovirt/equipment/domain/equipment.py
xlam/autovirt
a19f9237c8b1123ce4f4b8b396dc88122019d4f8
[ "MIT" ]
null
null
null
from enum import Enum from functools import reduce from math import ceil from typing import Optional, Tuple from autovirt import utils from autovirt.exception import AutovirtError from autovirt.structs import UnitEquipment, RepairOffer logger = utils.get_logger() # maximum allowed equipment price PRICE_MAX = 100000 # value to add and sub from offer quality when filtering QUALITY_DELTA = 3 def quantity_to_repair(units: list[UnitEquipment]) -> int: """Calculate total quantity of equipment to repair on given units""" return sum([unit.wear_quantity for unit in units]) def quantity_total(units: list[UnitEquipment]) -> int: """Calculate total equipment count on given units""" return sum([unit.quantity for unit in units]) def split_by_quality( units: list[UnitEquipment], quality_type: QualityType = QualityType.REQUIRED ) -> dict[float, list[UnitEquipment]]: """Split units by quality (required or installed)""" res: dict[float, list[UnitEquipment]] = {} for unit in units: quality = getattr(unit, quality_type.value) if quality not in res.keys(): res[quality] = [] res[quality].append(unit) return res def split_mismatch_quality_units( units: list[UnitEquipment], ) -> tuple[list[UnitEquipment], list[UnitEquipment]]: """Split units into 'normal' and 'mismatch' groups. Mismatched unit have installed equipment of lower quality then required. We need to treat them in different manner then normal while repairing. """ normal = [] mismatch = [] for unit in units: if unit.quality < unit.quality_required: mismatch.append(unit) else: normal.append(unit) return normal, mismatch
34.104478
92
0.679212
9c520307c63d7fc118bc65c38c0ef12159f02949
594
py
Python
day09/part2.py
mtn/advent16
0df34237485ee1246532e9eda0ef643e6950d13e
[ "MIT" ]
null
null
null
day09/part2.py
mtn/advent16
0df34237485ee1246532e9eda0ef643e6950d13e
[ "MIT" ]
null
null
null
day09/part2.py
mtn/advent16
0df34237485ee1246532e9eda0ef643e6950d13e
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import re with open("input.txt") as f: content = f.read().strip() print(ulen(content))
21.214286
64
0.481481
9c520e00d9b073d8aaafcc2b263b654b36c5fc45
17,397
py
Python
cirq-core/cirq/contrib/quimb/mps_simulator_test.py
Nexuscompute/Cirq
640ef8f82d6a56ec95361388ce7976e096cca906
[ "Apache-2.0" ]
null
null
null
cirq-core/cirq/contrib/quimb/mps_simulator_test.py
Nexuscompute/Cirq
640ef8f82d6a56ec95361388ce7976e096cca906
[ "Apache-2.0" ]
4
2022-01-16T14:12:15.000Z
2022-02-24T03:58:46.000Z
cirq-core/cirq/contrib/quimb/mps_simulator_test.py
Nexuscompute/Cirq
640ef8f82d6a56ec95361388ce7976e096cca906
[ "Apache-2.0" ]
null
null
null
# pylint: disable=wrong-or-nonexistent-copyright-notice import itertools import math import numpy as np import pytest import sympy import cirq import cirq.contrib.quimb as ccq import cirq.testing from cirq import value
34.724551
100
0.647755
9c5339840ef805187717d24fa9d11f6cf4589f50
6,574
py
Python
e2e_tests/tests/config.py
winding-lines/determined
231e1ac1df9d77cabc09b724ca2f8070eac0da73
[ "Apache-2.0" ]
null
null
null
e2e_tests/tests/config.py
winding-lines/determined
231e1ac1df9d77cabc09b724ca2f8070eac0da73
[ "Apache-2.0" ]
null
null
null
e2e_tests/tests/config.py
winding-lines/determined
231e1ac1df9d77cabc09b724ca2f8070eac0da73
[ "Apache-2.0" ]
null
null
null
import os from pathlib import Path from typing import Any, Dict from determined.common import util MASTER_SCHEME = "http" MASTER_IP = "localhost" MASTER_PORT = "8080" DET_VERSION = None DEFAULT_MAX_WAIT_SECS = 1800 MAX_TASK_SCHEDULED_SECS = 30 MAX_TRIAL_BUILD_SECS = 90 DEFAULT_TF1_CPU_IMAGE = "determinedai/environments:py-3.7-pytorch-1.7-tf-1.15-cpu-08f9c9b" DEFAULT_TF2_CPU_IMAGE = ( "determinedai/environments:py-3.8-pytorch-1.9-lightning-1.3-tf-2.4-cpu-08f9c9b" ) DEFAULT_TF1_GPU_IMAGE = "determinedai/environments:cuda-10.2-pytorch-1.7-tf-1.15-gpu-08f9c9b" DEFAULT_TF2_GPU_IMAGE = ( "determinedai/environments:cuda-11.1-pytorch-1.9-lightning-1.3-tf-2.4-gpu-08f9c9b" ) TF1_CPU_IMAGE = os.environ.get("TF1_CPU_IMAGE") or DEFAULT_TF1_CPU_IMAGE TF2_CPU_IMAGE = os.environ.get("TF2_CPU_IMAGE") or DEFAULT_TF2_CPU_IMAGE TF1_GPU_IMAGE = os.environ.get("TF1_GPU_IMAGE") or DEFAULT_TF1_GPU_IMAGE TF2_GPU_IMAGE = os.environ.get("TF2_GPU_IMAGE") or DEFAULT_TF2_GPU_IMAGE GPU_ENABLED = os.environ.get("DET_TEST_GPU_ENABLED", "1") not in ("0", "false") PROJECT_ROOT_PATH = Path(__file__).resolve().parents[2]
32.068293
100
0.702616
9c53aaaab36a01f9660d76573d43ecd12a07d0cb
7,340
py
Python
src/greenbudget/app/subaccount/serializers.py
nickmflorin/django-proper-architecture-testing
da7c4019697e85f921695144375d2f548f1e98ad
[ "MIT" ]
null
null
null
src/greenbudget/app/subaccount/serializers.py
nickmflorin/django-proper-architecture-testing
da7c4019697e85f921695144375d2f548f1e98ad
[ "MIT" ]
null
null
null
src/greenbudget/app/subaccount/serializers.py
nickmflorin/django-proper-architecture-testing
da7c4019697e85f921695144375d2f548f1e98ad
[ "MIT" ]
null
null
null
from django.contrib.contenttypes.models import ContentType from rest_framework import serializers, exceptions from greenbudget.lib.rest_framework_utils.fields import ModelChoiceField from greenbudget.lib.rest_framework_utils.serializers import ( EnhancedModelSerializer) from greenbudget.app.budget.models import BaseBudget from greenbudget.app.common.serializers import ( EntitySerializer, AbstractBulkUpdateSerializer, create_bulk_create_serializer ) from greenbudget.app.fringe.models import Fringe from greenbudget.app.group.models import ( BudgetSubAccountGroup, TemplateSubAccountGroup ) from .models import SubAccount, BudgetSubAccount, TemplateSubAccount def create_bulk_create_subaccounts_serializer(model_cls): data_serializer = BudgetSubAccountSerializer if model_cls is TemplateSubAccount: data_serializer = TemplateSubAccountSerializer base_serializer = create_bulk_create_serializer(data_serializer) return BulkCreateSubAccountsSerializer def create_subaccount_bulk_change_serializer(model_cls): base_serializer = BudgetSubAccountSerializer if model_cls is TemplateSubAccount: base_serializer = TemplateSubAccountSerializer return SubAccountBulkChangeSerializer def create_bulk_update_subaccounts_serializer(model_cls): return BulkUpdateSubAccountsSerializer
35.631068
80
0.674523
9c554c033e4e1ae5351bb05f507b9e976ca41041
13,152
py
Python
modules/dbnd/src/dbnd/_core/tracking/managers/callable_tracking.py
busunkim96/dbnd
0191fdcd4c4fbd35006f1026d1a55b2abab9097b
[ "Apache-2.0" ]
224
2020-01-02T10:46:37.000Z
2022-03-02T13:54:08.000Z
modules/dbnd/src/dbnd/_core/tracking/managers/callable_tracking.py
busunkim96/dbnd
0191fdcd4c4fbd35006f1026d1a55b2abab9097b
[ "Apache-2.0" ]
16
2020-03-11T09:37:58.000Z
2022-01-26T10:22:08.000Z
modules/dbnd/src/dbnd/_core/tracking/managers/callable_tracking.py
busunkim96/dbnd
0191fdcd4c4fbd35006f1026d1a55b2abab9097b
[ "Apache-2.0" ]
24
2020-03-24T13:53:50.000Z
2022-03-22T11:55:18.000Z
import contextlib import logging import typing from typing import Any, Dict, Tuple import attr from dbnd._core.configuration import get_dbnd_project_config from dbnd._core.constants import ( RESULT_PARAM, DbndTargetOperationStatus, DbndTargetOperationType, TaskRunState, ) from dbnd._core.current import ( current_task_run, get_databand_run, is_verbose, try_get_current_task, ) from dbnd._core.errors.errors_utils import log_exception from dbnd._core.log.external_exception_logging import log_exception_to_server from dbnd._core.parameter.parameter_definition import ParameterDefinition from dbnd._core.parameter.parameter_value import ParameterFilters from dbnd._core.settings import TrackingConfig from dbnd._core.task.tracking_task import TrackingTask from dbnd._core.task_build.task_context import try_get_current_task from dbnd._core.task_build.task_definition import TaskDefinition from dbnd._core.task_build.task_results import FuncResultParameter from dbnd._core.task_run.task_run import TaskRun from dbnd._core.task_run.task_run_error import TaskRunError from dbnd._core.utils.callable_spec import args_to_kwargs from dbnd._core.utils.timezone import utcnow from targets import InMemoryTarget, Target from targets.value_meta import ValueMetaConf from targets.values import get_value_type_of_obj if typing.TYPE_CHECKING: from dbnd._core.task_build.task_decorator import TaskDecorator logger = logging.getLogger(__name__) def _handle_tracking_error(msg, func_call=None): log_exception_to_server() location = " for %s" % func_call.callable if func_call else "" msg = "Failed during dbnd %s for %s, ignoring, and continue without tracking" % ( msg, location, ) if is_verbose(): logger.warning( msg, exc_info=True, ) else: logger.info(msg) def _do_nothing_decorator(f): return f def _log_inputs(task_run): """ For tracking mode. Logs InMemoryTarget inputs. """ try: params = task_run.task._params for param_value in params.get_param_values(ParameterFilters.INPUTS): param, value = param_value.parameter, param_value.value if isinstance(param_value, InMemoryTarget): try: param = param.modify( value_meta_conf=ValueMetaConf( log_preview=True, log_schema=True, ) ) task_run.tracker.log_parameter_data( parameter=param, target=param_value, value=value, operation_type=DbndTargetOperationType.read, operation_status=DbndTargetOperationStatus.OK, ) except Exception as ex: log_exception( "Failed to log input param to tracking store.", ex=ex, non_critical=True, ) except Exception as ex: log_exception( "Failed to log input params to tracking store.", ex=ex, non_critical=True ) def _log_result(task_run, result): # type: (TaskRun, Any) -> None """ For tracking mode. Logs the task result and adds it to the target_origin map to support relationships between dynamic tasks. """ try: result_param = task_run.task.task_params.get_param_value(RESULT_PARAM) if not result_param: logger.debug( "No result params to log for task {}".format(task_run.task_af_id) ) return # we now the parameter value is a target because this is an output param # the target is created in the task creation result_param_def, result_target = result_param.parameter, result_param.value # spread result into relevant fields. if isinstance(result_param_def, FuncResultParameter): # assign all returned values to relevant band Outputs if result is None: return for result_name, value in result_param_def.named_results(result): # we now the parameter value is a target because this is an output param # the target is created in the task creation parameter_value = task_run.task.task_params.get_param_value(result_name) _log_parameter_value( task_run, parameter_definition=parameter_value.parameter, target=parameter_value.value, value=value, ) else: _log_parameter_value( task_run, parameter_definition=result_param_def, target=result_target, value=result, ) except Exception as ex: log_exception( "Failed to log result to tracking store.", ex=ex, non_critical=True )
38.568915
113
0.633744
9c56d5b6165d77a3d76bfb27f03c0f747558ff24
5,534
py
Python
api.py
Benardi/redis-basics
614a15afe47780886bb6088f4ae45c6a7cbc6e22
[ "MIT" ]
null
null
null
api.py
Benardi/redis-basics
614a15afe47780886bb6088f4ae45c6a7cbc6e22
[ "MIT" ]
null
null
null
api.py
Benardi/redis-basics
614a15afe47780886bb6088f4ae45c6a7cbc6e22
[ "MIT" ]
null
null
null
import os import logging from json import loads, dumps from datetime import timedelta from argparse import ArgumentParser from redis import Redis from flask import Response, Flask, request app = Flask(__name__) log = logging.getLogger(__name__) parser = ArgumentParser() parser.add_argument("-a", "--address", action="store", dest="address", type=str, required=True, help="Address for api") parser.add_argument("-p", "--port", action="store", dest="port", type=str, required=True, help="Port for api") parser.add_argument("-c", "--crt", action="store", dest="cert", type=str, required=False, help="Path to certificate for this API") parser.add_argument("-k", "--key", action="store", dest="key", type=str, required=False, help="Path to key of certificate used by this API") parser.add_argument("-rp", "--redis-port", action="store", dest="redis-port", type=str, required=True, help="Port for Redis client") args = vars(parser.parse_args()) api_address = args["address"] api_port = args["port"] api_cert = args["cert"] api_key = args["key"] redis_port = args["redis-port"] r = Redis(port=redis_port, charset="utf-8", decode_responses=True) def start_api(address, port, clnt_cert=None, clnt_key=None): if clnt_cert is None or clnt_key is None: app.run(host=address, port=port, debug=False) else: app.run(host=address, port=port, ssl_context=(clnt_cert, clnt_key), debug=False) if api_cert is None or api_key is None: start_api(api_address, api_port) else: start_api(api_address, api_port, api_cert, api_key)
30.744444
85
0.632815
9c5842107ba44f69dd4be13f1db7dd944439eb70
6,071
py
Python
zhihu_spider/ZhihuSpider/spiders/zhihu.py
Ki-Seki/gadgets
6e031e1f6536a15b48e3beb80ba8bf31d2a3db7a
[ "MIT" ]
1
2022-02-24T12:48:47.000Z
2022-02-24T12:48:47.000Z
zhihu_spider/ZhihuSpider/spiders/zhihu.py
Ki-Seki/gadgets
6e031e1f6536a15b48e3beb80ba8bf31d2a3db7a
[ "MIT" ]
null
null
null
zhihu_spider/ZhihuSpider/spiders/zhihu.py
Ki-Seki/gadgets
6e031e1f6536a15b48e3beb80ba8bf31d2a3db7a
[ "MIT" ]
1
2022-02-24T12:51:20.000Z
2022-02-24T12:51:20.000Z
""" spider Chromecmd cd Chrome chrome.exe --remote-debugging-port=9222 http://127.0.0.1:9222/json json spider settings # ROBOTSTXT_OBEY = False # parse # COOKIES_ENABLED = True # Request cookies # USER_AGENT = # DOWNLOADER_MIDDLEWARES user agent """ import re import json import datetime import scrapy from scrapy.loader import ItemLoader from urllib import parse from ZhihuSpider.utils.browsezhihu import get_cookies from ZhihuSpider import settings from ZhihuSpider.items import ZhihuQuestionItem, ZhihuAnswerItem
46.7
813
0.680119
9c5b28789209abf7074e8e365fb1d2e93079992e
2,109
py
Python
tests/test_bindiff.py
Kyle-Kyle/angr
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
[ "BSD-2-Clause" ]
6,132
2015-08-06T23:24:47.000Z
2022-03-31T21:49:34.000Z
tests/test_bindiff.py
Kyle-Kyle/angr
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
[ "BSD-2-Clause" ]
2,272
2015-08-10T08:40:07.000Z
2022-03-31T23:46:44.000Z
tests/test_bindiff.py
Kyle-Kyle/angr
345b2131a7a67e3a6ffc7d9fd475146a3e12f837
[ "BSD-2-Clause" ]
1,155
2015-08-06T23:37:39.000Z
2022-03-31T05:54:11.000Z
import nose import angr import logging l = logging.getLogger("angr.tests.test_bindiff") import os test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests') # todo make a better test if __name__ == "__main__": logging.getLogger("angr.analyses.bindiff").setLevel(logging.DEBUG) import sys if len(sys.argv) > 1: globals()['test_' + sys.argv[1]]() else: run_all()
39.055556
106
0.719772
9c5b6f7b1147d0bfa29ae31ca75143f0f85b1910
523
py
Python
main/handle_file.py
nucluster/us_states
26cca38990b9afb6a2b8cc4d1365409428793c6d
[ "MIT" ]
null
null
null
main/handle_file.py
nucluster/us_states
26cca38990b9afb6a2b8cc4d1365409428793c6d
[ "MIT" ]
null
null
null
main/handle_file.py
nucluster/us_states
26cca38990b9afb6a2b8cc4d1365409428793c6d
[ "MIT" ]
null
null
null
from pathlib import Path BASE_DIR = Path(__file__).resolve().parent.parent # def handle_uploaded_file(f): # with open('screenshot.png', 'wb') as destination: # # for chunk in f.chunks(): # # destination.write(chunk) # destination.write(f) with open( BASE_DIR/'media'/'Greater_coat_of_arms_of_the_United_States.png', 'rb' ) as file: flag = file.read() # handle_uploaded_file(flag) print(type(flag)) print(len(flag)) # print(flag) # for place in sys.path: # print(place)
21.791667
74
0.659656
9c5ca9cec48517b47b0e018883a0875e922d1924
4,921
py
Python
2018/finals/pwn-gdb-as-a-service/web_challenge/challenge/gaas.py
iicarus-bit/google-ctf
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
[ "Apache-2.0" ]
2,757
2018-04-28T21:41:36.000Z
2022-03-29T06:33:36.000Z
2018/finals/pwn-gdb-as-a-service/web_challenge/challenge/gaas.py
iicarus-bit/google-ctf
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
[ "Apache-2.0" ]
20
2019-07-23T15:29:32.000Z
2022-01-21T12:53:04.000Z
2018/finals/pwn-gdb-as-a-service/web_challenge/challenge/gaas.py
iicarus-bit/google-ctf
4eb8742bca58ff071ff8f6814d41d9ec7eb1db4b
[ "Apache-2.0" ]
449
2018-05-09T05:54:05.000Z
2022-03-30T14:54:18.000Z
#!/usr/bin/env python3 # # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from aiohttp import web import capstone import functools from gdbproc import GDBProcess import socketio import asyncio import codecs import os enable_logging = False premium = 'PREMIUM' in os.environ if premium: access_key = os.getenv('PREMIUM_KEY') runnable = ['/home/user/printwebflag'] else: access_key = os.getenv('TRIAL_KEY') runnable = ['/bin/sleep', '20'] MAX_INSN_LEN = 15 capstone_md = capstone.Cs(capstone.CS_ARCH_X86, capstone.CS_MODE_64) sio = socketio.AsyncServer() app = web.Application() sio.attach(app) with open('index.html') as f: index_html = f.read() app.add_routes([web.get('/', index), web.get('/{name}', index)]) gdb_sessions = {} stop_queue_readers = {} app.on_shutdown.append(on_shutdown) if __name__ == '__main__': web.run_app(app)
27.960227
104
0.710018
9c5de31d5758cb655e6faea3c4a14331feb71111
4,960
py
Python
examples/multi_physics/piezo_elasticity.py
BubuLK/sfepy
3e8e2082c26d574dc334fe3a0e0eeb723f7a6657
[ "BSD-3-Clause" ]
null
null
null
examples/multi_physics/piezo_elasticity.py
BubuLK/sfepy
3e8e2082c26d574dc334fe3a0e0eeb723f7a6657
[ "BSD-3-Clause" ]
null
null
null
examples/multi_physics/piezo_elasticity.py
BubuLK/sfepy
3e8e2082c26d574dc334fe3a0e0eeb723f7a6657
[ "BSD-3-Clause" ]
null
null
null
r""" Piezo-elasticity problem - linear elastic material with piezoelectric effects. Find :math:`\ul{u}`, :math:`\phi` such that: .. math:: - \omega^2 \int_{Y} \rho\ \ul{v} \cdot \ul{u} + \int_{Y} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u}) - \int_{Y_2} g_{kij}\ e_{ij}(\ul{v}) \nabla_k \phi = 0 \;, \quad \forall \ul{v} \;, \int_{Y_2} g_{kij}\ e_{ij}(\ul{u}) \nabla_k \psi + \int_{Y} K_{ij} \nabla_i \psi \nabla_j \phi = 0 \;, \quad \forall \psi \;, where .. math:: D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) + \lambda \ \delta_{ij} \delta_{kl} \;. """ from __future__ import absolute_import import os import numpy as nm from sfepy import data_dir from sfepy.discrete.fem import MeshIO from sfepy.mechanics.matcoefs import stiffness_from_lame import six def post_process(out, pb, state, extend=False): """ Calculate and output the strain and stresses for the given state. """ from sfepy.base.base import Struct from sfepy.discrete.fem import extend_cell_data ev = pb.evaluate strain = ev('ev_cauchy_strain.i.Y(u)', mode='el_avg') stress = ev('ev_cauchy_stress.i.Y(inclusion.D, u)', mode='el_avg') piezo = -ev('ev_piezo_stress.i.Y2(inclusion.coupling, phi)', mode='el_avg') piezo = extend_cell_data(piezo, pb.domain, 'Y2', val=0.0) piezo_strain = ev('ev_piezo_strain.i.Y(inclusion.coupling, u)', mode='el_avg') out['cauchy_strain'] = Struct(name='output_data', mode='cell', data=strain, dofs=None) out['elastic_stress'] = Struct(name='output_data', mode='cell', data=stress, dofs=None) out['piezo_stress'] = Struct(name='output_data', mode='cell', data=piezo, dofs=None) out['piezo_strain'] = Struct(name='output_data', mode='cell', data=piezo_strain, dofs=None) out['total_stress'] = Struct(name='output_data', mode='cell', data=stress + piezo, dofs=None) return out filename_mesh = data_dir + '/meshes/2d/special/circle_in_square.mesh' ## filename_mesh = data_dir + '/meshes/2d/special/circle_in_square_small.mesh' ## filename_mesh = data_dir + '/meshes/3d/special/cube_sphere.mesh' ## filename_mesh = data_dir + '/meshes/2d/special/cube_cylinder.mesh' omega = 1 omega_squared = omega**2 conf_dir = os.path.dirname(__file__) io = MeshIO.any_from_filename(filename_mesh, prefix_dir=conf_dir) bbox, dim = io.read_bounding_box(ret_dim=True) geom = {3 : '3_4', 2 : '2_3'}[dim] x_left, x_right = bbox[:,0] options = { 'post_process_hook' : 'post_process', } regions = { 'Y' : 'all', 'Y1' : 'cells of group 1', 'Y2' : 'cells of group 2', 'Y2_Surface': ('r.Y1 *v r.Y2', 'facet'), 'Left' : ('vertices in (x < %f)' % (x_left + 1e-3), 'facet'), 'Right' : ('vertices in (x > %f)' % (x_right - 1e-3), 'facet'), } fields = { 'displacement' : ('real', dim, 'Y', 1), 'potential' : ('real', 1, 'Y', 1), } variables = { 'u' : ('unknown field', 'displacement', 0), 'v' : ('test field', 'displacement', 'u'), 'phi' : ('unknown field', 'potential', 1), 'psi' : ('test field', 'potential', 'phi'), } ebcs = { 'u1' : ('Left', {'u.all' : 0.0}), 'u2' : ('Right', {'u.0' : 0.1}), 'phi' : ('Y2_Surface', {'phi.all' : 0.0}), } def get_inclusion_pars(ts, coor, mode=None, **kwargs): """TODO: implement proper 3D -> 2D transformation of constitutive matrices.""" if mode == 'qp': _, dim = coor.shape sym = (dim + 1) * dim // 2 dielectric = nm.eye(dim, dtype=nm.float64) # !!! coupling = nm.ones((dim, sym), dtype=nm.float64) # coupling[0,1] = 0.2 out = { # Lame coefficients in 1e+10 Pa. 'D' : stiffness_from_lame(dim=2, lam=0.1798, mu=0.148), # dielectric tensor 'dielectric' : dielectric, # piezoelectric coupling 'coupling' : coupling, 'density' : nm.array([[0.1142]]), # in 1e4 kg/m3 } for key, val in six.iteritems(out): out[key] = val[None, ...] return out materials = { 'inclusion' : (None, 'get_inclusion_pars') } functions = { 'get_inclusion_pars' : (get_inclusion_pars,), } integrals = { 'i' : 2, } equations = { '1' : """- %f * dw_volume_dot.i.Y(inclusion.density, v, u) + dw_lin_elastic.i.Y(inclusion.D, v, u) - dw_piezo_coupling.i.Y2(inclusion.coupling, v, phi) = 0""" % omega_squared, '2' : """dw_piezo_coupling.i.Y2(inclusion.coupling, u, psi) + dw_diffusion.i.Y(inclusion.dielectric, psi, phi) = 0""", } solvers = { 'ls' : ('ls.scipy_direct', {}), 'newton' : ('nls.newton', {'i_max' : 1, 'eps_a' : 1e-10, }), }
29.349112
78
0.563105
9c5f1cf8cb3617f22a594d7ff47f26bbe868fb45
326
py
Python
01-logica-de-programacao-e-algoritmos/Aula 06/01 Tuplas/1.2 Desempacotamento de parametros em funcoes/ex01.py
rafaelbarretomg/Uninter
1f84b0103263177122663e991db3a8aeb106a959
[ "MIT" ]
null
null
null
01-logica-de-programacao-e-algoritmos/Aula 06/01 Tuplas/1.2 Desempacotamento de parametros em funcoes/ex01.py
rafaelbarretomg/Uninter
1f84b0103263177122663e991db3a8aeb106a959
[ "MIT" ]
null
null
null
01-logica-de-programacao-e-algoritmos/Aula 06/01 Tuplas/1.2 Desempacotamento de parametros em funcoes/ex01.py
rafaelbarretomg/Uninter
1f84b0103263177122663e991db3a8aeb106a959
[ "MIT" ]
null
null
null
# Desempacotamento de parametros em funcoes # somando valores de uma tupla # Programa principal print('Resultado: {}\n' .format(soma(1, 2))) print('Resultado: {}\n' .format(soma(1, 2, 3, 4, 5, 6, 7, 8, 9)))
23.285714
65
0.604294
9c60736edd01d240f5a84615880cdfb0c36592cb
2,876
py
Python
services/model.py
theallknowng/eKheti
85e74f26bde7454293617ba727002c5c81402140
[ "MIT" ]
1
2020-04-14T20:29:09.000Z
2020-04-14T20:29:09.000Z
services/model.py
theallknowng/eKheti
85e74f26bde7454293617ba727002c5c81402140
[ "MIT" ]
5
2020-09-26T01:11:17.000Z
2022-02-10T02:01:36.000Z
services/model.py
theallknowng/eKheti
85e74f26bde7454293617ba727002c5c81402140
[ "MIT" ]
null
null
null
import pandas from keras.models import Sequential from keras.layers import Dense from keras.wrappers.scikit_learn import KerasClassifier from keras.utils import np_utils from sklearn.model_selection import cross_val_score from sklearn.model_selection import KFold from sklearn.preprocessing import LabelEncoder from sklearn.pipeline import Pipeline import keras import sys import json import requests import numpy as np # define baseline model model = baseline_model() model.load_weights("../model1.h5") # data = sys.argv[1] # data = '{"pH min":5.7,"pH max":7,"nitrogen min":109,"nitrogen max":146,"phosphorus min":20,"phosphorus max":30,"potasium min":78,"potasium max":115,"calcium min":270,"calcium max":990,"magnesium min":46,"magnesium max":96,"sulphur min":10,"sulphur max":10,"iron min":44,"iron max":46,"zinc min":3.87,"zinc max":5.87,"manganese min":4.81,"manganese max":4.81,"copper min":21,"copper max":26,"boron min":1.25,"boron max":2.25,"temperature min":25,"temperature max":35,"precipitation min":50,"precipitation max":60,"irrigation":"yes ","region":"barshi"}' # data = '{"pH min":7.6,"pH max":7.6,"nitrogen min":150.53,"nitrogen max":150.53,"phosphorus min":55.96,"phosphorus max":55.96,"potasium min":728,"potasium max":728,"calcium min":45.56,"calcium max":45.56,"magnesium min":36.46,"magnesium max":36.46,"sulphur min":44.69,"sulphur max":44.69,"iron min":2.7,"iron max":2.7,"zinc min":0.49,"zinc max":0.49,"manganese min":2.16,"manganese max":2.16,"copper min":3.5,"copper max":3.5,"boron min":0.63,"boron max":0.63,"temperature min":21,"temperature max":31,"precipitation min":60.18,"precipitation max":60.18,"irrigation":"yes ","region":"barshi"}' data= '{"pH min":5.7,"pH max":7,"nitrogen min":109,"nitrogen max":146,"phosphorus min":20,"phosphorus max":30,"potasium min":78,"potasium max":115,"calcium min":270,"calcium max":990,"magnesium min":46,"magnesium max":96,"sulphur min":10,"sulphur max":10,"iron min":44,"iron max":46,"zinc min":3.87,"zinc max":5.87,"manganese min":4.81,"manganese max":4.81,"copper min":21,"copper max":26,"boron min":1.25,"boron max":2.25,"temperature min":25,"temperature max":35,"precipitation min":50,"precipitation max":60,"irrigation":"yes ","region":"barshi"}' data = json.loads(data) dataframe = pandas.DataFrame(data,index=[0]) dataset = dataframe.values X = dataset[:,0:28].astype(float) op=model.predict(X) #op = model.predict_classes(X) #print(op) #classes = np.argmax(op) #print(classes) best_n = np.argsort(op, axis=1)[:,-7:] print(best_n[0])
54.264151
594
0.719402
9c614378ccffafbcb6378e7da9d99a24c5b8ad0b
1,848
py
Python
tests/sentry/api/endpoints/test_project_details.py
erhuabushuo/sentry
8b3bad10155aaacfdff80910e5972e64304e880c
[ "BSD-3-Clause" ]
null
null
null
tests/sentry/api/endpoints/test_project_details.py
erhuabushuo/sentry
8b3bad10155aaacfdff80910e5972e64304e880c
[ "BSD-3-Clause" ]
null
null
null
tests/sentry/api/endpoints/test_project_details.py
erhuabushuo/sentry
8b3bad10155aaacfdff80910e5972e64304e880c
[ "BSD-3-Clause" ]
null
null
null
from django.core.urlresolvers import reverse from sentry.models import Project from sentry.testutils import APITestCase
33
88
0.65368
9c6258f2e73dfc4619740d301b9ae33bb12c5202
29,732
py
Python
tests/test_table.py
databook1/python-pptx
87ca6bf34f9ced17cc4f3c94cf141069429e7583
[ "MIT" ]
null
null
null
tests/test_table.py
databook1/python-pptx
87ca6bf34f9ced17cc4f3c94cf141069429e7583
[ "MIT" ]
12
2021-01-22T16:53:51.000Z
2022-02-23T13:57:43.000Z
tests/test_table.py
databook1/python-pptx
87ca6bf34f9ced17cc4f3c94cf141069429e7583
[ "MIT" ]
null
null
null
# encoding: utf-8 """Unit-test suite for `pptx.table` module.""" import pytest from pptx.dml.fill import FillFormat from pptx.dml.border import BorderFormat from pptx.enum.text import MSO_ANCHOR from pptx.oxml.ns import qn from pptx.oxml.table import CT_Table, CT_TableCell, TcRange from pptx.shapes.graphfrm import GraphicFrame from pptx.table import ( _Cell, _CellCollection, _Column, _ColumnCollection, _Row, _RowCollection, Table, ) from pptx.text.text import TextFrame from pptx.util import Inches, Length, Pt from .unitutil.cxml import element, xml from .unitutil.mock import call, class_mock, instance_mock, property_mock
35.995157
88
0.61227
9c62aef1446638f9fa0188683ca923feaaa75706
408
py
Python
imread/tests/test_bmp.py
luispedro/imread
7960b744623fe03e6d968893a539bca969715860
[ "MIT" ]
51
2015-01-09T14:07:37.000Z
2022-03-03T13:54:30.000Z
imread/tests/test_bmp.py
luispedro/imread
7960b744623fe03e6d968893a539bca969715860
[ "MIT" ]
16
2015-08-09T13:40:44.000Z
2020-04-14T10:01:41.000Z
imread/tests/test_bmp.py
luispedro/imread
7960b744623fe03e6d968893a539bca969715860
[ "MIT" ]
15
2015-05-22T10:41:52.000Z
2021-05-03T11:11:59.000Z
import numpy as np from imread import imread from . import file_path
24
54
0.622549
9c6344071efa98707250768a8a8a6346ceb89a33
6,612
py
Python
bl60x_flash/main.py
v3l0c1r4pt0r/bl60x-flash
065770004629c3e5bf98057677e7a6ca566e9c4a
[ "MIT" ]
null
null
null
bl60x_flash/main.py
v3l0c1r4pt0r/bl60x-flash
065770004629c3e5bf98057677e7a6ca566e9c4a
[ "MIT" ]
null
null
null
bl60x_flash/main.py
v3l0c1r4pt0r/bl60x-flash
065770004629c3e5bf98057677e7a6ca566e9c4a
[ "MIT" ]
null
null
null
from serial import Serial from tqdm import tqdm import binascii import hashlib import struct import time import sys import os if __name__ == "__main__": main()
28.25641
86
0.613581
9c638d8e037fbbc9a02c2c027a243f97a22ca4ba
19,638
py
Python
lang/py/test/test_avro_builder.py
zerofox-oss/yelp-avro
913f95a4c34386d0fe9aff843b1a8ea362a1a2c5
[ "Apache-2.0" ]
null
null
null
lang/py/test/test_avro_builder.py
zerofox-oss/yelp-avro
913f95a4c34386d0fe9aff843b1a8ea362a1a2c5
[ "Apache-2.0" ]
1
2021-08-02T17:22:28.000Z
2021-08-02T17:22:28.000Z
lang/py/test/test_avro_builder.py
riskive/yelp-avro
5a8835505d1f788585834b0a87940d30b76be3fb
[ "Apache-2.0" ]
1
2020-05-06T22:01:07.000Z
2020-05-06T22:01:07.000Z
# -*- coding: utf-8 -*- import unittest from avro import avro_builder from avro import schema if __name__ == '__main__': unittest.main()
33.626712
81
0.595936
9c63d06a1b4ade87729c096ceb91bf4dea5b367b
467
py
Python
monte_py/__init__.py
domluna/fun_with_ffi
9fc197b11a3470395db517657d624f0a3aa06958
[ "MIT" ]
1
2018-07-16T22:10:58.000Z
2018-07-16T22:10:58.000Z
monte_py/__init__.py
domluna/fun_with_ffi
9fc197b11a3470395db517657d624f0a3aa06958
[ "MIT" ]
null
null
null
monte_py/__init__.py
domluna/fun_with_ffi
9fc197b11a3470395db517657d624f0a3aa06958
[ "MIT" ]
null
null
null
import random # use a unit square
23.35
46
0.573876
9c6424690b87c4502fb44bc4e25fa64fa727a995
36,577
py
Python
tools/mpy_ld.py
UVA-DSI/circuitpython
35ee4add63a604320d2fbd4e30baef2b5675f9a7
[ "Unlicense", "BSD-3-Clause", "MIT-0", "MIT" ]
1
2021-10-20T12:21:44.000Z
2021-10-20T12:21:44.000Z
tools/mpy_ld.py
UVA-DSI/circuitpython
35ee4add63a604320d2fbd4e30baef2b5675f9a7
[ "Unlicense", "BSD-3-Clause", "MIT-0", "MIT" ]
null
null
null
tools/mpy_ld.py
UVA-DSI/circuitpython
35ee4add63a604320d2fbd4e30baef2b5675f9a7
[ "Unlicense", "BSD-3-Clause", "MIT-0", "MIT" ]
null
null
null
#!/usr/bin/env python3 # # This file is part of the MicroPython project, http://micropython.org/ # # The MIT License (MIT) # # Copyright (c) 2019 Damien P. George # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. """ Link .o files to .mpy """ import sys, os, struct, re from elftools.elf import elffile sys.path.append(os.path.dirname(__file__) + "/../py") import makeqstrdata as qstrutil # MicroPython constants MPY_VERSION = 5 MP_NATIVE_ARCH_X86 = 1 MP_NATIVE_ARCH_X64 = 2 MP_NATIVE_ARCH_ARMV7M = 5 MP_NATIVE_ARCH_ARMV7EMSP = 7 MP_NATIVE_ARCH_ARMV7EMDP = 8 MP_NATIVE_ARCH_XTENSA = 9 MP_NATIVE_ARCH_XTENSAWIN = 10 MP_CODE_BYTECODE = 2 MP_CODE_NATIVE_VIPER = 4 MP_SCOPE_FLAG_VIPERRELOC = 0x20 MP_SCOPE_FLAG_VIPERRODATA = 0x40 MP_SCOPE_FLAG_VIPERBSS = 0x80 MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE = 1 MICROPY_PY_BUILTINS_STR_UNICODE = 2 MP_SMALL_INT_BITS = 31 QSTR_WINDOW_SIZE = 32 # ELF constants R_386_32 = 1 R_X86_64_64 = 1 R_XTENSA_32 = 1 R_386_PC32 = 2 R_X86_64_PC32 = 2 R_ARM_ABS32 = 2 R_386_GOT32 = 3 R_ARM_REL32 = 3 R_386_PLT32 = 4 R_X86_64_PLT32 = 4 R_XTENSA_PLT = 6 R_386_GOTOFF = 9 R_386_GOTPC = 10 R_ARM_THM_CALL = 10 R_XTENSA_DIFF32 = 19 R_XTENSA_SLOT0_OP = 20 R_ARM_BASE_PREL = 25 # aka R_ARM_GOTPC R_ARM_GOT_BREL = 26 # aka R_ARM_GOT32 R_ARM_THM_JUMP24 = 30 R_X86_64_REX_GOTPCRELX = 42 R_386_GOT32X = 43 ################################################################################ # Architecture configuration ARCH_DATA = { "x86": ArchData( "EM_386", MP_NATIVE_ARCH_X86 << 2 | MICROPY_PY_BUILTINS_STR_UNICODE | MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE, 2, 4, (R_386_PC32, R_386_GOT32, R_386_GOT32X), asm_jump_x86, ), "x64": ArchData( "EM_X86_64", MP_NATIVE_ARCH_X64 << 2 | MICROPY_PY_BUILTINS_STR_UNICODE | MICROPY_OPT_CACHE_MAP_LOOKUP_IN_BYTECODE, 2, 8, (R_X86_64_REX_GOTPCRELX,), asm_jump_x86, ), "armv7m": ArchData( "EM_ARM", MP_NATIVE_ARCH_ARMV7M << 2 | MICROPY_PY_BUILTINS_STR_UNICODE, 2, 4, (R_ARM_GOT_BREL,), asm_jump_arm, ), "armv7emsp": ArchData( "EM_ARM", MP_NATIVE_ARCH_ARMV7EMSP << 2 | MICROPY_PY_BUILTINS_STR_UNICODE, 2, 4, (R_ARM_GOT_BREL,), asm_jump_arm, ), "armv7emdp": ArchData( "EM_ARM", MP_NATIVE_ARCH_ARMV7EMDP << 2 | MICROPY_PY_BUILTINS_STR_UNICODE, 2, 4, (R_ARM_GOT_BREL,), asm_jump_arm, ), "xtensa": ArchData( "EM_XTENSA", MP_NATIVE_ARCH_XTENSA << 2 | MICROPY_PY_BUILTINS_STR_UNICODE, 2, 4, (R_XTENSA_32, R_XTENSA_PLT), asm_jump_xtensa, ), "xtensawin": ArchData( "EM_XTENSA", MP_NATIVE_ARCH_XTENSAWIN << 2 | MICROPY_PY_BUILTINS_STR_UNICODE, 4, 4, (R_XTENSA_32, R_XTENSA_PLT), asm_jump_xtensa, ), } ################################################################################ # Helper functions # Smaller numbers are enabled first LOG_LEVEL_1 = 1 LOG_LEVEL_2 = 2 LOG_LEVEL_3 = 3 log_level = LOG_LEVEL_1 ################################################################################ # Qstr extraction ################################################################################ # Linker def build_got_generic(env): env.got_entries = {} for sec in env.sections: for r in sec.reloc: s = r.sym if not ( s.entry["st_info"]["bind"] == "STB_GLOBAL" and r["r_info_type"] in env.arch.arch_got ): continue s_type = s.entry["st_info"]["type"] assert s_type in ("STT_NOTYPE", "STT_FUNC", "STT_OBJECT"), s_type assert s.name if s.name in env.got_entries: continue env.got_entries[s.name] = GOTEntry(s.name, s) ################################################################################ # .mpy output ################################################################################ # main if __name__ == "__main__": main()
33.618566
109
0.576893
9c64d6e1ca9f65ffe83cf4a6cb96b5de160e7309
2,289
py
Python
ui_mant_libros.py
edzzn/Manejo_Liberia
c735d35b32fc53839acfc48d4e088e69983edf16
[ "MIT" ]
null
null
null
ui_mant_libros.py
edzzn/Manejo_Liberia
c735d35b32fc53839acfc48d4e088e69983edf16
[ "MIT" ]
null
null
null
ui_mant_libros.py
edzzn/Manejo_Liberia
c735d35b32fc53839acfc48d4e088e69983edf16
[ "MIT" ]
null
null
null
from PyQt4 import QtGui from ui_mant_libros_new import NewLibrosWindow from ui_mant_libros_edit import EditLibrosWindow from ui_mant_libros_id_edit import GetIdEditWindow # Debug only import inspect if __name__ == '__main__': import sys app = QtGui.QApplication(sys.argv) mainWin = MenuLibros() mainWin.show() sys.exit(app.exec_())
26.929412
69
0.671035
9c64e0be4c2600978945ef57f08d4ac03e9f96cf
6,583
py
Python
env/gym_poker_ai/envs/tests/holdem_calc/holdem_argparser.py
MrStonkus/PokerAi
9c43c3a7a9c3ac01f4ee9e3f1f95f0786c35de99
[ "MIT" ]
null
null
null
env/gym_poker_ai/envs/tests/holdem_calc/holdem_argparser.py
MrStonkus/PokerAi
9c43c3a7a9c3ac01f4ee9e3f1f95f0786c35de99
[ "MIT" ]
1
2020-05-09T20:27:33.000Z
2020-05-09T20:27:33.000Z
env/gym_poker_ai/envs/tests/holdem_calc/holdem_argparser.py
MrStonkus/PokerAi
9c43c3a7a9c3ac01f4ee9e3f1f95f0786c35de99
[ "MIT" ]
null
null
null
import argparse import re import holdem_calc.holdem_functions as holdem_functions # Wrapper class which holds the arguments for library calls # Mocks actual argparse object # Parses arguments passed to holdem_calc as a library call def parse_lib_args(args): error_check_arguments(args) # Parse hole cards and board hole_cards, board = None, None if not args.input: hole_cards, board = parse_cards(args.cards, args.board) return hole_cards, args.n, args.exact, board, args.input # Parses command line arguments to holdem_calc # Parses a line taken from the input file and returns the hole cards and board # Parses hole cards and board # Error check the command line arguments # Error check the command line arguments # Checking that the hole cards + board are formatted properly and unique # Returns tuple of two-tuple hole_cards: e.g. ((As, Ks), (Ad, Kd), (Jh, Th)) # Returns list of board cards: e.g. [As Ks Ad Kd] # Instantiates new cards from the arguments and returns them in a tuple
33.93299
78
0.628285
9c651d14eff8b0f1392964eb0805b7871c20c731
8,318
py
Python
qbay/controllers.py
KarlDorogy/Cisc-327-Course-Project-Group-20
0e2c003f78bbdd932381a7a8cbc3aa757da18b24
[ "MIT" ]
null
null
null
qbay/controllers.py
KarlDorogy/Cisc-327-Course-Project-Group-20
0e2c003f78bbdd932381a7a8cbc3aa757da18b24
[ "MIT" ]
null
null
null
qbay/controllers.py
KarlDorogy/Cisc-327-Course-Project-Group-20
0e2c003f78bbdd932381a7a8cbc3aa757da18b24
[ "MIT" ]
null
null
null
from flask import render_template, request, session, redirect from qbay.models import * from datetime import date from qbay import app def authenticate(inner_function): """ :param inner_function: any python function that accepts a user object Wrap any python function and check the current session to see if the user has logged in. If login, it will call the inner_function with the logged in user object. To wrap a function, we can put a decoration on that function. Example: @authenticate def home_page(user): pass """ # return the wrapped version of the inner_function: return wrapped_inner
34.658333
76
0.648593
9c6678445c5b8ffd9879e0f6a21e874c128e214d
6,998
py
Python
gbfs/serializers.py
stadtulm/cykel
b292d958330279654c49beafc3f95a0067274472
[ "MIT" ]
80
2019-08-20T17:41:31.000Z
2021-05-31T19:20:28.000Z
gbfs/serializers.py
transportkollektiv/cykel
b292d958330279654c49beafc3f95a0067274472
[ "MIT" ]
19
2019-08-24T15:17:33.000Z
2021-09-22T17:58:03.000Z
gbfs/serializers.py
stadtulm/cykel
b292d958330279654c49beafc3f95a0067274472
[ "MIT" ]
12
2019-08-21T17:55:14.000Z
2021-04-07T18:53:52.000Z
from datetime import timedelta from django.utils.timezone import now from preferences import preferences from rest_framework import fields, serializers from bikesharing.models import Bike, Station, VehicleType from cykel.serializers import EnumFieldSerializer
36.447917
84
0.642612
9c672aa16a64502ad882d71db5ffef21757f9d6f
1,095
py
Python
anime_downloader/extractors/vidstream.py
ngomile/anime-downloader
14d9cebe8aa4eb9d906b937d7c19fedfa737d184
[ "Unlicense" ]
2
2020-08-10T12:34:42.000Z
2020-11-19T08:13:48.000Z
anime_downloader/extractors/vidstream.py
ngomile/anime-downloader
14d9cebe8aa4eb9d906b937d7c19fedfa737d184
[ "Unlicense" ]
null
null
null
anime_downloader/extractors/vidstream.py
ngomile/anime-downloader
14d9cebe8aa4eb9d906b937d7c19fedfa737d184
[ "Unlicense" ]
null
null
null
import logging import re from anime_downloader.extractors.base_extractor import BaseExtractor from anime_downloader.sites import helpers logger = logging.getLogger(__name__)
31.285714
160
0.552511
9c67ab6dcf7da8380a3c1b1759e1c7f496809cce
2,799
py
Python
gui/sum_v1/views.py
time-crunched/nlp-toolbox
b732abd0b2c6b265971efe04a4d70ebe20d2ee8f
[ "MIT" ]
null
null
null
gui/sum_v1/views.py
time-crunched/nlp-toolbox
b732abd0b2c6b265971efe04a4d70ebe20d2ee8f
[ "MIT" ]
3
2020-06-05T18:58:57.000Z
2021-06-10T20:50:13.000Z
gui/sum_v1/views.py
time-crunched/nlp-toolbox
b732abd0b2c6b265971efe04a4d70ebe20d2ee8f
[ "MIT" ]
1
2019-12-01T16:56:41.000Z
2019-12-01T16:56:41.000Z
import time import os from django.shortcuts import render, redirect from django.http import JsonResponse from django.views import View from django.conf import settings from .forms import File_uploadForm from .models import File_upload, SummaryRes from sim_v1.textsummary import TEXTSummary summary_document_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),'media','sum_v1','upload') #summary_document_dir = r'C:\Users\ERDIG\Dropbox\Python\nlp_v1\media\sum_v1\upload' summary_extraction_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),'media','sum_v1','temp') #summary_extraction_dir = r'C:\Users\ERDIG\Dropbox\Python\nlp_v1\media\sum_v1\temp' summary_ratio = 0.01
29.15625
133
0.679171
9c67af820f4a5f09ac6dce61683f07d3e73f1273
1,290
py
Python
homeassistant/components/websocket_api/__init__.py
dannyqwertz/home-assistant
688bdc6532e514afbdc8efd1f574a7b5c9e8d280
[ "Apache-2.0" ]
4
2019-01-10T14:47:54.000Z
2021-04-22T02:06:27.000Z
homeassistant/components/websocket_api/__init__.py
dannyqwertz/home-assistant
688bdc6532e514afbdc8efd1f574a7b5c9e8d280
[ "Apache-2.0" ]
6
2021-02-08T21:02:40.000Z
2022-03-12T00:52:16.000Z
homeassistant/components/websocket_api/__init__.py
dannyqwertz/home-assistant
688bdc6532e514afbdc8efd1f574a7b5c9e8d280
[ "Apache-2.0" ]
1
2019-08-13T11:54:30.000Z
2019-08-13T11:54:30.000Z
""" Websocket based API for Home Assistant. For more details about this component, please refer to the documentation at https://developers.home-assistant.io/docs/external_api_websocket.html """ from homeassistant.core import callback from homeassistant.loader import bind_hass from . import commands, connection, const, decorators, http, messages DOMAIN = const.DOMAIN DEPENDENCIES = ('http',) # Backwards compat / Make it easier to integrate # pylint: disable=invalid-name ActiveConnection = connection.ActiveConnection BASE_COMMAND_MESSAGE_SCHEMA = messages.BASE_COMMAND_MESSAGE_SCHEMA error_message = messages.error_message result_message = messages.result_message async_response = decorators.async_response require_admin = decorators.require_admin ws_require_user = decorators.ws_require_user # pylint: enable=invalid-name async def async_setup(hass, config): """Initialize the websocket API.""" hass.http.register_view(http.WebsocketAPIView) commands.async_register_commands(hass) return True
30
75
0.784496
9c68432891ca359406f39890007723907fdc1968
4,181
py
Python
test_app/settings.py
Lenders-Cooperative/Django-DocuSign
676d966065f6e1e64e1f0db9b7691b9f0c5d73a5
[ "BSD-3-Clause" ]
null
null
null
test_app/settings.py
Lenders-Cooperative/Django-DocuSign
676d966065f6e1e64e1f0db9b7691b9f0c5d73a5
[ "BSD-3-Clause" ]
null
null
null
test_app/settings.py
Lenders-Cooperative/Django-DocuSign
676d966065f6e1e64e1f0db9b7691b9f0c5d73a5
[ "BSD-3-Clause" ]
2
2021-12-03T19:40:35.000Z
2021-12-03T19:47:59.000Z
# # Created on Tue Dec 21 2021 # # Copyright (c) 2021 Lenders Cooperative, a division of Summit Technology Group, Inc. # """ Django settings for test_app project. Generated by 'django-admin startproject' using Django 3.1.7. For more information on this file, see https://docs.djangoproject.com/en/3.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.1/ref/settings/ """ from pathlib import Path import environ env = environ.Env() # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = "uhtgm(e2y3@5%0x!wy#re#fn+51h*ck88^ocm7d1=hx^@(&7$7" # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.messages", "django.contrib.staticfiles", "los_docusign.apps.LosDocusignConfig", "test_app.test_organization.apps.TestOrganizationConfig", "django_lc_utils", ] MIDDLEWARE = [ "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", ] ROOT_URLCONF = "test_app.urls" TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", "DIRS": [], "APP_DIRS": True, "OPTIONS": { "context_processors": [ "django.template.context_processors.debug", "django.template.context_processors.request", "django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", ], }, }, ] WSGI_APPLICATION = "test_app.wsgi.application" # Database # https://docs.djangoproject.com/en/3.1/ref/settings/#databases DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "docusign_new_poc", "USER": "postgres", "PASSWORD": "admin", "HOST": "localhost", "PORT": "5432", } } # Password validation # https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] # Internationalization # https://docs.djangoproject.com/en/3.1/topics/i18n/ LANGUAGE_CODE = "en-us" TIME_ZONE = "UTC" USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.1/howto/static-files/ STATIC_URL = "/static/" BASE_DIR = Path(__file__).resolve().parent DOCUSIGN_API_ACCOUNT_ID = env( "DOCUSIGN_API_ACCOUNT_ID", default="<Docusign API Account Id >" ) DOCUSIGN_CLIENT_ID = env("DOCUSIGN_CLIENT_ID", default="<Docusign Client Id>") DOCUSIGN_API_ENDPOINT = env( "DOCUSIGN_API_ENDPOINT", default="https://demo.docusign.net/restapi/v2.1/accounts/" ) DOCUSIGN_TOKEN_EXPIRY_IN_SECONDS = env("DOCUSIGN_TOKEN_EXPIRY_IN_SECONDS", default=3600) DOCUSIGN_AUTHORIZATION_SERVER = env( "DOCUSIGN_AUTHORIZATION_SERVER", default="account-d.docusign.com" ) DOCUSIGN_PRIVATE_KEY_FILE = env( "DOCUSIGN_PRIVATE_KEY_FILE", default="<Private Key file data>", ) DOCUSIGN_ENABLE_KBA = env("DOCUSIGN_ENABLE_KBA", default=False)
27.326797
91
0.708443
9c68e55390ec5a85f2cfdfcd46e61487ba6ce000
9,871
py
Python
tests/unit/ppr/test_search_query.py
doug-lovett/test-schemas-dl
a05e87b983f2c3559c081dd65aff05e2c67e6186
[ "Apache-2.0" ]
null
null
null
tests/unit/ppr/test_search_query.py
doug-lovett/test-schemas-dl
a05e87b983f2c3559c081dd65aff05e2c67e6186
[ "Apache-2.0" ]
null
null
null
tests/unit/ppr/test_search_query.py
doug-lovett/test-schemas-dl
a05e87b983f2c3559c081dd65aff05e2c67e6186
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Test Suite to ensure the PPR Search Query schema is valid. """ import copy from registry_schemas import validate from registry_schemas.example_data.ppr import SEARCH_QUERY def test_valid_search_query_ind_debtor(): """Assert that the schema is performing as expected for a search by individual debtor.""" query = copy.deepcopy(SEARCH_QUERY) query['type'] = 'INDIVIDUAL_DEBTOR' del query['criteria']['debtorName']['business'] del query['criteria']['value'] del query['clientReferenceId'] del query['startDateTime'] del query['endDateTime'] is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert is_valid def test_valid_search_query_bus_debtor(): """Assert that the schema is performing as expected for a search by business debtor.""" query = copy.deepcopy(SEARCH_QUERY) query['type'] = 'BUSINESS_DEBTOR' del query['criteria']['debtorName']['first'] del query['criteria']['debtorName']['second'] del query['criteria']['debtorName']['last'] del query['criteria']['value'] is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert is_valid def test_valid_search_query_airdot(): """Assert that the schema is performing as expected for a search by aircraft DOT.""" query = copy.deepcopy(SEARCH_QUERY) query['type'] = 'AIRCRAFT_DOT' del query['criteria']['debtorName'] query['criteria']['value'] = 'CFYXW' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert is_valid def test_valid_search_query_regnum(): """Assert that the schema is performing as expected for a search by registration number.""" query = copy.deepcopy(SEARCH_QUERY) query['type'] = 'REGISTRATION_NUMBER' del query['criteria']['debtorName'] query['criteria']['value'] = '023001B' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert is_valid def test_valid_search_query_mhrnum(): """Assert that the schema is performing as expected for a search by MHR number.""" query = copy.deepcopy(SEARCH_QUERY) query['type'] = 'MHR_NUMBER' del query['criteria']['debtorName'] query['criteria']['value'] = '21324' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert is_valid def test_valid_search_query_serialnum(): """Assert that the schema is performing as expected for a search by serial number.""" query = copy.deepcopy(SEARCH_QUERY) query['type'] = 'SERIAL_NUMBER' del query['criteria']['debtorName'] query['criteria']['value'] = 'KM8J3CA46JU622994' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert is_valid def test_invalid_search_query_missing_type(): """Assert that an invalid search query fails - type is missing.""" query = copy.deepcopy(SEARCH_QUERY) del query['type'] del query['criteria']['debtorName']['business'] del query['criteria']['value'] is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_missing_criteria(): """Assert that an invalid search query fails - criteria is missing.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria'] is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_type(): """Assert that an invalid search query fails - type is invalid.""" query = copy.deepcopy(SEARCH_QUERY) query['type'] = 'XXXXXXXX' del query['criteria']['debtorName']['business'] del query['criteria']['value'] is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_criteria(): """Assert that an invalid search query fails - criteria is invalid.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['debtorName']['business'] is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_busname(): """Assert that an invalid search query fails - business name is too short.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['debtorName']['first'] del query['criteria']['debtorName']['second'] del query['criteria']['debtorName']['last'] del query['criteria']['value'] query['criteria']['debtorName']['business'] = 'XXXX' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_value(): """Assert that an invalid search query fails - value is too long.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['debtorName'] query['criteria']['value'] = 'XxxxxxxxxxxxxxxxxxxxXxxxxxxxxxxxxxxxxxxxXxxxxxxxxxx' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_debtor(): """Assert that an invalid search query fails - debtor name is invalid.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['value'] is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_firstname(): """Assert that an invalid search query fails - debtor first name is too long.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['value'] del query['criteria']['debtorName']['business'] query['criteria']['debtorName']['first'] = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_secondname(): """Assert that an invalid search query fails - debtor second name is too long.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['value'] del query['criteria']['debtorName']['business'] query['criteria']['debtorName']['second'] = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_lastname(): """Assert that an invalid search query fails - debtor last name is too long.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['value'] del query['criteria']['debtorName']['business'] query['criteria']['debtorName']['last'] = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_clientref(): """Assert that an invalid search query fails - client reference id is too long.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['value'] del query['criteria']['debtorName']['business'] query['clientReferenceId'] = 'XxxxxxxxxxXxxxxxxxxxX' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_startts(): """Assert that an invalid search query fails - start date time format is invalid.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['value'] del query['criteria']['debtorName']['business'] query['startDateTime'] = 'Xxxxxxxxxx' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid def test_invalid_search_query_endts(): """Assert that an invalid search query fails - end date time format is invalid.""" query = copy.deepcopy(SEARCH_QUERY) del query['criteria']['value'] del query['criteria']['debtorName']['business'] query['endDateTime'] = 'Xxxxxxxxxx' is_valid, errors = validate(query, 'searchQuery', 'ppr') if errors: for err in errors: print(err.message) print(errors) assert not is_valid
28.042614
95
0.668524
9c69f20de39f2f2cbc5461ca8a9902a34b2bc1f3
165
py
Python
devopsipy/decorators.py
kharnam/devopsipy
c3379b1dd5f66e71c826045afde1702030e495d4
[ "MIT" ]
null
null
null
devopsipy/decorators.py
kharnam/devopsipy
c3379b1dd5f66e71c826045afde1702030e495d4
[ "MIT" ]
2
2018-10-31T01:43:42.000Z
2018-10-31T02:05:55.000Z
devopsipy/decorators.py
kharnam/devopsipy
c3379b1dd5f66e71c826045afde1702030e495d4
[ "MIT" ]
null
null
null
""" Module to contain Pywork decorators """ __author__ = 'sergey kharnam' import re import time import itertools import logging log = logging.getLogger(__name__)
12.692308
35
0.769697
9c6a899bfa0fce8fa48384ca11c89371d3bdbbc4
10,449
py
Python
tests/test_decorators.py
stephenfin/django-rest-framework
9d001cd84c1239d708b1528587c183ef30e38c31
[ "BSD-3-Clause" ]
1
2019-01-11T13:56:41.000Z
2019-01-11T13:56:41.000Z
tests/test_decorators.py
stephenfin/django-rest-framework
9d001cd84c1239d708b1528587c183ef30e38c31
[ "BSD-3-Clause" ]
null
null
null
tests/test_decorators.py
stephenfin/django-rest-framework
9d001cd84c1239d708b1528587c183ef30e38c31
[ "BSD-3-Clause" ]
1
2019-06-29T12:46:16.000Z
2019-06-29T12:46:16.000Z
from __future__ import unicode_literals import pytest from django.test import TestCase from rest_framework import status from rest_framework.authentication import BasicAuthentication from rest_framework.decorators import ( action, api_view, authentication_classes, detail_route, list_route, parser_classes, permission_classes, renderer_classes, schema, throttle_classes ) from rest_framework.parsers import JSONParser from rest_framework.permissions import IsAuthenticated from rest_framework.renderers import JSONRenderer from rest_framework.response import Response from rest_framework.schemas import AutoSchema from rest_framework.test import APIRequestFactory from rest_framework.throttling import UserRateThrottle from rest_framework.views import APIView def test_method_mapping_already_mapped(self): msg = "Method 'get' has already been mapped to '.test_action'." with self.assertRaisesMessage(AssertionError, msg): def test_method_mapping_overwrite(self): msg = ("Method mapping does not behave like the property decorator. You " "cannot use the same method name for each mapping declaration.") with self.assertRaisesMessage(AssertionError, msg): def test_detail_route_deprecation(self): with pytest.warns(DeprecationWarning) as record: assert len(record) == 1 assert str(record[0].message) == ( "`detail_route` is deprecated and will be removed in " "3.10 in favor of `action`, which accepts a `detail` bool. Use " "`@action(detail=True)` instead." ) def test_list_route_deprecation(self): with pytest.warns(DeprecationWarning) as record: assert len(record) == 1 assert str(record[0].message) == ( "`list_route` is deprecated and will be removed in " "3.10 in favor of `action`, which accepts a `detail` bool. Use " "`@action(detail=False)` instead." ) def test_route_url_name_from_path(self): # pre-3.8 behavior was to base the `url_name` off of the `url_path` with pytest.warns(DeprecationWarning): assert view.url_path == 'foo_bar' assert view.url_name == 'foo-bar'
31.954128
92
0.624175
9c6c043e7e279ee40586854016feb8a49ecc6e3c
661
py
Python
tamilmorse/morse_encode.py
CRE2525/open-tamil
ffc02509f7b8a6a17644c85799a475a8ba623954
[ "MIT" ]
1
2021-08-03T19:35:18.000Z
2021-08-03T19:35:18.000Z
tamilmorse/morse_encode.py
CRE2525/open-tamil
ffc02509f7b8a6a17644c85799a475a8ba623954
[ "MIT" ]
null
null
null
tamilmorse/morse_encode.py
CRE2525/open-tamil
ffc02509f7b8a6a17644c85799a475a8ba623954
[ "MIT" ]
null
null
null
## -*- coding: utf-8 -*- #(C) 2018 Muthiah Annamalai # This file is part of Open-Tamil project # You may use or distribute this file under terms of MIT license import codecs import json import tamil import sys import os #e.g. python morse_encode.py CURRDIR = os.path.dirname(os.path.realpath(__file__)) if __name__ == u"__main__": encode(u" ".join([i.decode("utf-8") for i in sys.argv[1:]]))
30.045455
95
0.688351
9c6c3991eeee7dfdd77baaa787b34e6799b4425e
1,355
py
Python
Leetcode/Python/_1721.py
Xrenya/algorithms
aded82cacde2f4f2114241907861251e0e2e5638
[ "MIT" ]
null
null
null
Leetcode/Python/_1721.py
Xrenya/algorithms
aded82cacde2f4f2114241907861251e0e2e5638
[ "MIT" ]
null
null
null
Leetcode/Python/_1721.py
Xrenya/algorithms
aded82cacde2f4f2114241907861251e0e2e5638
[ "MIT" ]
null
null
null
# Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next # Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next
27.653061
81
0.500369
9c6d0e66968fb03790c987f71d690038a6f4abfa
3,029
py
Python
contrib/functional_tests/functional/test_reorg.py
electrumsv/electrumsv
a2d9027ccec338cadfca778888e6ef7f077b1651
[ "MIT" ]
136
2019-01-10T15:49:09.000Z
2022-02-20T04:46:39.000Z
contrib/functional_tests/functional/test_reorg.py
electrumsv/electrumsv
a2d9027ccec338cadfca778888e6ef7f077b1651
[ "MIT" ]
790
2019-01-07T01:53:35.000Z
2022-03-30T23:04:28.000Z
contrib/functional_tests/functional/test_reorg.py
electrumsv/electrumsv
a2d9027ccec338cadfca778888e6ef7f077b1651
[ "MIT" ]
65
2019-01-10T23:55:30.000Z
2021-12-19T06:47:13.000Z
""" Warning - this will reset all components back to a blank state before running the simulation Runs node1, electrumx1 and electrumsv1 and loads the default wallet on the daemon (so that newly submitted blocks will be synchronized by ElectrumSV reorged txid: 'a1fa9460ca105c1396cd338f7fa202bf79a9d244d730e91e19f6302a05b2f07a' """ import asyncio import os from pathlib import Path import pytest import pytest_asyncio from electrumsv_node import electrumsv_node from electrumsv_sdk import utils import logging import requests from contrib.functional_tests.websocket_client import TxStateWSClient MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger("simulate-fresh-reorg")
35.22093
96
0.678772
9c6e78ca230293ad0a6075105e0e0da44e90fcbd
25,892
py
Python
Pyrado/pyrado/environments/mujoco/wam_bic.py
KhanhThiVo/SimuRLacra
fdeaf2059c2ed80ea696f018c29290510b5c4cb9
[ "DOC", "Zlib", "BSD-3-Clause" ]
null
null
null
Pyrado/pyrado/environments/mujoco/wam_bic.py
KhanhThiVo/SimuRLacra
fdeaf2059c2ed80ea696f018c29290510b5c4cb9
[ "DOC", "Zlib", "BSD-3-Clause" ]
null
null
null
Pyrado/pyrado/environments/mujoco/wam_bic.py
KhanhThiVo/SimuRLacra
fdeaf2059c2ed80ea696f018c29290510b5c4cb9
[ "DOC", "Zlib", "BSD-3-Clause" ]
1
2020-11-24T15:25:26.000Z
2020-11-24T15:25:26.000Z
# Copyright (c) 2020, Fabio Muratore, Honda Research Institute Europe GmbH, and # Technical University of Darmstadt. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of Fabio Muratore, Honda Research Institute Europe GmbH, # or Technical University of Darmstadt, nor the names of its contributors may # be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL FABIO MURATORE, HONDA RESEARCH INSTITUTE EUROPE GMBH, # OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER # IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import mujoco_py import numpy as np import os.path as osp from init_args_serializer import Serializable from typing import Optional import pyrado from pyrado.environments.barrett_wam import ( goal_pos_init_sim_4dof, goal_pos_init_sim_7dof, init_qpos_des_4dof, init_qpos_des_7dof, act_space_bic_4dof, act_space_bic_7dof, wam_q_limits_up_7dof, wam_q_limits_lo_7dof, torque_space_wam_4dof, torque_space_wam_7dof, wam_pgains_7dof, wam_dgains_7dof, wam_pgains_4dof, wam_dgains_4dof, ) from pyrado.environments.mujoco.base import MujocoSimEnv from pyrado.spaces.base import Space from pyrado.spaces.box import BoxSpace from pyrado.spaces.singular import SingularStateSpace from pyrado.tasks.base import Task from pyrado.tasks.condition_only import ConditionOnlyTask from pyrado.tasks.desired_state import DesStateTask from pyrado.tasks.final_reward import BestStateFinalRewTask, FinalRewTask, FinalRewMode from pyrado.tasks.goalless import GoallessTask from pyrado.tasks.masked import MaskedTask from pyrado.tasks.parallel import ParallelTasks from pyrado.tasks.reward_functions import ZeroPerStepRewFcn, ExpQuadrErrRewFcn, QuadrErrRewFcn from pyrado.tasks.sequential import SequentialTasks from pyrado.utils.data_types import EnvSpec from pyrado.utils.input_output import print_cbt def check_ball_collisions(self, verbose: bool = False) -> bool: """ Check if an undesired collision with the ball occurs. :param verbose: print messages on collision :return: `True` if the ball collides with something else than the central parts of the cup """ for i in range(self.sim.data.ncon): # Get current contact object contact = self.sim.data.contact[i] # Extract body-id and body-name of both contact geoms body1 = self.model.geom_bodyid[contact.geom1] body1_name = self.model.body_names[body1] body2 = self.model.geom_bodyid[contact.geom2] body2_name = self.model.body_names[body2] # Evaluate if the ball collides with part of the WAM (collision bodies) # or the connection of WAM and cup (geom_ids) c1 = body1_name == "ball" and ( body2_name in self._collision_bodies or contact.geom2 in self._collision_geom_ids ) c2 = body2_name == "ball" and ( body1_name in self._collision_bodies or contact.geom1 in self._collision_geom_ids ) if c1 or c2: if verbose: print_cbt( f"Undesired collision of {body1_name} and {body2_name} detected!", "y", ) return True return False def check_ball_in_cup(self, *args, verbose: bool = False): """ Check if the ball is in the cup. :param verbose: print messages when ball is in the cup :return: `True` if the ball is in the cup """ for i in range(self.sim.data.ncon): # Get current contact object contact = self.sim.data.contact[i] # Extract body-id and body-name of both contact geoms body1 = self.model.geom_bodyid[contact.geom1] body1_name = self.model.body_names[body1] body2 = self.model.geom_bodyid[contact.geom2] body2_name = self.model.body_names[body2] # Evaluate if the ball collides with part of the WAM (collision bodies) # or the connection of WAM and cup (geom_ids) cup_inner_id = self.model._geom_name2id["cup_inner"] c1 = body1_name == "ball" and contact.geom2 == cup_inner_id c2 = body2_name == "ball" and contact.geom1 == cup_inner_id if c1 or c2: if verbose: print_cbt(f"The ball is in the cup at time step {self.curr_step}.", "y") return True return False
49.037879
120
0.632551
9c6edb66b25f5b7d6f691984d70d7a69bf328bdb
469
py
Python
pyRasp.py
ToninoTarsi/pyRasp
a46bb1dc38c7547b60e24189ecf34310da770042
[ "MIT" ]
null
null
null
pyRasp.py
ToninoTarsi/pyRasp
a46bb1dc38c7547b60e24189ecf34310da770042
[ "MIT" ]
null
null
null
pyRasp.py
ToninoTarsi/pyRasp
a46bb1dc38c7547b60e24189ecf34310da770042
[ "MIT" ]
null
null
null
# pyRasp # Copyright (c) Tonino Tarsi 2020. Licensed under MIT. # requirement : # Python 3 # pip install pyyaml # pip install request # pip install f90nml from downloadGFSA import downloadGFSA from prepare_wps import prepare_wps from ungrib import ungrib from metgrid import metgrid from prepare_wrf import prepare_wrf from real import real from wrf import wrf result = downloadGFSA(True) prepare_wps(result) ungrib() metgrid() prepare_wrf(result) real() wrf()
16.75
54
0.784648