Dataset schema (⌀ marks a nullable column):

| Column | Type | Range / values |
| --- | --- | --- |
| hexsha | string | length 40 |
| size | int64 | 5 – 2.06M |
| ext | string | 11 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 3 – 251 |
| max_stars_repo_name | string | length 4 – 130 |
| max_stars_repo_head_hexsha | string | length 40 – 78 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 ⌀ | 1 – 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 3 – 251 |
| max_issues_repo_name | string | length 4 – 130 |
| max_issues_repo_head_hexsha | string | length 40 – 78 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 ⌀ | 1 – 116k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 3 – 251 |
| max_forks_repo_name | string | length 4 – 130 |
| max_forks_repo_head_hexsha | string | length 40 – 78 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 ⌀ | 1 – 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 1 – 1.05M |
| avg_line_length | float64 | 1 – 1.02M |
| max_line_length | int64 | 3 – 1.04M |
| alphanum_fraction | float64 | 0 – 1 |

Each record below lists the 25 metadata columns on one line, in the order above, followed by the file `content` and a closing `| avg_line_length | max_line_length | alphanum_fraction |` line.
4c61ecd42ed59f6a2c7fd49a38719e52edaf2a1f | 845 | py | Python | orion/modules/active/wolfram.py | isathish/ai_opesource | cdccd882306c45712fcdd40e15937b5a9571028a | ["MIT"] | null | null | null | orion/modules/active/wolfram.py | isathish/ai_opesource | cdccd882306c45712fcdd40e15937b5a9571028a | ["MIT"] | null | null | null | orion/modules/active/wolfram.py | isathish/ai_opesource | cdccd882306c45712fcdd40e15937b5a9571028a | ["MIT"] | null | null | null |
"""
Handles most general questions (including math!)
Requires:
- WolframAlpha API key
Usage Examples:
- "How tall is Mount Everest?"
- "What is the derivative of y = 2x?"
"""
import wolframalpha
from orion.classes.module import Module
from orion.classes.task import ActiveTask
from orion import settings
wolfram_client = wolframalpha.Client(settings.WOLFRAM_KEY)
| 21.666667 | 68 | 0.639053 |
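Note: the `content` above is truncated; only the module docstring and client setup survive, while the task class that actually answers questions is cut off. A minimal sketch of how a query could be issued with the `wolframalpha` client (the function name and fallback behavior are assumptions, not from the source file):

def answer_question(query):
    # Send the text to WolframAlpha and return the first plaintext result pod.
    res = wolfram_client.query(query)
    try:
        return next(res.results).text
    except (StopIteration, AttributeError):
        # No parsable answer for this query
        return None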
4c624ee7a6d344a15a579b043c3cb6fef1c9aa3b | 1,035 | py | Python | polymatch/matchers/standard.py | linuxdaemon/poly-match | 66d967999de982d5ee9463c46b0ff8040d91dc67 | ["MIT"] | null | null | null | polymatch/matchers/standard.py | linuxdaemon/poly-match | 66d967999de982d5ee9463c46b0ff8040d91dc67 | ["MIT"] | 26 | 2020-05-13T17:46:45.000Z | 2022-03-18T16:07:14.000Z | polymatch/matchers/standard.py | TotallyNotRobots/poly-match | 66d967999de982d5ee9463c46b0ff8040d91dc67 | ["MIT"] | null | null | null |
from polymatch import PolymorphicMatcher
| 23.522727 | 46 | 0.696618 |
4c6289a028d756ccd03ac220d11a9d33117ee573 | 6,530 | py | Python | djcorsche/settings_default.py | carthage-college/django-djcorsche | c43db6e634f5b3fc9c8b0cff80ced8382ca6643c | ["BSD-3-Clause"] | null | null | null | djcorsche/settings_default.py | carthage-college/django-djcorsche | c43db6e634f5b3fc9c8b0cff80ced8382ca6643c | ["BSD-3-Clause"] | null | null | null | djcorsche/settings_default.py | carthage-college/django-djcorsche | c43db6e634f5b3fc9c8b0cff80ced8382ca6643c | ["BSD-3-Clause"] | null | null | null |
"""
Django settings for project.
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# Debug
#DEBUG = False
DEBUG = True
TEMPLATE_DEBUG = DEBUG
INFORMIX_DEBUG = "debug"
ADMINS = (
('', ''),
)
MANAGERS = ADMINS
SECRET_KEY = ''
ALLOWED_HOSTS = []
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Chicago'
SITE_ID = 1
USE_I18N = False
USE_L10N = False
USE_TZ = False
DEFAULT_CHARSET = 'utf-8'
FILE_CHARSET = 'utf-8'
SERVER_URL = ""
API_URL = "%s/%s" % (SERVER_URL, "api")
LIVEWHALE_API_URL = "https://%s" % (SERVER_URL)
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
ROOT_DIR = os.path.dirname(__file__)
ROOT_URL = "/djskeletor/"
ROOT_URLCONF = 'djskeletor.core.urls'
WSGI_APPLICATION = 'djskeletor.wsgi.application'
MEDIA_ROOT = ''
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATIC_ROOT = ''
STATIC_URL = "/static/"
STATICFILES_DIRS = ()
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
DATABASES = {
'default': {
'HOST': '127.0.0.1',
'PORT': '3306',
'NAME': 'django_djskeletor',
'ENGINE': 'django.db.backends.mysql',
#'ENGINE': 'django.db.backends.dummy',
'USER': '',
'PASSWORD': ''
},
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.formtools',
'django.contrib.humanize',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
'djskeletor',
'djskeletor.core',
'djskeletor.myapp',
'djtools',
)
MIDDLEWARE_CLASSES = (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# the following should be uncommented unless you are
# embedding your apps in iframes
#'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# template stuff
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_DIRS = (
"/data2/django_projects/djskeletor/templates/",
"/data2/django_templates/djkorra/",
"/data2/django_templates/djcher/",
"/data2/django_templates/",
)
TEMPLATE_CONTEXT_PROCESSORS = (
"djtools.context_processors.sitevars",
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
"django.core.context_processors.debug",
"django.core.context_processors.media",
)
# caching
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
#'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#'LOCATION': '127.0.0.1:11211',
#'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
#'LOCATION': '/var/tmp/django_djskeletor_cache',
#'TIMEOUT': 60*20,
#'KEY_PREFIX': "DJSKELETOR_",
#'OPTIONS': {
# 'MAX_ENTRIES': 80000,
#}
}
}
CACHE_MIDDLEWARE_ANONYMOUS_ONLY = True
# LDAP Constants
LDAP_SERVER = ''
LDAP_SERVER_PWM = ''
LDAP_PORT = ''
LDAP_PORT_PWM = ''
LDAP_PROTOCOL = ""
LDAP_PROTOCOL_PWM = ""
LDAP_BASE = ""
LDAP_USER = ""
LDAP_PASS = ""
LDAP_EMAIL_DOMAIN = ""
LDAP_OBJECT_CLASS = ""
LDAP_OBJECT_CLASS_LIST = []
LDAP_GROUPS = {}
LDAP_RETURN = []
LDAP_RETURN_PWM = []
LDAP_ID_ATTR = ""
LDAP_CHALLENGE_ATTR = ""
# auth backends
AUTHENTICATION_BACKENDS = (
'djauth.ldapBackend.LDAPBackend',
'django.contrib.auth.backends.ModelBackend',
)
LOGIN_URL = '/djskeletor/accounts/login/'
LOGIN_REDIRECT_URL = '/djskeletor/'
USE_X_FORWARDED_HOST = True
#SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
SESSION_COOKIE_DOMAIN=".carthage.edu"
SESSION_COOKIE_NAME ='django_djskeletor_cookie'
SESSION_COOKIE_AGE = 86400
# SMTP settings
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = True
EMAIL_PORT = 587
EMAIL_FAIL_SILENTLY = False
DEFAULT_FROM_EMAIL = ''
SERVER_EMAIL = ''
SERVER_MAIL=''
# logging
LOG_FILEPATH = os.path.join(os.path.dirname(__file__), "logs/")
LOG_FILENAME = LOG_FILEPATH + "debug.log"
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format' : "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
'datefmt' : "%Y/%b/%d %H:%M:%S"
},
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s',
'datefmt' : "%Y/%b/%d %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'null': {
'level':'DEBUG',
'class':'django.utils.log.NullHandler',
},
'logfile': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': LOG_FILENAME,
'maxBytes': 50000,
'backupCount': 2,
'formatter': 'standard',
},
'console':{
'level':'INFO',
'class':'logging.StreamHandler',
'formatter': 'standard'
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'include_html': True,
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'djskeletor': {
'handlers':['logfile'],
'propagate': True,
'level':'DEBUG',
},
'django': {
'handlers':['console'],
'propagate': True,
'level':'WARN',
},
'django.db.backends': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': False,
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| 27.552743 | 96 | 0.620214 |
4c6339b396838bba425536d8c48a53a76850151f | 956 | py | Python | records/12-09/ffff.py | AaronYang2333/CSCI_570 | 03e34ce5ff192fc94612bc3afb51dcab3e854462 | ["Apache-2.0"] | 36 | 2020-07-25T00:13:25.000Z | 2022-02-28T17:48:15.000Z | records/12-09/ffff.py | AaronYang2333/LeetCode | 03e34ce5ff192fc94612bc3afb51dcab3e854462 | ["Apache-2.0"] | 6 | 2020-06-06T04:39:37.000Z | 2021-04-03T01:45:39.000Z | records/12-09/ffff.py | AaronYang2333/LeetCode | 03e34ce5ff192fc94612bc3afb51dcab3e854462 | ["Apache-2.0"] | 16 | 2020-12-16T02:38:16.000Z | 2022-02-28T17:48:16.000Z |
__author__ = 'Aaron Yang'
__email__ = '[email protected]'
__date__ = '12/9/2020 4:18 PM'
from abc import abstractmethod
if __name__ == '__main__':
ss = Factory().produce()
pc = PCFactory().produce()
laptop = LAPTOPFactory().produce()
pc.info()
laptop.info()
ss.info()
| 16.20339 | 41 | 0.624477 |
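Note: the snippet above calls `Factory()`, `PCFactory()`, and `LAPTOPFactory()`, but the class definitions were truncated out of the row. A minimal abstract-factory sketch that makes the `__main__` block run; the product classes and their `info()` output are assumptions:

from abc import ABC, abstractmethod

class Product(ABC):
    @abstractmethod
    def info(self):
        ...

class PC(Product):
    def info(self):
        print('PC produced')

class Laptop(Product):
    def info(self):
        print('Laptop produced')

class Factory:
    def produce(self):
        return PC()  # assumed default product

class PCFactory(Factory):
    def produce(self):
        return PC()

class LAPTOPFactory(Factory):
    def produce(self):
        return Laptop()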
4c63d036bfd0e51ade860a3521aecee117e88f7d | 7,064 | py | Python | tests/test_users.py | fastapi-users/fastapi-users-db-sqlmodel | 3a46b80399f129aa07a834a1b40bf49d08c37be1 | ["MIT"] | 18 | 2021-09-09T09:35:30.000Z | 2022-03-19T04:58:17.000Z | tests/test_users.py | fastapi-users/fastapi-users-db-sqlmodel | 3a46b80399f129aa07a834a1b40bf49d08c37be1 | ["MIT"] | null | null | null | tests/test_users.py | fastapi-users/fastapi-users-db-sqlmodel | 3a46b80399f129aa07a834a1b40bf49d08c37be1 | ["MIT"] | 3 | 2021-11-01T16:58:54.000Z | 2022-02-15T16:17:11.000Z |
import uuid
from typing import AsyncGenerator
import pytest
from sqlalchemy import exc
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel import Session, SQLModel, create_engine
from fastapi_users_db_sqlmodel import (
NotSetOAuthAccountTableError,
SQLModelUserDatabase,
SQLModelUserDatabaseAsync,
)
from tests.conftest import OAuthAccount, UserDB, UserDBOAuth
safe_uuid = uuid.UUID("a9089e5d-2642-406d-a7c0-cbc641aca0ec")
| 30.982456 | 88 | 0.709513 |
4c64a40785307d838c76dd7877d9296fa9590e81 | 623 | py | Python | copy_reg.py | rtbo/vkdgen | 04a228961bb091b59dc6f741eee703cd81724ca3 | ["MIT"] | 2 | 2021-01-08T15:05:27.000Z | 2021-10-12T08:44:01.000Z | copy_reg.py | rtbo/vkdgen | 04a228961bb091b59dc6f741eee703cd81724ca3 | ["MIT"] | null | null | null | copy_reg.py | rtbo/vkdgen | 04a228961bb091b59dc6f741eee703cd81724ca3 | ["MIT"] | null | null | null |
#! /usr/bin/env python3
import os
from os import path
root_dir = path.dirname(path.realpath(__file__))
local_reg_dir = path.join(root_dir, 'registry')
os.makedirs(local_reg_dir, exist_ok=True)
vk_files = [ 'registry/vk.xml', 'registry/reg.py', 'registry/generator.py' ]
copy_reg(path.join(root_dir, 'Vulkan-Headers'), vk_files)
| 31.15 | 76 | 0.704655 |
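Note: the script calls `copy_reg(...)` on its last line, but the function definition is missing from the row. A plausible reconstruction given the surrounding code (the original may differ, e.g. by creating subdirectories), and in the original file it would have to precede the call:

import shutil

def copy_reg(src_root, files):
    # Copy each registry file from the Vulkan-Headers checkout
    # into the local 'registry' directory created above.
    for f in files:
        shutil.copy(path.join(src_root, f), local_reg_dir)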
4c64de6df990440fb9bf292eb702bdb614dfcfae | 22,653 | py | Python | utils.py | atward424/ASCVD_ML | 39404dd5f50a527576b91e8f53f5157f76382712 | ["Apache-2.0"] | 1 | 2021-04-08T07:05:18.000Z | 2021-04-08T07:05:18.000Z | utils.py | atward424/ASCVD_ML | 39404dd5f50a527576b91e8f53f5157f76382712 | ["Apache-2.0"] | null | null | null | utils.py | atward424/ASCVD_ML | 39404dd5f50a527576b91e8f53f5157f76382712 | ["Apache-2.0"] | 1 | 2021-04-08T07:07:53.000Z | 2021-04-08T07:07:53.000Z |
import numpy as np
import pandas as pd
import scipy.stats as st
#from medical_ML import Experiment
import matplotlib.pyplot as plt
import xgboost as xgb
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.dummy import DummyClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression, Lasso
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn import linear_model
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.dummy import DummyRegressor
def split_cohort(datafile, to_exclude = None, test_ind_col = None, drop = 'some'):
""" Load and clean the dataset
"""
if isinstance(datafile, str):
data = pd.read_csv(datafile)
else:
data = datafile
test_data = None
if to_exclude is not None:
for k in to_exclude.keys():
if k == 'race':
data = data[data[k].isin(to_exclude[k])]
elif k == 'agebl':
data = data[data[k] >= to_exclude[k]]
elif to_exclude[k]:
data = data[data[k] == 0]
if drop == 'some':
data = data.drop(k, axis = 1)
if drop == 'all':
if (k != 'race') & (k != 'agebl'):
data = data.drop(k, axis = 1)
# self.data = self.data[self.data['year'] <= 2010]
# self.data = self.data.drop(['year'], axis = 1)
if test_ind_col is not None:
test_data = data[data[test_ind_col] == 1]
test_data = test_data.drop(test_ind_col, axis = 1)
data = data[data[test_ind_col] == 0]
data = data.drop(test_ind_col, axis = 1)
return(data, test_data)
# ax.patch.set_facecolor("0.85")
| 48.821121 | 147 | 0.382201 |
4c656802f3785c807e752895a2d07dd94b79c82b | 4,377 | py | Python | cloud/caasp-admin-setup/lib/caaspadminsetup/utils.py | hwoarang/caasp-container-manifests | 6df831d6b4f4218f96e552c416d86eabcfad46c0 | ["Apache-2.0"] | 5 | 2017-03-16T10:47:39.000Z | 2018-01-17T13:07:03.000Z | cloud/caasp-admin-setup/lib/caaspadminsetup/utils.py | hwoarang/caasp-container-manifests | 6df831d6b4f4218f96e552c416d86eabcfad46c0 | ["Apache-2.0"] | 138 | 2017-03-08T12:43:51.000Z | 2019-04-15T12:57:30.000Z | cloud/caasp-admin-setup/lib/caaspadminsetup/utils.py | hwoarang/caasp-container-manifests | 6df831d6b4f4218f96e552c416d86eabcfad46c0 | ["Apache-2.0"] | 26 | 2017-03-09T08:24:03.000Z | 2019-03-08T00:26:52.000Z |
import json
import logging
import re
import susepubliccloudinfoclient.infoserverrequests as ifsrequest
import yaml
import sys
RELEASE_DATE = re.compile(r'^.*-v(\d{8})-*.*')
def get_caasp_release_version():
"""Return the version from os-release"""
os_release = open('/etc/os-release', 'r').readlines()
for entry in os_release:
if entry.startswith('VERSION_ID'):
version_id = entry.split('=')[-1].strip()
# We assume that os-release will always have '"' as
# version delimiters
version = version_id.strip('"\'')
logging.info('Release version: "%s"' % version)
return version
def get_cloud_config_path():
"""Return the path for the cloud configuration file"""
return '/etc/salt/pillar/cloud.sls'
def get_from_config(config_option):
"""Get the value for the given config option"""
# Expected low usage of this method, re-read the file on an as needed
# basis. If this turns out to be an issue cache the content
config_path = get_cloud_config_path()
with open(config_path) as config_file:
config = yaml.load(config_file.read())
settings = config.get('cloud')
if not settings:
return
return settings.get(config_option)
def get_cluster_image_identifier(framework, region):
"""Return the identifier for the latest cluster node image"""
cluster_image = get_from_config('cluster_image')
if cluster_image:
# The data returned in this code path has built in knowledge
# about the information consumed by the client from the
# full pint data
image_data = {}
image_data['id'] = cluster_image
image_data['name'] = cluster_image
if framework == 'microsoft' and cluster_image.count(':') == 3:
image_data['urn'] = cluster_image
msg = 'Using cluster image from configuration. '
msg += 'Image data for cluster node image: "%s"'
logging.info(msg % image_data)
return image_data
name_filter = 'name~caasp,name~cluster'
flavor = get_from_config('procurement_flavor')
if flavor == 'byos':
name_filter += ',name~byos'
else:
name_filter += ',name!byos'
version = get_caasp_release_version()
name_filter += ',name~' + version.replace('.', '-')
# The cluster image we choose depends on the admin node version,
# thus we cannot just query for active images. We need to get all
# images and then process accordingly.
try:
image_info = ifsrequest.get_image_data(
framework,
None,
'json',
region,
name_filter
)
except Exception as e:
        logging.error('Pint server access failed: "%s"' % e)  # .message is Python 2 only
# This message will bubble up through salt
return 'See /var/log/caasp_cloud_setup.log'
try:
image_data = json.loads(image_info)
available_images = image_data.get('images', [])
target_image = None
target_image_date = 0
for image in available_images:
image_name = image.get('name')
try:
date = int(RELEASE_DATE.match(image_name).group(1))
if date > target_image_date:
# If we have multiple images with the same date that
# match our filter criteria we have a serious data problem
# we cannot really recover, the first one wins
target_image = image
except Exception:
# Image name with no date stamp skip it
continue
except Exception as e:
        logging.error('Could not load json data from pint: "%s"' % e)  # .message is Python 2 only
# This message will bubble up through salt
return 'See /var/log/caasp_cloud_setup.log'
if not target_image:
logging.error('Could not determine image identifier for cluster node.')
logging.error('This implies that the pint server is unreachable or the '
'data is incomplete, please report the issue, exiting.')
sys.exit('pint lookup failed')
logging.info('Image data for cluster node image: "%s"' % target_image)
return target_image
| 37.732759 | 80 | 0.631026 |
4c66a4345821de6dcbba5bb0bbb633c3ee79daa3 | 2,219 | py | Python | tools/Bitcoin Parser/blockchain_parser/tests/test_block.py | simewu/bitcoin_researcher | b9fd2efdb8ae8467c5bd4b3320713a541635df16 | ["MIT"] | 1 | 2020-02-15T21:44:04.000Z | 2020-02-15T21:44:04.000Z | tools/Bitcoin Parser/blockchain_parser/tests/test_block.py | SimeoW/bitcoin | 3644405f06c8b16a437513e8c02f0f061b91be2e | ["MIT"] | null | null | null | tools/Bitcoin Parser/blockchain_parser/tests/test_block.py | SimeoW/bitcoin | 3644405f06c8b16a437513e8c02f0f061b91be2e | ["MIT"] | null | null | null |
# Copyright (C) 2015-2016 The bitcoin-blockchain-parser developers
#
# This file is part of bitcoin-blockchain-parser.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of bitcoin-blockchain-parser, including this file, may be copied,
# modified, propagated, or distributed except according to the terms contained
# in the LICENSE file.
import unittest
from datetime import datetime
from .utils import read_test_data
from blockchain_parser.block import Block
| 43.509804 | 79 | 0.68995 |
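Note: only the license header and imports of the test module survive. A hedged sketch of a test in this style; the test-data file name and the asserted properties are illustrative assumptions, not taken from the repository:

class TestBlock(unittest.TestCase):
    def test_from_raw_data(self):
        # Assumes read_test_data returns the raw bytes of a serialized block.
        raw_block = read_test_data('genesis_block.bin')
        block = Block(raw_block)
        self.assertEqual(block.n_transactions, len(block.transactions))
        self.assertIsInstance(block.header.timestamp, datetime)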
4c66dd7c5bb758efe86e3099648aa0be4405bfa6 | 75 | py | Python | genegenie/admin/__init__.py | genegeniebio/genegenie-admin | 93e9253febc14b17d17a5fbc2eb0e22f1c974083 | ["MIT"] | null | null | null | genegenie/admin/__init__.py | genegeniebio/genegenie-admin | 93e9253febc14b17d17a5fbc2eb0e22f1c974083 | ["MIT"] | null | null | null | genegenie/admin/__init__.py | genegeniebio/genegenie-admin | 93e9253febc14b17d17a5fbc2eb0e22f1c974083 | ["MIT"] | null | null | null |
'''
DNA++ (c) DNA++ 2017
All rights reserved.
@author: neilswainston
'''
| 9.375 | 22 | 0.626667 |
4c69488448856c1dbc829f26d69379083cb5c7c7 | 600 | py | Python | tests/conftest.py | pkavousi/similar-users | 8434e0a03dc8dfa218a34601431c564dff3e80b6 | ["FTL", "RSA-MD"] | null | null | null | tests/conftest.py | pkavousi/similar-users | 8434e0a03dc8dfa218a34601431c564dff3e80b6 | ["FTL", "RSA-MD"] | null | null | null | tests/conftest.py | pkavousi/similar-users | 8434e0a03dc8dfa218a34601431c564dff3e80b6 | ["FTL", "RSA-MD"] | null | null | null |
import os
import pandas as pd
import pytest
from user_similarity_model.config.core import DATASET_DIR, config
| 27.272727 | 78 | 0.716667 |
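Note: the fixture definitions are truncated. A minimal sketch of what a pytest fixture here might look like; the fixture name and CSV file name are assumptions:

@pytest.fixture()
def sample_input_data():
    # Load the test dataset shipped with the package.
    return pd.read_csv(os.path.join(DATASET_DIR, 'sample_data.csv'))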
4c6a6e28161a83ca0b9ef2212d453c1bc1cfcfd6 | 232 | py | Python | weather/apps.py | chrisjen83/rfb_weather_obs | 8eab16358c5059655d208ef41aa38692fa21776f | ["Apache-2.0"] | 1 | 2020-12-05T05:23:26.000Z | 2020-12-05T05:23:26.000Z | weather/apps.py | chrisjen83/rfb_weather_obs | 8eab16358c5059655d208ef41aa38692fa21776f | ["Apache-2.0"] | null | null | null | weather/apps.py | chrisjen83/rfb_weather_obs | 8eab16358c5059655d208ef41aa38692fa21776f | ["Apache-2.0"] | null | null | null |
from django.apps import AppConfig
import logging
logger = logging.getLogger(__name__)
| 17.846154 | 43 | 0.715517 |
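Note: the `AppConfig` subclass this file exists for is cut off. The conventional Django pattern, with the class name assumed from the app label:

class WeatherConfig(AppConfig):
    name = 'weather'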
4c6a77a19021a586afe308be8abcbb50f2c090fd | 26 | py | Python | projects/django-filer/test.py | fleimgruber/python | 2e735762c73651cffc027ca850b2a58d87d54b49 | ["Unlicense"] | 25 | 2021-10-30T19:54:59.000Z | 2022-03-29T06:11:02.000Z | projects/django-filer/test.py | fleimgruber/python | 2e735762c73651cffc027ca850b2a58d87d54b49 | ["Unlicense"] | 21 | 2021-10-19T01:09:38.000Z | 2022-03-24T16:08:53.000Z | projects/django-filer/test.py | fleimgruber/python | 2e735762c73651cffc027ca850b2a58d87d54b49 | ["Unlicense"] | 3 | 2022-01-25T20:25:13.000Z | 2022-03-08T02:58:50.000Z |
import filer
import tests
| 8.666667 | 12 | 0.846154 |
4c6c5b767e3d2e7d380bed49701614a213de873b | 8,063 | py | Python | examples/plots/plot_pass_network.py | DymondFormation/mplsoccer | 544300857ec5936781e12fda203cf2df8a3d00b9 | ["MIT"] | null | null | null | examples/plots/plot_pass_network.py | DymondFormation/mplsoccer | 544300857ec5936781e12fda203cf2df8a3d00b9 | ["MIT"] | null | null | null | examples/plots/plot_pass_network.py | DymondFormation/mplsoccer | 544300857ec5936781e12fda203cf2df8a3d00b9 | ["MIT"] | null | null | null |
"""
============
Pass Network
============
This example shows how to plot passes between players in a set formation.
"""
import pandas as pd
from mplsoccer.pitch import Pitch
from matplotlib.colors import to_rgba
import numpy as np
from mplsoccer.statsbomb import read_event, EVENT_SLUG
##############################################################################
# Set team and match info, and get event and tactics dataframes for the defined match_id
match_id = 15946
team = 'Barcelona'
opponent = 'Alavés (A), 2018/19 La Liga'
event_dict = read_event(f'{EVENT_SLUG}/{match_id}.json', warn=False)
players = event_dict['tactics_lineup']
events = event_dict['event']
##############################################################################
# Adding on the last tactics id and formation for the team for each event
events.loc[events.tactics_formation.notnull(), 'tactics_id'] = events.loc[
events.tactics_formation.notnull(), 'id']
events[['tactics_id', 'tactics_formation']] = events.groupby('team_name')[[
'tactics_id', 'tactics_formation']].ffill()
##############################################################################
# Add the abbreviated player position to the players dataframe
formation_dict = {1: 'GK', 2: 'RB', 3: 'RCB', 4: 'CB', 5: 'LCB', 6: 'LB', 7: 'RWB',
8: 'LWB', 9: 'RDM', 10: 'CDM', 11: 'LDM', 12: 'RM', 13: 'RCM',
14: 'CM', 15: 'LCM', 16: 'LM', 17: 'RW', 18: 'RAM', 19: 'CAM',
20: 'LAM', 21: 'LW', 22: 'RCF', 23: 'ST', 24: 'LCF', 25: 'SS'}
players['position_abbreviation'] = players.player_position_id.map(formation_dict)
##############################################################################
# Add on the substitutions to the players dataframe, i.e. where players are subbed on
# but the formation doesn't change
sub = events.loc[events.type_name == 'Substitution',
['tactics_id', 'player_id', 'substitution_replacement_id',
'substitution_replacement_name']]
players_sub = players.merge(sub.rename({'tactics_id': 'id'}, axis='columns'),
on=['id', 'player_id'], how='inner', validate='1:1')
players_sub = (players_sub[['id', 'substitution_replacement_id', 'position_abbreviation']]
.rename({'substitution_replacement_id': 'player_id'}, axis='columns'))
players = pd.concat([players, players_sub])
players.rename({'id': 'tactics_id'}, axis='columns', inplace=True)
players = players[['tactics_id', 'player_id', 'position_abbreviation']]
##############################################################################
# Add player position information to the events dataframe
# add on the position the player was playing in the formation to the events dataframe
events = events.merge(players, on=['tactics_id', 'player_id'], how='left', validate='m:1')
# add on the position the recipient was playing in the formation to the events dataframe
events = events.merge(players.rename({'player_id': 'pass_recipient_id'},
axis='columns'), on=['tactics_id', 'pass_recipient_id'],
how='left', validate='m:1', suffixes=['', '_receipt'])
##############################################################################
# Create dataframes for passes and player locations
# get a dataframe with all passes
mask_pass = (events.team_name == team) & (events.type_name == 'Pass')
to_keep = ['id', 'match_id', 'player_id', 'player_name', 'outcome_name', 'pass_recipient_id',
'pass_recipient_name', 'x', 'y', 'end_x', 'end_y', 'tactics_id', 'tactics_formation',
'position_abbreviation', 'position_abbreviation_receipt']
passes = events.loc[mask_pass, to_keep].copy()
print('Formations used by {} in match: '.format(team), passes['tactics_formation'].unique())
##############################################################################
# Filter passes by chosen formation, then group all passes and receipts to
# calculate avg x, avg y, count of events for each slot in the formation
formation = 433
passes_formation = passes[(passes.tactics_formation == formation) &
(passes.position_abbreviation_receipt.notnull())].copy()
passer_passes = passes_formation[['position_abbreviation', 'x', 'y']].copy()
recipient_passes = passes_formation[['position_abbreviation_receipt', 'end_x', 'end_y']].copy()
# rename columns to match those in passer_passes
recipient_passes.rename({'position_abbreviation_receipt': 'position_abbreviation',
'end_x': 'x', 'end_y': 'y'}, axis='columns', inplace=True)
# create a new dataframe containing all individual passes and receipts from passes_formation
appended_passes = pd.concat(objs=[passer_passes, recipient_passes], ignore_index=True)
average_locs_and_count = appended_passes.groupby('position_abbreviation').agg({
'x': ['mean'], 'y': ['mean', 'count']})
average_locs_and_count.columns = ['x', 'y', 'count']
##############################################################################
# Group the passes by unique pairings of players and add the avg player positions to this dataframe
# calculate the number of passes between each position (using min/ max so we get passes both ways)
passes_formation['pos_max'] = passes_formation[['position_abbreviation',
'position_abbreviation_receipt']].max(axis='columns')
passes_formation['pos_min'] = passes_formation[['position_abbreviation',
'position_abbreviation_receipt']].min(axis='columns')
passes_between = passes_formation.groupby(['pos_min', 'pos_max']).id.count().reset_index()
passes_between.rename({'id': 'pass_count'}, axis='columns', inplace=True)
# add on the location of each player so we have the start and end positions of the lines
passes_between = passes_between.merge(average_locs_and_count, left_on='pos_min', right_index=True)
passes_between = passes_between.merge(average_locs_and_count, left_on='pos_max', right_index=True,
suffixes=['', '_end'])
##############################################################################
# Calculate the line width and marker sizes relative to the largest counts
max_line_width = 18
max_marker_size = 3000
passes_between['width'] = passes_between.pass_count / passes_between.pass_count.max() * max_line_width
average_locs_and_count['marker_size'] = (average_locs_and_count['count']
/ average_locs_and_count['count'].max() * max_marker_size)
##############################################################################
# Set color to make the lines more transparent when fewer passes are made
min_transparency = 0.3
color = np.array(to_rgba('white'))
color = np.tile(color, (len(passes_between), 1))
c_transparency = passes_between.pass_count / passes_between.pass_count.max()
c_transparency = (c_transparency * (1 - min_transparency)) + min_transparency
color[:, 3] = c_transparency
##############################################################################
# Plotting
pitch = Pitch(pitch_type='statsbomb', orientation='horizontal',
pitch_color='#22312b', line_color='#c7d5cc', figsize=(16, 11),
constrained_layout=True, tight_layout=False)
fig, ax = pitch.draw()
pass_lines = pitch.lines(passes_between.x, passes_between.y,
passes_between.x_end, passes_between.y_end, lw=passes_between.width,
color=color, zorder=1, ax=ax)
pass_nodes = pitch.scatter(average_locs_and_count.x, average_locs_and_count.y, s=average_locs_and_count.marker_size,
color='red', edgecolors='black', linewidth=1, alpha=1, ax=ax)
for index, row in average_locs_and_count.iterrows():
pitch.annotate(row.name, xy=(row.x, row.y), c='white', va='center', ha='center', size=16, weight='bold', ax=ax)
title = ax.set_title("{} {} Formation vs {}".format(team, formation, opponent), size=28, y=0.97, color='#c7d5cc')
fig.set_facecolor("#22312b")
| 55.226027 | 116 | 0.615032 |
4c6cd0ca287f397e656cbb934079a5d03bb867b9 | 2,786 | py | Python | jsfiddle_factory/__init__.py | andrewp-as-is/jsfiddle-factory.py | 7b8b883676f3330f5714b15157819b583a753ba1 | ["Unlicense"] | null | null | null | jsfiddle_factory/__init__.py | andrewp-as-is/jsfiddle-factory.py | 7b8b883676f3330f5714b15157819b583a753ba1 | ["Unlicense"] | null | null | null | jsfiddle_factory/__init__.py | andrewp-as-is/jsfiddle-factory.py | 7b8b883676f3330f5714b15157819b583a753ba1 | ["Unlicense"] | null | null | null |
__all__ = ['Factory']
import jsfiddle_build
import jsfiddle_github
import jsfiddle_generator
import jsfiddle_readme_generator
import getdirs
import getfiles
import os
import popd
import yaml
class Factory:
"""attrs: `path`. methods: `detox()`, `init()`, `build()`, `readme()`, `update_resources()`"""
path = None
| 30.955556 | 98 | 0.578248 |
4c6d7d5083c40236ec67c12d5db46eb9b81e4185 | 5,774 | py | Python | spellnn/train.py | MartinXPN/SpellNN | e3226fbff359ef60360e63bf7b80a7e1c909e7d8 | ["MIT"] | null | null | null | spellnn/train.py | MartinXPN/SpellNN | e3226fbff359ef60360e63bf7b80a7e1c909e7d8 | ["MIT"] | null | null | null | spellnn/train.py | MartinXPN/SpellNN | e3226fbff359ef60360e63bf7b80a7e1c909e7d8 | ["MIT"] | null | null | null |
import logging
import os
from datetime import datetime
from inspect import signature, Parameter
from pathlib import Path
from pprint import pprint
from textwrap import dedent
from typing import Optional, Union
import fire
import tensorflow as tf
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard, TerminateOnNaN
from tensorflow.keras import Model
from spellnn import models
from spellnn.data import alphabet
from spellnn.data.alphabet import get_chars
from spellnn.data.processing import DataProcessor
from spellnn.data.util import nb_lines
from spellnn.layers.mapping import CharMapping
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # FATAL
logging.getLogger('tensorflow').setLevel(logging.FATAL)
if __name__ == '__main__':
cli = Gym()
fire.Fire(cli)
| 46.943089 | 118 | 0.662799 |
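Note: `fire.Fire(cli)` exposes a `Gym` object whose definition is truncated from the row. A skeletal stand-in that makes the entry point importable; method names and behavior are assumptions based on the imports:

class Gym:
    """Hypothetical trainer CLI; the real class presumably wires models, DataProcessor, and the Keras callbacks together."""
    def __init__(self):
        self.model: Optional[Model] = None

    def train(self, *args, **kwargs):
        raise NotImplementedError  # training loop elided in the source row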
4c6e61959c8414eed50a9b983937c8b1f9cf4b26 | 3,711 | py | Python | flax/core/frozen_dict.py | juliuskunze/flax | 929395cf5c7391bca3e33ef6760ff9591401d19e | ["Apache-2.0"] | null | null | null | flax/core/frozen_dict.py | juliuskunze/flax | 929395cf5c7391bca3e33ef6760ff9591401d19e | ["Apache-2.0"] | null | null | null | flax/core/frozen_dict.py | juliuskunze/flax | 929395cf5c7391bca3e33ef6760ff9591401d19e | ["Apache-2.0"] | null | null | null |
# Copyright 2020 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Frozen Dictionary."""
from typing import TypeVar, Mapping, Dict, Tuple
from flax import serialization
import jax
K = TypeVar('K')
V = TypeVar('V')
def freeze(xs: Dict[K, V]) -> FrozenDict[K, V]:
"""Freeze a nested dict.
Makes a nested `dict` immutable by transforming it into `FrozenDict`.
"""
# Turn the nested FrozenDict into a dict. This way the internal data structure
# of FrozenDict does not contain any FrozenDicts.
# instead we create those lazily in `__getitem__`.
# As a result tree_flatten/unflatten will be fast
# because it operates on native dicts.
xs = unfreeze(xs)
return FrozenDict(xs)
def unfreeze(x: FrozenDict[K, V]) -> Dict[K, V]:
"""Unfreeze a FrozenDict.
Makes a mutable copy of a `FrozenDict` mutable by transforming
it into (nested) dict.
"""
if not isinstance(x, (FrozenDict, dict)):
return x
ys = {}
for key, value in x.items():
ys[key] = unfreeze(value)
return ys
serialization.register_serialization_state(
FrozenDict,
_frozen_dict_state_dict,
_restore_frozen_dict)
| 25.770833 | 80 | 0.68041 |
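Note: `freeze`, `unfreeze`, and the serialization registration all refer to a `FrozenDict` class and two helper functions that were truncated. A compact sketch consistent with the surviving code; the real Flax class also supports `copy`/`pop`, pretty-printing, and JAX pytree registration, all omitted here:

class FrozenDict(Mapping[K, V]):
    """Immutable mapping; nested dicts are frozen lazily on access (sketch)."""

    def __init__(self, *args, **kwargs):
        self._dict = dict(*args, **kwargs)

    def __getitem__(self, key):
        value = self._dict[key]
        return FrozenDict(value) if isinstance(value, dict) else value

    def __iter__(self):
        return iter(self._dict)

    def __len__(self):
        return len(self._dict)

    def __repr__(self):
        return f'FrozenDict({self._dict!r})'

def _frozen_dict_state_dict(xs):
    return {key: serialization.to_state_dict(value) for key, value in xs.items()}

def _restore_frozen_dict(xs, states):
    return FrozenDict(
        {key: serialization.from_state_dict(value, states[key])
         for key, value in xs.items()})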
4c72d8c0b48b4984dfd1c6e64ae6bd05f864f9ea | 1,273 | py | Python | pybb/middleware.py | grigi/pybbm | 9ecc5e7fadf4da820d2fc2c22914e14f3545047d | ["BSD-2-Clause"] | null | null | null | pybb/middleware.py | grigi/pybbm | 9ecc5e7fadf4da820d2fc2c22914e14f3545047d | ["BSD-2-Clause"] | null | null | null | pybb/middleware.py | grigi/pybbm | 9ecc5e7fadf4da820d2fc2c22914e14f3545047d | ["BSD-2-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
from django.utils import translation
from django.db.models import ObjectDoesNotExist
from pybb import util
from pybb.signals import user_saved
| 37.441176 | 69 | 0.624509 |
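Note: the middleware class itself is truncated. pybbm's middleware activates the language stored on the user's forum profile; a hedged sketch in the old-style (pre-Django-1.10) middleware API this file's era implies — the profile `language` field name is an assumption:

class PybbMiddleware(object):
    def process_request(self, request):
        if request.user.is_authenticated():
            try:
                # Activate the language chosen in the user's forum profile.
                profile = util.get_pybb_profile(request.user)
            except ObjectDoesNotExist:
                return
            if getattr(profile, 'language', None):
                translation.activate(profile.language)
                request.LANGUAGE_CODE = translation.get_language()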
4c72f9ae2886173a745e73873beb49821cbc3a3f | 691 | py | Python | streetlite/common/constants.py | s0h3ck/streetlite | 21db388702f828417dd3dc0fbfa5af757216e1e0 | ["MIT"] | null | null | null | streetlite/common/constants.py | s0h3ck/streetlite | 21db388702f828417dd3dc0fbfa5af757216e1e0 | ["MIT"] | 1 | 2021-06-01T22:23:13.000Z | 2021-06-01T22:23:13.000Z | streetlite/common/constants.py | s0h3ck/streetlite | 21db388702f828417dd3dc0fbfa5af757216e1e0 | ["MIT"] | null | null | null |
from enum import Enum
| 18.675676 | 55 | 0.622287 |
4c73a2fb986309ca0a2f6912149adaf74509a6fc | 716 | py | Python | day5.py | achien/advent-of-code-2021 | 8851e1727975ea8124db78b54fe577fbf2e5883d | ["MIT"] | null | null | null | day5.py | achien/advent-of-code-2021 | 8851e1727975ea8124db78b54fe577fbf2e5883d | ["MIT"] | null | null | null | day5.py | achien/advent-of-code-2021 | 8851e1727975ea8124db78b54fe577fbf2e5883d | ["MIT"] | null | null | null |
import fileinput
counts = {}
for line in fileinput.input():
line = line.strip()
p1, p2 = line.split('>')
p1 = p1[:-2]
x1, y1 = p1.split(',')
x1 = int(x1)
y1 = int(y1)
p2 = p2[1:]
x2, y2 = p2.split(',')
x2 = int(x2)
y2 = int(y2)
if x1 == x2:
dx = 0
elif x1 > x2:
dx = -1
else:
dx = 1
if y1 == y2:
dy = 0
elif y1 > y2:
dy = -1
else:
dy = 1
x = x1
y = y1
while True:
pt = (x, y)
counts[pt] = counts.get(pt, 0) + 1
if x == x2 and y == y2:
break
x += dx
y += dy
n = 0
for _, ct in counts.items():
if ct > 1:
n += 1
print(n)
| 15.911111 | 42 | 0.391061 |
4c73c6bd43cad4b6997238ea62e6e2c529f20e54 | 1,635 | py | Python | meditation_example.py | sodapopinsky/dfk | be48e89d4b054ad8abbb009d0e1ea4c10f559af5 | ["MIT"] | 90 | 2021-10-17T19:36:45.000Z | 2022-03-31T17:19:43.000Z | meditation_example.py | sodapopinsky/dfk | be48e89d4b054ad8abbb009d0e1ea4c10f559af5 | ["MIT"] | 13 | 2021-11-13T00:19:31.000Z | 2022-03-20T15:13:22.000Z | meditation_example.py | sodapopinsky/dfk | be48e89d4b054ad8abbb009d0e1ea4c10f559af5 | ["MIT"] | 71 | 2021-11-05T03:00:41.000Z | 2022-03-30T06:16:25.000Z |
import logging
from web3 import Web3
import sys
import time
import meditation.meditation as meditation
if __name__ == "__main__":
log_format = '%(asctime)s|%(name)s|%(levelname)s: %(message)s'
logger = logging.getLogger("DFK-meditation")
logger.setLevel(logging.DEBUG)
logging.basicConfig(level=logging.INFO, format=log_format, stream=sys.stdout)
rpc_server = 'https://api.harmony.one'
logger.info("Using RPC server " + rpc_server)
private_key = None # set private key
account_address = '0x2E7669F61eA77F02445A015FBdcFe2DE47083E02'
gas_price_gwei = 10
tx_timeout_seconds = 30
w3 = Web3(Web3.HTTPProvider(rpc_server))
active_meditations = meditation.get_active_meditations(account_address, rpc_server)
logger.info("Pending meditation on address " + str(account_address) + ": "+str(active_meditations))
level = 1
hero_id = 1
required_runes = meditation.get_required_runes(level, rpc_server)
meditation.start_meditation(1, meditation.stat2id('strength'), meditation.stat2id('endurance'), meditation.stat2id('luck'),
meditation.ZERO_ADDRESS, private_key, w3.eth.getTransactionCount(account_address),
gas_price_gwei, tx_timeout_seconds, rpc_server, logger)
hero_meditation = meditation.get_hero_meditation(hero_id, rpc_server)
logger.info("Pending meditation "+str(hero_meditation))
time.sleep(5)
meditation.complete_meditation(hero_id, private_key, w3.eth.getTransactionCount(account_address),
gas_price_gwei, tx_timeout_seconds, rpc_server, logger)
| 41.923077 | 127 | 0.720489 |
4c752c3e0e33ba7c7da469ab66cb6adfa9bb958a | 669 | py | Python | python/johnstarich/interval.py | JohnStarich/dotfiles | eaa07b09aa02fc2fa2516cebdd3628b4daf506e4 | ["Apache-2.0"] | 3 | 2018-02-28T14:22:53.000Z | 2022-01-24T02:38:22.000Z | python/johnstarich/interval.py | JohnStarich/dotfiles | eaa07b09aa02fc2fa2516cebdd3628b4daf506e4 | ["Apache-2.0"] | null | null | null | python/johnstarich/interval.py | JohnStarich/dotfiles | eaa07b09aa02fc2fa2516cebdd3628b4daf506e4 | ["Apache-2.0"] | null | null | null |
import time
| 24.777778 | 67 | 0.606876 |
4c76367fcd11568b786d20b9e43e17b970ff6e48 | 2,329 | py | Python | servers/python/coweb/bot/wrapper/object.py | opencoweb/coweb | 7b3a87ee9eda735a859447d404ee16edde1c5671 | ["AFL-2.1"] | 83 | 2015-01-05T19:02:57.000Z | 2021-11-19T02:48:09.000Z | servers/python/coweb/bot/wrapper/object.py | xuelingxiao/coweb | 7b3a87ee9eda735a859447d404ee16edde1c5671 | ["AFL-2.1"] | 3 | 2015-12-16T13:49:33.000Z | 2019-06-17T13:38:50.000Z | servers/python/coweb/bot/wrapper/object.py | xuelingxiao/coweb | 7b3a87ee9eda735a859447d404ee16edde1c5671 | ["AFL-2.1"] | 14 | 2015-04-29T22:36:53.000Z | 2021-11-18T03:24:29.000Z |
'''
Copyright (c) The Dojo Foundation 2011. All Rights Reserved.
Copyright (c) IBM Corporation 2008, 2011. All Rights Reserved.
'''
# tornado
import tornado.ioloop
# std lib
import logging
import time
import weakref
import functools
# coweb
from .base import BotWrapperBase
log = logging.getLogger('coweb.bot')
| 35.287879 | 78 | 0.653499 |
4c76baa8499aec4813a3d47e851bd3cbe62268bf | 6,193 | py | Python | battle_tut5.py | lankotiAditya/RPG_battle_main | 0063941d023ff1c18a6b050fab4d0c7ec583b11a | ["MIT"] | 22 | 2021-01-13T10:21:42.000Z | 2022-03-10T00:06:05.000Z | battle_tut5.py | lankotiAditya/RPG_battle_main | 0063941d023ff1c18a6b050fab4d0c7ec583b11a | ["MIT"] | 1 | 2021-01-14T17:02:41.000Z | 2021-01-14T20:23:38.000Z | battle_tut5.py | lankotiAditya/RPG_battle_main | 0063941d023ff1c18a6b050fab4d0c7ec583b11a | ["MIT"] | 33 | 2021-01-17T08:52:38.000Z | 2022-03-28T10:36:36.000Z |
import pygame
import random
pygame.init()
clock = pygame.time.Clock()
fps = 60
#game window
bottom_panel = 150
screen_width = 800
screen_height = 400 + bottom_panel
screen = pygame.display.set_mode((screen_width, screen_height))
pygame.display.set_caption('Battle')
#define game variables
current_fighter = 1
total_fighters = 3
action_cooldown = 0
action_wait_time = 90
attack = False
potion = False
clicked = False
#define fonts
font = pygame.font.SysFont('Times New Roman', 26)
#define colours
red = (255, 0, 0)
green = (0, 255, 0)
#load images
#background image
background_img = pygame.image.load('img/Background/background.png').convert_alpha()
#panel image
panel_img = pygame.image.load('img/Icons/panel.png').convert_alpha()
#sword image
sword_img = pygame.image.load('img/Icons/sword.png').convert_alpha()
#create function for drawing text
#function for drawing background
#function for drawing panel
#fighter class
knight = Fighter(200, 260, 'Knight', 30, 10, 3)
bandit1 = Fighter(550, 270, 'Bandit', 20, 6, 1)
bandit2 = Fighter(700, 270, 'Bandit', 20, 6, 1)
bandit_list = []
bandit_list.append(bandit1)
bandit_list.append(bandit2)
knight_health_bar = HealthBar(100, screen_height - bottom_panel + 40, knight.hp, knight.max_hp)
bandit1_health_bar = HealthBar(550, screen_height - bottom_panel + 40, bandit1.hp, bandit1.max_hp)
bandit2_health_bar = HealthBar(550, screen_height - bottom_panel + 100, bandit2.hp, bandit2.max_hp)
run = True
while run:
clock.tick(fps)
#draw background
draw_bg()
#draw panel
draw_panel()
knight_health_bar.draw(knight.hp)
bandit1_health_bar.draw(bandit1.hp)
bandit2_health_bar.draw(bandit2.hp)
#draw fighters
knight.update()
knight.draw()
for bandit in bandit_list:
bandit.update()
bandit.draw()
#control player actions
#reset action variables
attack = False
potion = False
target = None
#make sure mouse is visible
pygame.mouse.set_visible(True)
pos = pygame.mouse.get_pos()
for count, bandit in enumerate(bandit_list):
if bandit.rect.collidepoint(pos):
#hide mouse
pygame.mouse.set_visible(False)
#show sword in place of mouse cursor
screen.blit(sword_img, pos)
if clicked == True:
attack = True
target = bandit_list[count]
#player action
if knight.alive == True:
if current_fighter == 1:
action_cooldown += 1
if action_cooldown >= action_wait_time:
#look for player action
#attack
if attack == True and target != None:
knight.attack(target)
current_fighter += 1
action_cooldown = 0
#enemy action
for count, bandit in enumerate(bandit_list):
if current_fighter == 2 + count:
if bandit.alive == True:
action_cooldown += 1
if action_cooldown >= action_wait_time:
#attack
bandit.attack(knight)
current_fighter += 1
action_cooldown = 0
else:
current_fighter += 1
#if all fighters have had a turn then reset
if current_fighter > total_fighters:
current_fighter = 1
for event in pygame.event.get():
if event.type == pygame.QUIT:
run = False
if event.type == pygame.MOUSEBUTTONDOWN:
clicked = True
else:
clicked = False
pygame.display.update()
pygame.quit()
| 23.911197 | 101 | 0.707089 |
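Note: the comments in the row ("#create function for drawing text", "#fighter class", and so on) mark where `draw_bg`, `draw_panel`, `Fighter`, and `HealthBar` were defined before truncation. A minimal reconstruction that lets the game loop run; sprite loading, animation, and the damage formula are simplified assumptions:

def draw_bg():
    screen.blit(background_img, (0, 0))

def draw_panel():
    screen.blit(panel_img, (0, screen_height - bottom_panel))

class HealthBar():
    def __init__(self, x, y, hp, max_hp):
        self.x, self.y, self.hp, self.max_hp = x, y, hp, max_hp

    def draw(self, hp):
        self.hp = hp
        ratio = self.hp / self.max_hp
        pygame.draw.rect(screen, red, (self.x, self.y, 150, 20))
        pygame.draw.rect(screen, green, (self.x, self.y, 150 * ratio, 20))

class Fighter():
    def __init__(self, x, y, name, max_hp, strength, potions):
        self.name, self.strength = name, strength
        self.max_hp = self.hp = max_hp
        self.potions = potions
        self.alive = True
        img = pygame.image.load(f'img/{self.name}/Idle/0.png').convert_alpha()  # assumed asset layout
        self.image = pygame.transform.scale(img, (img.get_width() * 3, img.get_height() * 3))
        self.rect = self.image.get_rect(center=(x, y))

    def attack(self, target):
        damage = self.strength + random.randint(-5, 5)
        target.hp -= damage
        if target.hp < 1:
            target.hp = 0
            target.alive = False

    def update(self):
        pass  # the tutorial cycles animation frames here

    def draw(self):
        screen.blit(self.image, self.rect)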
4c791be103564830f1d4250200840c0dccc964ac | 651 | py | Python | curso_em_video/0087a.py | marinaoliveira96/python-exercises | 13fc0ec30dec9bb6531cdeb41c80726971975835 | ["MIT"] | null | null | null | curso_em_video/0087a.py | marinaoliveira96/python-exercises | 13fc0ec30dec9bb6531cdeb41c80726971975835 | ["MIT"] | null | null | null | curso_em_video/0087a.py | marinaoliveira96/python-exercises | 13fc0ec30dec9bb6531cdeb41c80726971975835 | ["MIT"] | null | null | null |
matriz = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]
soma = col3 = maior = 0
for l in range(0, 3):
for c in range(0, 3):
matriz[l][c] = int(input(f'[{l}][{c}]: '))
for l in range(0, 3):
for c in range(0, 3):
print(f'[{matriz[l][c]:^5}]', end='')
if matriz[l][c] % 2 == 0:
soma += matriz[l][c]
print()
for l in range(0, 3):
col3 += matriz[l][2]
for c in range(0, 3):
if c == 0:
maior = matriz[1][c]
elif matriz[1][c] > maior:
maior = matriz[1][c]
print(f'Sum of the even numbers: {soma}')
print(f'Sum of the values in the 3rd column: {col3}')
print(f'Largest number in the 2nd row: {maior}')
| 31 | 50 | 0.506912 |
4c79ab828e049f734329ac9fd7817c526a06676d | 6,777 | py | Python | custom_components/tapo_control/utils.py | david-kalbermatten/HomeAssistant-Tapo-Control | 3f9f8316cf7e176bb6f8d798d709f3c6d346a527 | ["Apache-2.0"] | null | null | null | custom_components/tapo_control/utils.py | david-kalbermatten/HomeAssistant-Tapo-Control | 3f9f8316cf7e176bb6f8d798d709f3c6d346a527 | ["Apache-2.0"] | null | null | null | custom_components/tapo_control/utils.py | david-kalbermatten/HomeAssistant-Tapo-Control | 3f9f8316cf7e176bb6f8d798d709f3c6d346a527 | ["Apache-2.0"] | null | null | null |
import onvif
import os
import asyncio
import urllib.parse
from onvif import ONVIFCamera
from pytapo import Tapo
from .const import ENABLE_MOTION_SENSOR, DOMAIN, LOGGER, CLOUD_PASSWORD
from homeassistant.const import CONF_IP_ADDRESS, CONF_USERNAME, CONF_PASSWORD
from homeassistant.components.onvif.event import EventManager
from homeassistant.components.ffmpeg import DATA_FFMPEG
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
| 34.93299 | 86 | 0.665044 |
4c79db5803090229f5cee46e595e5f692bd63c32 | 1,652 | py | Python | camd3/infrastructure/component/tests/test_uidattr.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | ["BSD-2-Clause"] | null | null | null | camd3/infrastructure/component/tests/test_uidattr.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | ["BSD-2-Clause"] | null | null | null | camd3/infrastructure/component/tests/test_uidattr.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | ["BSD-2-Clause"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Name: test_uidattr
# Purpose: Test driver for module 'uidattr'
#
# Author: Michael Amrhein ([email protected])
#
# Copyright: (c) 2018 Michael Amrhein
# ----------------------------------------------------------------------------
# $Source$
# $Revision$
"""Test driver for module 'uidattr'"""
import unittest
from uuid import uuid1
from camd3.infrastructure.component import (
Component, register_utility, UniqueIdAttribute)
from camd3.infrastructure.component.idfactories import (
UUIDGenerator, uuid_generator)
# factory for UUIDs
if __name__ == '__main__': # pragma: no cover
unittest.main()
| 23.6 | 78 | 0.578087 |
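Note: the trailing "# factory for UUIDs" comment is followed by nothing — the setup and test cases were truncated. A heavily hedged sketch of what they might look like; the `register_utility(utility, interface)` signature and the attribute usage are assumptions about this project's API:

class ExampleEntity(Component):
    id = UniqueIdAttribute()

class TestUniqueIdAttribute(unittest.TestCase):
    def setUp(self):
        # Assumption: expose a UUID generator as the UUIDGenerator utility.
        register_utility(uuid_generator(), UUIDGenerator)

    def test_ids_are_unique(self):
        entity1, entity2 = ExampleEntity(), ExampleEntity()
        self.assertNotEqual(entity1.id, entity2.id)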
4c7a9873c160d856f0a448855b2b79215e8191fc | 883 | py | Python | s.py | tn012604409/HW3_chatRobot | 97762e53bfccd8b30c6b263792919c679e53b404 | ["MIT"] | null | null | null | s.py | tn012604409/HW3_chatRobot | 97762e53bfccd8b30c6b263792919c679e53b404 | ["MIT"] | null | null | null | s.py | tn012604409/HW3_chatRobot | 97762e53bfccd8b30c6b263792919c679e53b404 | ["MIT"] | null | null | null |
import requests
import time
from bs4 import BeautifulSoup
| 22.641026 | 76 | 0.551529 |
4c7abb53711251283db1d2b1869388b7608f3858 | 21,493 | py | Python | awstin/dynamodb/orm.py | k2bd/awstin | 7360cc20d3c72a6aa87de57146b9c5f4247c58d5 | ["MIT"] | 1 | 2020-12-29T20:49:27.000Z | 2020-12-29T20:49:27.000Z | awstin/dynamodb/orm.py | k2bd/awstin | 7360cc20d3c72a6aa87de57146b9c5f4247c58d5 | ["MIT"] | 69 | 2020-11-16T21:16:44.000Z | 2021-04-14T17:16:33.000Z | awstin/dynamodb/orm.py | k2bd/awstin | 7360cc20d3c72a6aa87de57146b9c5f4247c58d5 | ["MIT"] | null | null | null |
import uuid
from abc import ABC, abstractmethod
from collections import defaultdict
from typing import Union
from boto3.dynamodb.conditions import Attr as BotoAttr
from boto3.dynamodb.conditions import Key as BotoKey
from awstin.dynamodb.utils import from_decimal, to_decimal
NOT_SET = NotSet()
def size_query(self, *args, **kwargs):
return BotoAttr(self._awstin_name).size()
# ---- Update Operators
class CombineOperator(UpdateOperator):
"""
Combine two update expressions
"""
class SetOperator(UpdateOperator):
"""
Support for SET
"""
class AddOperator(UpdateOperator):
class RemoveOperator(UpdateOperator):
class DeleteOperator(UpdateOperator):
# ---- Update Operands
def list_append(left, right):
"""
Set a value to the combination of two lists in an update expression
"""
return ListAppendOperand(UpdateOperand(left), UpdateOperand(right))
| 27.912987 | 88 | 0.579258 |
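Note: the row keeps class names and docstrings but drops the scaffolding they depend on — `NotSet`, `UpdateOperator`, `UpdateOperand`, and `ListAppendOperand` are all referenced yet undefined here, and `size_query` reads like a method detached from its class. A minimal sketch of the missing pieces so the surviving names resolve (in the original file these definitions precede their first use); the bodies are placeholders, not the library's real logic:

class NotSet:
    """Sentinel for table attributes that have no value set (sketch)."""
    def __repr__(self):
        return '<<Attribute not set>>'

class UpdateOperator(ABC):
    """Base class for update-expression operators (sketch)."""
    @abstractmethod
    def expression(self):
        ...

class UpdateOperand:
    """Wraps a value or attribute reference used in an update expression (sketch)."""
    def __init__(self, value):
        self.value = value

class ListAppendOperand(UpdateOperand):
    """Represents list_append(left, right) in an update expression (sketch)."""
    def __init__(self, left, right):
        self.left, self.right = left, right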
d5b22ea34f0bbc299fab73839184251258eecd69 | 310 | py | Python | Losses/__init__.py | SimonTheVillain/ActiveStereoNet | 708bddce844998b366be1a1ec8a72a31ccd26f8c | ["MIT"] | 17 | 2019-08-23T04:00:32.000Z | 2022-02-06T13:37:02.000Z | Losses/__init__.py | SimonTheVillain/ActiveStereoNet | 708bddce844998b366be1a1ec8a72a31ccd26f8c | ["MIT"] | null | null | null | Losses/__init__.py | SimonTheVillain/ActiveStereoNet | 708bddce844998b366be1a1ec8a72a31ccd26f8c | ["MIT"] | 7 | 2019-12-20T07:46:41.000Z | 2021-11-01T04:18:19.000Z |
from .supervise import *
| 22.142857 | 79 | 0.580645 |
d5b25fcda4db3927e0504a3caa222468f8e2eb7c | 6,766 | py | Python | model/src/recurrent.py | qkaren/converse_reading_cmr | d06d981be12930cff8458e2b1b81be4f5df3a329 | ["MIT"] | 87 | 2019-06-07T18:16:30.000Z | 2021-11-27T08:18:45.000Z | model/src/recurrent.py | qkaren/converse_reading_cmr | d06d981be12930cff8458e2b1b81be4f5df3a329 | ["MIT"] | 11 | 2019-06-19T20:53:27.000Z | 2021-05-07T01:05:01.000Z | model/src/recurrent.py | qkaren/converse_reading_cmr | d06d981be12930cff8458e2b1b81be4f5df3a329 | ["MIT"] | 17 | 2019-06-08T01:50:23.000Z | 2022-02-16T07:12:15.000Z |
import torch
import torch.nn as nn
from torch.nn.parameter import Parameter
from torch.nn.utils.rnn import pad_packed_sequence as unpack
from torch.nn.utils.rnn import pack_padded_sequence as pack
from .my_optim import weight_norm as WN
# TODO: use system func to bind ~
RNN_MAP = {'lstm': nn.LSTM, 'gru': nn.GRU, 'rnn': nn.RNN}
#------------------------------
# Contextual embedding
# TODO: remove packing to speed up
# Credit from: https://github.com/salesforce/cove
#------------------------------
| 45.716216 | 134 | 0.619125 |
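Note: only the imports and the "Contextual embedding" banner survive; the encoder classes are truncated. A generic sketch in the file's style, using its `RNN_MAP` and the `pack`/`unpack` aliases (layer sizes and naming are assumptions):

class ContextualEmbed(nn.Module):
    """Hedged sketch: packed-sequence bidirectional RNN encoder (cove-style)."""
    def __init__(self, input_size, hidden_size, rnn_type='lstm', num_layers=1):
        super(ContextualEmbed, self).__init__()
        self.rnn = RNN_MAP[rnn_type](input_size, hidden_size, num_layers,
                                     batch_first=True, bidirectional=True)

    def forward(self, x, lengths):
        # Pack the padded batch, run the RNN, and unpack back to padded form.
        packed = pack(x, lengths, batch_first=True, enforce_sorted=False)
        outputs, _ = self.rnn(packed)
        outputs, _ = unpack(outputs, batch_first=True)
        return outputs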
d5b27d5f6e6878759cb3ab473c4702b3507a5b67 | 2,810 | py | Python | kmcsim/sim/events_old.py | vlcekl/kmcpy | b55a23f64d4b6d2871671f4a16346cc897c4a2a5 | ["MIT"] | null | null | null | kmcsim/sim/events_old.py | vlcekl/kmcpy | b55a23f64d4b6d2871671f4a16346cc897c4a2a5 | ["MIT"] | null | null | null | kmcsim/sim/events_old.py | vlcekl/kmcpy | b55a23f64d4b6d2871671f4a16346cc897c4a2a5 | ["MIT"] | null | null | null |
#!//anaconda/envs/py36/bin/python
#
# File name: kmc_pld.py
# Date: 2018/08/03 09:07
# Author: Lukas Vlcek
#
# Description:
#
import numpy as np
from collections import Counter
| 26.509434 | 82 | 0.56548 |
d5b2899060598acf5361fb2c9db968e61435c9da | 2,181 | py | Python | env/lib/python3.6/site-packages/odf/meta.py | anthowen/duplify | 846d01c1b21230937fdf0281b0cf8c0b08a8c24e | ["MIT"] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | env/lib/python3.6/site-packages/odf/meta.py | anthowen/duplify | 846d01c1b21230937fdf0281b0cf8c0b08a8c24e | ["MIT"] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | env/lib/python3.6/site-packages/odf/meta.py | anthowen/duplify | 846d01c1b21230937fdf0281b0cf8c0b08a8c24e | ["MIT"] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z |
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from odf.namespaces import METANS
from odf.element import Element
# Autogenerated
| 32.073529 | 80 | 0.707474 |
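Note: the "# Autogenerated" marker shows where odfpy's generated element constructors were cut off; they all follow one pattern. A representative pair (the full generated list for the meta namespace is elided):

def AutoReload(**args):
    return Element(qname=(METANS, 'auto-reload'), **args)

def Generator(**args):
    return Element(qname=(METANS, 'generator'), **args)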
d5b2a5e3c1f4caec8e1b4e760aef349c24f989cf | 7,293 | py | Python | scripts/my_inference.py | Mr-TalhaIlyas/Scaled-YOLOv4 | 2b0326a6bc1eba386eb1a78b56727dcf29c77bac | ["MIT"] | null | null | null | scripts/my_inference.py | Mr-TalhaIlyas/Scaled-YOLOv4 | 2b0326a6bc1eba386eb1a78b56727dcf29c77bac | ["MIT"] | null | null | null | scripts/my_inference.py | Mr-TalhaIlyas/Scaled-YOLOv4 | 2b0326a6bc1eba386eb1a78b56727dcf29c77bac | ["MIT"] | null | null | null |
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '2'
import torch
torch.rand(10)
import torch.nn as nn
import torch.nn.functional as F
import glob
from tqdm import tqdm, trange
print(torch.cuda.is_available())
print(torch.cuda.get_device_name())
print(torch.cuda.current_device())
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print('Using device:', device)
print()
#Additional Info when using cuda
if device.type == 'cuda':
print(torch.cuda.get_device_name(0))
print('Memory Usage:')
print('Allocated:', round(torch.cuda.memory_allocated(0)/1024**3,1), 'GB')
print('Cached: ', round(torch.cuda.memory_reserved(0)/1024**3,1), 'GB')
import torch.backends.cudnn as cudnn
import numpy as np
import os, cv2
from tqdm import tqdm, trange
import seaborn as sns
from models.experimental import attempt_load
from utils.datasets import LoadStreams, LoadImages
from utils.general import (
check_img_size, non_max_suppression, apply_classifier, scale_coords, xyxy2xywh, plot_one_box, strip_optimizer)
from utils.torch_utils import select_device, load_classifier, time_synchronized
from my_utils import xyxy_2_xyxyo, draw_boxes
# Initialize
device = select_device('')
half = device.type != 'cpu' # half precision only supported on CUDA
#%%
# Directories
out = '/home/user01/data_ssd/Talha/yolo/op/'
weights = '/home/user01/data_ssd/Talha/yolo/ScaledYOLOv4/runs/exp2_yolov4-csp-results/weights/best_yolov4-csp-results.pt'
source = '/home/user01/data_ssd/Talha/yolo/paprika_y5/valid/images/'
imgsz = 416
conf_thres = 0.4
iou_thres = 0.5
classes = [0,1,2,3,4,5]
class_names = ["blossom_end_rot", "graymold","powdery_mildew","spider_mite",
"spotting_disease", "snails_and_slugs"]
# deleting files in op_dir
filelist = [ f for f in os.listdir(out)]# if f.endswith(".png") ]
for f in tqdm(filelist, desc = 'Deleting old files from directory'):
os.remove(os.path.join(out, f))
# Load model
model = attempt_load(weights, map_location=device) # load FP32 model
imgsz = check_img_size(imgsz, s=model.stride.max()) # check img_size
if half:
model.half() # to FP16
img_paths = glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.png') + \
glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.jpg')
# Run inference
if device.type != 'cpu':
model(torch.zeros(1, 3, imgsz, imgsz).to(device).type_as(next(model.parameters()))) # run once
#%%
for i in trange(len(img_paths)):
path = img_paths[i]
img1 = cv2.imread(path)
img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2RGB)
img_h, img_w, _ = img1.shape
img2 = prepare_input(img1, 416, half)
# get file name
name = os.path.basename(path)[:-4]
# Inference
t1 = time_synchronized()
pred = model(img2, augment=False)[0]
# Apply NMS
pred = non_max_suppression(pred, conf_thres, iou_thres, classes=classes, agnostic=True)
if pred[0] is not None:
boxes = pred[0].cpu().detach().numpy() # <xmin><ymin><xmax><ymax><confd><class_id>
else:
boxes = np.array([10.0, 20.0, 30.0, 50.0, 0.75, 0]).reshape(1,6) # dummy values
coords_minmax = np.zeros((boxes.shape[0], 4)) # droping 5th value
confd = np.zeros((boxes.shape[0], 1))
class_ids = np.zeros((boxes.shape[0], 1))
# assign
coords_minmax = boxes[:,0:4] # coords
confd = boxes[:,4] # confidence
class_ids = boxes[:,5] # class id
coords_xyminmax = []
det_classes = []
for i in range(boxes.shape[0]):
coords_xyminmax.append(xyxy_2_xyxyo(img_w, img_h, coords_minmax[i]))
det_classes.append(class_names[int(class_ids[i])])
all_bounding_boxnind = []
for i in range(boxes.shape[0]):
bounding_box = [0.0] * 6
bounding_box[0] = det_classes[i]
bounding_box[1] = confd[i]
bounding_box[2] = coords_xyminmax[i][0]
bounding_box[3] = coords_xyminmax[i][1]
bounding_box[4] = coords_xyminmax[i][2]
bounding_box[5] = coords_xyminmax[i][3]
bounding_box = str(bounding_box)[1:-1]# remove square brackets
bounding_box = bounding_box.replace("'",'')# removing inverted commas around class name
bounding_box = "".join(bounding_box.split())# remove spaces in between **here dont give space inbetween the inverted commas "".
all_bounding_boxnind.append(bounding_box)
all_bounding_boxnind = ' '.join(map(str, all_bounding_boxnind))# convert list to string
all_bounding_boxnind=list(all_bounding_boxnind.split(' ')) # convert strin to list
# replacing commas with spaces
for i in range(len(all_bounding_boxnind)):
all_bounding_boxnind[i] = all_bounding_boxnind[i].replace(',',' ')
for i in range(len(all_bounding_boxnind)):
        # open in append mode; the file is created if it does not exist yet
with open(out +'{}.txt'.format(name), "a+") as file_object:
# Move read cursor to the start of file.
file_object.seek(0)
# If file is not empty then append '\n'
data = file_object.read(100)
if len(data) > 0 :
file_object.write("\n")
# Append text at the end of file
file_object.write(all_bounding_boxnind[i])
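# Illustrative output (values made up, not from a real run): after this loop each
# <name>.txt holds one detection per line in the form
#   <class_name> <confidence> <xmin> <ymin> <xmax> <ymax>
# e.g.
#   graymold 0.82 103 57 412 388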
#%%
import glob, random
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['figure.dpi'] = 300
img_paths = glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.png') + \
glob.glob('/home/user01/data_ssd/Talha/yolo/paprika_y5/test/images/*.jpg')
img_path = random.choice(img_paths)
img1 = cv2.imread(img_path)
img1 = cv2.cvtColor(img1, cv2.COLOR_BGR2RGB)
img_h, img_w, _ = img1.shape
img2 = prepare_input(img1, 416, half)
pred = model(img2, augment=False)[0]
# Apply NMS
pred = non_max_suppression(pred, conf_thres, iou_thres, classes=classes, agnostic=True)
boxes = pred[0].cpu().detach().numpy() # <xmin><ymin><xmax><ymax><confd><class_id>
coords_minmax = np.zeros((boxes.shape[0], 4)) # dropping 5th value
confd = np.zeros((boxes.shape[0], 1))
class_ids = np.zeros((boxes.shape[0], 1))
# assign
coords_minmax = boxes[:,0:4] # coords
confd = boxes[:,4] # confidence
class_ids = boxes[:,5] # class id
coords_xyminmax = []
det_classes = []
for i in range(boxes.shape[0]):
coords_xyminmax.append(xyxy_2_xyxyo(img_w, img_h, coords_minmax[i]))
det_classes.append(class_names[int(class_ids[i])])
t = np.asarray(coords_xyminmax)
op = draw_boxes(img1, confd, t, det_classes, class_names, order='xy_minmax', analysis=False)
plt.imshow(op)
print('='*50)
print('Image Name: ', os.path.basename(img_path),img1.shape)
print('\nClass_name ', '| B_box Coords ', '| Confidence')
print('_'*50)
for k in range(len(det_classes)):
print(det_classes[k], t[k], confd[k])
print('='*50)
| 36.833333 | 135 | 0.680927 |
d5b2ddd3598b303bcb8230980f8ef5b2b4388ef0
| 5,712 |
py
|
Python
|
src/tests/unit/fixtures/endpoint_standard/mock_recommendation.py
|
fslds/carbon-black-cloud-sdk-python
|
248a3c63d6b36d6fcdbcb3f51fb7751f062ed372
|
[
"MIT"
] | 24 |
2020-10-16T22:07:38.000Z
|
2022-03-24T14:58:03.000Z
|
src/tests/unit/fixtures/endpoint_standard/mock_recommendation.py
|
fslds/carbon-black-cloud-sdk-python
|
248a3c63d6b36d6fcdbcb3f51fb7751f062ed372
|
[
"MIT"
] | 63 |
2020-10-26T18:26:15.000Z
|
2022-03-31T17:31:02.000Z
|
src/tests/unit/fixtures/endpoint_standard/mock_recommendation.py
|
fslds/carbon-black-cloud-sdk-python
|
248a3c63d6b36d6fcdbcb3f51fb7751f062ed372
|
[
"MIT"
] | 10 |
2020-11-09T11:54:23.000Z
|
2022-03-24T20:44:00.000Z
|
"""Mock responses for recommendations."""
SEARCH_REQ = {
"criteria": {
"policy_type": ['reputation_override'],
"status": ['NEW', 'REJECTED', 'ACCEPTED'],
"hashes": ['111', '222']
},
"rows": 50,
"sort": [
{
"field": "impact_score",
"order": "DESC"
}
]
}
SEARCH_RESP = {
"results": [
{
"recommendation_id": "91e9158f-23cc-47fd-af7f-8f56e2206523",
"rule_type": "reputation_override",
"policy_id": 0,
"new_rule": {
"override_type": "SHA256",
"override_list": "WHITE_LIST",
"sha256_hash": "32d2be78c00056b577295aa0943d97a5c5a0be357183fcd714c7f5036e4bdede",
"filename": "XprotectService",
"application": {
"type": "EXE",
"value": "FOO"
}
},
"workflow": {
"status": "NEW",
"changed_by": "[email protected]",
"create_time": "2021-05-18T16:37:07.000Z",
"update_time": "2021-08-31T20:53:39.000Z",
"comment": "Ours is the fury"
},
"impact": {
"org_adoption": "LOW",
"impacted_devices": 45,
"event_count": 76,
"impact_score": 0,
"update_time": "2021-05-18T16:37:07.000Z"
}
},
{
"recommendation_id": "bd50c2b2-5403-4e9e-8863-9991f70df026",
"rule_type": "reputation_override",
"policy_id": 0,
"new_rule": {
"override_type": "SHA256",
"override_list": "WHITE_LIST",
"sha256_hash": "0bbc082cd8b3ff62898ad80a57cb5e1f379e3fcfa48fa2f9858901eb0c220dc0",
"filename": "sophos ui.msi"
},
"workflow": {
"status": "NEW",
"changed_by": "[email protected]",
"create_time": "2021-05-18T16:37:07.000Z",
"update_time": "2021-08-31T20:53:09.000Z",
"comment": "Always pay your debts"
},
"impact": {
"org_adoption": "HIGH",
"impacted_devices": 8,
"event_count": 25,
"impact_score": 0,
"update_time": "2021-05-18T16:37:07.000Z"
}
},
{
"recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68",
"rule_type": "reputation_override",
"policy_id": 0,
"new_rule": {
"override_type": "SHA256",
"override_list": "WHITE_LIST",
"sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124",
"filename": "mimecast for outlook 7.8.0.125 (x86).msi"
},
"workflow": {
"status": "NEW",
"changed_by": "[email protected]",
"create_time": "2021-05-18T16:37:07.000Z",
"update_time": "2021-08-31T15:13:40.000Z",
"comment": "Winter is coming"
},
"impact": {
"org_adoption": "MEDIUM",
"impacted_devices": 45,
"event_count": 79,
"impact_score": 0,
"update_time": "2021-05-18T16:37:07.000Z"
}
}
],
"num_found": 3
}
ACTION_INIT = {
"recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68",
"rule_type": "reputation_override",
"policy_id": 0,
"new_rule": {
"override_type": "SHA256",
"override_list": "WHITE_LIST",
"sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124",
"filename": "mimecast for outlook 7.8.0.125 (x86).msi"
},
"workflow": {
"status": "NEW",
"changed_by": "[email protected]",
"create_time": "2021-05-18T16:37:07.000Z",
"update_time": "2021-08-31T15:13:40.000Z",
"comment": "Winter is coming"
},
"impact": {
"org_adoption": "MEDIUM",
"impacted_devices": 45,
"event_count": 79,
"impact_score": 0,
"update_time": "2021-05-18T16:37:07.000Z"
}
}
ACTION_REQS = [
{
"action": "ACCEPT",
"comment": "Alpha"
},
{
"action": "RESET"
},
{
"action": "REJECT",
"comment": "Charlie"
},
]
ACTION_REFRESH_SEARCH = {
"criteria": {
"status": ['NEW', 'REJECTED', 'ACCEPTED'],
"policy_type": ['reputation_override']
},
"rows": 50
}
ACTION_SEARCH_RESP = {
"results": [ACTION_INIT],
"num_found": 1
}
ACTION_REFRESH_STATUS = ['ACCEPTED', 'NEW', 'REJECTED']
ACTION_INIT_ACCEPTED = {
"recommendation_id": "0d9da444-cfa7-4488-9fad-e2abab099b68",
"rule_type": "reputation_override",
"policy_id": 0,
"new_rule": {
"override_type": "SHA256",
"override_list": "WHITE_LIST",
"sha256_hash": "2272c5221e90f9762dfa38786da01b36a28a7da5556b07dec3523d1abc292124",
"filename": "mimecast for outlook 7.8.0.125 (x86).msi"
},
"workflow": {
"status": "ACCEPTED",
"ref_id": "e9410b754ea011ebbfd0db2585a41b07",
"changed_by": "[email protected]",
"create_time": "2021-05-18T16:37:07.000Z",
"update_time": "2021-08-31T15:13:40.000Z",
"comment": "Winter is coming"
},
"impact": {
"org_adoption": "MEDIUM",
"impacted_devices": 45,
"event_count": 79,
"impact_score": 0,
"update_time": "2021-05-18T16:37:07.000Z"
}
}
| 31.043478 | 98 | 0.500525 |
d5b36222e5f117b24edaf10265aa3e6b8fc6c46c
| 7,351 |
py
|
Python
|
monasca/microservice/notification_engine.py
|
TeamZenith/python-monasca
|
badc86fbe2c4424deb15b84eabd3248e899ef4ee
|
[
"Apache-2.0"
] | null | null | null |
monasca/microservice/notification_engine.py
|
TeamZenith/python-monasca
|
badc86fbe2c4424deb15b84eabd3248e899ef4ee
|
[
"Apache-2.0"
] | null | null | null |
monasca/microservice/notification_engine.py
|
TeamZenith/python-monasca
|
badc86fbe2c4424deb15b84eabd3248e899ef4ee
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Carnegie Mellon University
#
# Author: Han Chen <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import json
from oslo.config import cfg
from stevedore import driver
from monasca.common import es_conn
from monasca.common import email_sender
from monasca.common import kafka_conn
from monasca.openstack.common import log
from monasca.openstack.common import service as os_service
es_opts = [
cfg.StrOpt('topic',
default='alarm',
help=('The topic that messages will be retrieved from.'
'This also will be used as a doc type when saved '
'to ElasticSearch.')),
cfg.StrOpt('topic2',
default='notification_methods',
help=('The topic that messages will be retrieved from.'
'This also will be used as a doc type when saved '
'to ElasticSearch.')),
cfg.StrOpt('doc_type',
default='',
help=('The document type which defines what document '
'type the messages will be save into. If not '
'specified, then the topic will be used.')),
cfg.StrOpt('processor',
default='',
               help=('The message processor to load to process the message. '
                     'If the message does not need to be processed, '
                     'leave the default.')),
]
es_group = cfg.OptGroup(name='notification', title='notification')
cfg.CONF.register_group(es_group)
cfg.CONF.register_opts(es_opts, es_group)
LOG = log.getLogger(__name__)
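# Illustrative access pattern for the options registered above (values are read
# from the [notification] section of the service config at runtime):
#   cfg.CONF.notification.topic      -> 'alarm' by default
#   cfg.CONF.notification.processor  -> '' by default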
| 39.951087 | 119 | 0.522786 |
d5b52472e7e5df33cf0c5865ffdc86c08a3ea627
| 1,881 |
py
|
Python
|
dhf_wrapper/base_client.py
|
Enflow-io/dhf-pay-python
|
7c32461d3b2a5018151b2a16a0cc0ad6850b88b1
|
[
"Apache-2.0"
] | null | null | null |
dhf_wrapper/base_client.py
|
Enflow-io/dhf-pay-python
|
7c32461d3b2a5018151b2a16a0cc0ad6850b88b1
|
[
"Apache-2.0"
] | null | null | null |
dhf_wrapper/base_client.py
|
Enflow-io/dhf-pay-python
|
7c32461d3b2a5018151b2a16a0cc0ad6850b88b1
|
[
"Apache-2.0"
] | null | null | null |
from typing import Optional, Callable
import requests
from requests.auth import AuthBase
from requests.exceptions import RequestException
| 25.767123 | 94 | 0.576289 |
d5b58f8a34e9535374ceecc69e4b47358c97ddb9
| 1,395 |
py
|
Python
|
flametree/utils.py
|
Edinburgh-Genome-Foundry/Flametree
|
a189de5d83ca1eb3526a439320e41df9e2a1162e
|
[
"MIT"
] | 165 |
2017-02-04T00:40:01.000Z
|
2021-06-08T03:51:58.000Z
|
flametree/utils.py
|
Edinburgh-Genome-Foundry/Flametree
|
a189de5d83ca1eb3526a439320e41df9e2a1162e
|
[
"MIT"
] | 8 |
2017-02-10T00:47:09.000Z
|
2021-05-30T04:38:41.000Z
|
flametree/utils.py
|
Edinburgh-Genome-Foundry/Flametree
|
a189de5d83ca1eb3526a439320e41df9e2a1162e
|
[
"MIT"
] | 19 |
2017-02-09T17:38:31.000Z
|
2021-03-23T16:04:32.000Z
|
import os
import shutil
from .ZipFileManager import ZipFileManager
from .DiskFileManager import DiskFileManager
from .Directory import Directory
import string
printable = set(string.printable) - set("\x0b\x0c")
def file_tree(target, replace=False):
"""Open a connection to a file tree which can be either a disk folder, a
zip archive, or an in-memory zip archive.
Parameters
----------
target
Either the path to a target folder, or a zip file, or '@memory' to write
a zip file in memory (at which case a string of the zip file is returned)
If the target is already a flametree directory, it is returned as-is.
replace
If True, will remove the target if it already exists. If False, new files
will be written inside the target and some files may be overwritten.
"""
if isinstance(target, Directory):
return target
if (not isinstance(target, str)) or is_hex(target):
return Directory(file_manager=ZipFileManager(source=target))
elif target == "@memory":
return Directory("@memory", file_manager=ZipFileManager("@memory"))
elif target.lower().endswith(".zip"):
return Directory(target, file_manager=ZipFileManager(target, replace=replace))
else:
return Directory(target, file_manager=DiskFileManager(target))
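# Usage sketch based only on the function above (targets resolve as documented):
#   file_tree("my_folder")    -> Directory backed by DiskFileManager
#   file_tree("archive.zip")  -> Directory backed by ZipFileManager
#   file_tree("@memory")      -> in-memory zip Directory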
| 32.44186 | 86 | 0.703226 |
d5b74bc11e212074f29e2869fb5c41c2c3cd585b
| 628 |
py
|
Python
|
audio/audio_client.py
|
artigianitecnologici/marrtino_apps
|
b58bf4daa1d06db2f1c8a47be02b29948d41f48d
|
[
"BSD-4-Clause"
] | null | null | null |
audio/audio_client.py
|
artigianitecnologici/marrtino_apps
|
b58bf4daa1d06db2f1c8a47be02b29948d41f48d
|
[
"BSD-4-Clause"
] | null | null | null |
audio/audio_client.py
|
artigianitecnologici/marrtino_apps
|
b58bf4daa1d06db2f1c8a47be02b29948d41f48d
|
[
"BSD-4-Clause"
] | null | null | null |
import sys
import socket
import time
ip = '127.0.0.1'
port = 9001
if (len(sys.argv)>1):
ip = sys.argv[1]
if (len(sys.argv)>2):
port = int(sys.argv[2])
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip,port))
sock.send(b'bip\n\r')
data = sock.recv(80)
print(data)
sock.send(b'TTS[it-IT] ciao, come stai?\n\r')
data = sock.recv(80)
print(data)
sock.send(b'TTS[en-US] very well, thank you!\n\r')
data = sock.recv(80)
print(data)
sock.send(b'TTS default language is english!\n\r')
data = sock.recv(80)
print(data)
sock.send(b'bop\n\r')
data = sock.recv(80)
print(data)
time.sleep(1)
sock.close()
| 14.604651 | 56 | 0.66879 |
d5b8242c634dcf60f9e745fdadd1c86fe716bf6e
| 3,461 |
py
|
Python
|
qmotor/message/matcher.py
|
yulinfeng000/qmotor
|
ad3e9eea291f5b87e09fcdd5e42f1eb13d752565
|
[
"MIT"
] | null | null | null |
qmotor/message/matcher.py
|
yulinfeng000/qmotor
|
ad3e9eea291f5b87e09fcdd5e42f1eb13d752565
|
[
"MIT"
] | null | null | null |
qmotor/message/matcher.py
|
yulinfeng000/qmotor
|
ad3e9eea291f5b87e09fcdd5e42f1eb13d752565
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from typing import List
from .common import (
AtCell,
BasicMessage,
GroupMessage,
FriendMessage,
MsgCellType,
MessageType,
PlainCell,
)
from ..utils import is_str_blank, str_contains
if __name__ == "__main__":
msg_matcher = JustAtMeMsg(123)
msg = {
"type": "GroupMessage",
"sender": {"id": 123, "nickname": "", "remark": ""},
"messageChain": [
{"type": "Source", "id": 123456, "time": 123456},
{"type": "At", "target": 1234, "display": "@Mirai"},
{"type": "Plain", "text": " "},
],
}
print(msg_matcher.match(Ctx(msg)))
| 25.637037 | 74 | 0.612251 |
d5b96915a161658ab58f977d3518461eda8624b2
| 1,407 |
py
|
Python
|
main/admin.py
|
sinahmr/childf
|
4e01f46867425b36b6431713b79debf585d69d37
|
[
"MIT"
] | null | null | null |
main/admin.py
|
sinahmr/childf
|
4e01f46867425b36b6431713b79debf585d69d37
|
[
"MIT"
] | null | null | null |
main/admin.py
|
sinahmr/childf
|
4e01f46867425b36b6431713b79debf585d69d37
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin
from django.contrib.auth.models import Group
from django.utils.translation import ugettext_lazy as _
from main.models import UserInfo, User, Child, Volunteer, Donor, Letter, Need, PurchaseForInstitute, PurchaseForNeed, \
Activity, OngoingUserInfo
admin.site.unregister(Group)
admin.site.register(Child)
admin.site.register(Volunteer)
admin.site.register(Donor)
admin.site.register(Letter)
admin.site.register(Need)
admin.site.register(PurchaseForInstitute)
admin.site.register(PurchaseForNeed)
admin.site.register(Activity)
admin.site.register(OngoingUserInfo)
| 31.977273 | 119 | 0.687278 |
d5b9d02c239d39cdf1dcff5670b5cc5e359e73a5
| 2,515 |
py
|
Python
|
hunting/display/render.py
|
MoyTW/RL_Arena_Experiment
|
fb79c67576cd4de3e4a58278b4515098f38fb584
|
[
"MIT"
] | null | null | null |
hunting/display/render.py
|
MoyTW/RL_Arena_Experiment
|
fb79c67576cd4de3e4a58278b4515098f38fb584
|
[
"MIT"
] | null | null | null |
hunting/display/render.py
|
MoyTW/RL_Arena_Experiment
|
fb79c67576cd4de3e4a58278b4515098f38fb584
|
[
"MIT"
] | null | null | null |
import tdl
import time
import hunting.constants as c
| 36.985294 | 120 | 0.603579 |
d5ba579f0453b95d1e8c11d5b88d94830943af72
| 1,732 |
py
|
Python
|
ideas/models.py
|
neosergio/hackatrix-api
|
27f0180415efa97bd7345d100b314d8807486b67
|
[
"Apache-2.0"
] | 1 |
2021-02-12T10:25:28.000Z
|
2021-02-12T10:25:28.000Z
|
ideas/models.py
|
neosergio/hackatrix-api
|
27f0180415efa97bd7345d100b314d8807486b67
|
[
"Apache-2.0"
] | 7 |
2020-02-21T00:53:38.000Z
|
2022-02-10T12:22:53.000Z
|
ideas/models.py
|
neosergio/hackatrix-api
|
27f0180415efa97bd7345d100b314d8807486b67
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
| 39.363636 | 108 | 0.560624 |
d5ba81a91490ddb0a286042ea3d0c0e723e0af52
| 2,348 |
py
|
Python
|
section2/out/src/data_prep/SlicesDataset.py
|
ssheikh85/AIHCND_c3_3d_imaging
|
6502985d4199244328a683459b4d819090d58f3c
|
[
"MIT"
] | null | null | null |
section2/out/src/data_prep/SlicesDataset.py
|
ssheikh85/AIHCND_c3_3d_imaging
|
6502985d4199244328a683459b4d819090d58f3c
|
[
"MIT"
] | null | null | null |
section2/out/src/data_prep/SlicesDataset.py
|
ssheikh85/AIHCND_c3_3d_imaging
|
6502985d4199244328a683459b4d819090d58f3c
|
[
"MIT"
] | null | null | null |
"""
Module for Pytorch dataset representations
"""
import torch
from torch.utils.data import Dataset
| 35.044776 | 103 | 0.609881 |
d5bb9bbb0fed4afc892e132a8963124e532f19f2
| 845 |
py
|
Python
|
zenslackchat/zendesk_webhooks.py
|
uktrade/zenslackchat
|
8071757e1ea20a433783c6a7c47f25b046692682
|
[
"MIT"
] | 2 |
2020-12-30T07:46:12.000Z
|
2022-02-01T16:37:34.000Z
|
zenslackchat/zendesk_webhooks.py
|
uktrade/zenslackchat
|
8071757e1ea20a433783c6a7c47f25b046692682
|
[
"MIT"
] | 7 |
2021-04-14T16:17:29.000Z
|
2022-01-25T11:48:18.000Z
|
zenslackchat/zendesk_webhooks.py
|
uktrade/zenslackchat
|
8071757e1ea20a433783c6a7c47f25b046692682
|
[
"MIT"
] | 1 |
2021-06-06T09:46:47.000Z
|
2021-06-06T09:46:47.000Z
|
from zenslackchat.zendesk_base_webhook import BaseWebHook
from zenslackchat.zendesk_email_to_slack import email_from_zendesk
from zenslackchat.zendesk_comments_to_slack import comments_from_zendesk
| 33.8 | 72 | 0.744379 |
d5bbaeac59cde7e794de669fe4ec0942d528fc8d
| 699 |
py
|
Python
|
Examples/PagesOperations/MovePage.py
|
groupdocs-merger-cloud/groupdocs-merger-cloud-python-samples
|
af736c94240eeefef28bd81012c96ab2ea779088
|
[
"MIT"
] | null | null | null |
Examples/PagesOperations/MovePage.py
|
groupdocs-merger-cloud/groupdocs-merger-cloud-python-samples
|
af736c94240eeefef28bd81012c96ab2ea779088
|
[
"MIT"
] | null | null | null |
Examples/PagesOperations/MovePage.py
|
groupdocs-merger-cloud/groupdocs-merger-cloud-python-samples
|
af736c94240eeefef28bd81012c96ab2ea779088
|
[
"MIT"
] | null | null | null |
# Import modules
import groupdocs_merger_cloud
from Common import Common
# This example demonstrates how to move a document page to a new position
| 36.789474 | 93 | 0.711016 |
d5bbb325b8069e32756e2756a7150bcc81d9e24f
| 221 |
py
|
Python
|
src/models/predict_model.py
|
joseluistello/Regression-Analysis-Apple-Data
|
85952edd22ba8c382f43357efc510763185fd6d1
|
[
"MIT"
] | null | null | null |
src/models/predict_model.py
|
joseluistello/Regression-Analysis-Apple-Data
|
85952edd22ba8c382f43357efc510763185fd6d1
|
[
"MIT"
] | null | null | null |
src/models/predict_model.py
|
joseluistello/Regression-Analysis-Apple-Data
|
85952edd22ba8c382f43357efc510763185fd6d1
|
[
"MIT"
] | null | null | null |
import pandas as pd  # needed for the DataFrame below
from sklearn.metrics import r2_score

# `ml`, `x_test` and `y_test` are assumed to come from an upstream training step
y_pred = ml.predict(x_test)
print(y_pred)
print(r2_score(y_test, y_pred))
pred_y_df = pd.DataFrame({'Actual Value': y_test, 'Predicted Value': y_pred, 'Difference': y_test - y_pred})
print(pred_y_df[0:20])
| 24.555556 | 101 | 0.791855 |
d5bd90ba6b204f06ed13dd7eaecdd9ec577e33cb
| 5,512 |
py
|
Python
|
src/models/utils_func.py
|
Soufiane-Fartit/cars-prices
|
8eee8aa168251adab7f4947c45a78752e4145041
|
[
"MIT"
] | null | null | null |
src/models/utils_func.py
|
Soufiane-Fartit/cars-prices
|
8eee8aa168251adab7f4947c45a78752e4145041
|
[
"MIT"
] | null | null | null |
src/models/utils_func.py
|
Soufiane-Fartit/cars-prices
|
8eee8aa168251adab7f4947c45a78752e4145041
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
""" This module offers util functions to be called and used
in other modules
"""
from datetime import datetime
import os
import json
import pickle
import string
import random
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
from sklearn import tree
def id_generator(size=6, chars=string.ascii_lowercase + string.digits):
"""GENERATE A RANDOM STRING TO BE USED AS AN ID
Args:
size (int, optional): size of the string. Defaults to 6.
        chars (str, optional): characters used to generate the string.
Defaults to string.ascii_lowercase+string.digits.
Returns:
[str]: a random chain of charachters
"""
return "".join(random.choice(chars) for _ in range(size))
def save_model(path, model):
"""SAVE MODEL INTO PICKLE FILE
Args:
path (str): path where to save the model
model (binary): the model to be saved
"""
with open(path, "wb") as file:
pickle.dump(model, file)
def update_history(models_hist_path, model_id, model_name, model, params):
"""SAVE METADATA RELATED TO THE TRAINED MODEL INTO THE HISTORY FILE
Args:
models_hist_path (str): path to the history file
model_id (str): unique id of the model
model_name (str): model name = "model_"+model_id+".pkl"
model (binary): binary file of the model
        params (dict): dictionary containing the hyper-parameters
used to fit the model
"""
model_metadata = dict()
model_metadata["trained"] = str(datetime.now())
model_metadata["model_type"] = type(model).__name__
model_metadata["model_id"] = model_id
model_metadata["params"] = params
print(model_metadata)
with open(models_hist_path, "r+") as outfile:
try:
hist = json.load(outfile)
hist[model_name] = model_metadata
outfile.seek(0)
json.dump(hist, outfile, indent=4)
except json.decoder.JSONDecodeError:
json.dump({model_name: model_metadata}, outfile, indent=4)
def update_history_add_eval(
models_hist_path, model_id=None, model_name=None, metrics=None
):
"""ADD EVALUATION METRICS THE HISTORY FILE FOR THE SPECIFIED MODEL
Args:
models_hist_path (str): path to the history file
model_id (str, optional): the id of the model. Defaults to None.
model_name (str, optional): the name of the model. Defaults to None.
        metrics (dict, optional): a dictionary containing metadata related
to the model evaluation. Defaults to None.
"""
assert (
model_id is not None or model_name is not None
), "At least the model id or name must be given"
assert models_hist_path is not None, "You must specify the path to the history file"
if not model_name:
model_name = "model_" + model_id + ".pkl"
eval_metadata = dict()
eval_metadata["datetime"] = str(datetime.now())
eval_metadata["metrics"] = metrics
with open(models_hist_path, "r+") as outfile:
try:
hist = json.load(outfile)
hist[model_name]["evaluation"] = eval_metadata
outfile.seek(0)
json.dump(hist, outfile, indent=4)
except json.decoder.JSONDecodeError:
print("cannot save evaluation metadata")
def generate_features_importance_plot(model, features, model_id):
"""GENERATES A PLOT DESCRIBING FEATURES IMPORTANCE FOR THE MODEL
TO MAKE THE PREDICTION.
Args:
model (tree-based model): a tree based model (decision tree, random forest ...)
features (pandas dataframe): a table of the features on which we trained the model
model_id (str): the unique id of the model
"""
mean_importances = model.feature_importances_
importances_indices = np.argsort(mean_importances)[::-1]
ordered_columns = [features.columns[i] for i in importances_indices]
importances = pd.DataFrame(
        [estimator.feature_importances_ for estimator in model.estimators_],  # renamed loop var to avoid shadowing sklearn.tree
columns=features.columns,
)
importances = importances[ordered_columns]
_, ax = plt.subplots(figsize=(12, 8))
sns.boxplot(x="variable", y="value", ax=ax, data=pd.melt(importances))
figure = ax.get_figure()
figure.savefig(
"models/models-training/run_" + model_id + "/features_importance.png"
)
def plot_trees(rf, feature_names, target_names, model_id):
"""GENERATES A PLOT THAT SHOWS THE DECISION MAKING OF THE TREES
Args:
rf (model): a tree based model (random forest ...)
feature_names (list): names of the columns of the training set
target_names (str): name of the target columns
model_id (str): unique id of the model
"""
fn = feature_names
cn = target_names
fig, axes = plt.subplots(nrows=1, ncols=5, figsize=(10, 2), dpi=900)
for index in range(0, 5):
tree.plot_tree(
rf.estimators_[index],
feature_names=fn,
class_names=cn,
filled=True,
ax=axes[index],
)
axes[index].set_title("Estimator: " + str(index), fontsize=11)
fig.savefig("models/models-training/run_" + model_id + "/Trees.png")
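# Illustrative wiring of the helpers above (the paths, `model` and `params`
# names are assumptions for the example, not part of this module):
#   model_id = id_generator()
#   save_model("models/model_" + model_id + ".pkl", model)
#   update_history("models/history.json", model_id, "model_" + model_id + ".pkl", model, params)
#   update_history_add_eval("models/history.json", model_id=model_id, metrics={"r2": 0.87})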
| 33.815951 | 90 | 0.649492 |
d5c0292ca1d781849b4c6bb27642731423800d86
| 7,504 |
py
|
Python
|
modules/finance.py
|
KpaBap/palbot
|
38d2b7958e310f45a28cf1b3173967b92f819946
|
[
"MIT"
] | null | null | null |
modules/finance.py
|
KpaBap/palbot
|
38d2b7958e310f45a28cf1b3173967b92f819946
|
[
"MIT"
] | null | null | null |
modules/finance.py
|
KpaBap/palbot
|
38d2b7958e310f45a28cf1b3173967b92f819946
|
[
"MIT"
] | null | null | null |
import asyncio
import discord
from discord.ext import commands
import re
import sqlite3
from urllib.parse import quote as uriquote
import html
CURR = ["AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK", "DKK", "EUR",
"GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN",
"MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD",
"THB", "TRY", "TWD", "ZAR"]
| 40.344086 | 166 | 0.518124 |
d5c051b72ce68a91896ab21b2fd4b6e93e7e9a10
| 174 |
py
|
Python
|
SG_GetDataForClassifier.py
|
shubha1593/MovieReviewAnalysis
|
c485eea0c8b35e554027cce7a431212b406e672c
|
[
"MIT"
] | 7 |
2015-04-01T12:41:55.000Z
|
2019-08-01T18:13:56.000Z
|
SG_GetDataForClassifier.py
|
shubha1593/MovieReviewAnalysis
|
c485eea0c8b35e554027cce7a431212b406e672c
|
[
"MIT"
] | null | null | null |
SG_GetDataForClassifier.py
|
shubha1593/MovieReviewAnalysis
|
c485eea0c8b35e554027cce7a431212b406e672c
|
[
"MIT"
] | null | null | null |
from SG_GetFeatureMatrix import *
from SG_VectorY import *
featureMatrix = featureMatrixFromReviews()
Y = getYVector()
| 21.75 | 42 | 0.804598 |
d5c05a70d2bfb21530d973639155b0914281d250
| 1,882 |
py
|
Python
|
greenbounty/bounties/migrations/0001_initial.py
|
Carnales/green-bounty
|
beb765082b32c096139463bf75ccc1ec3d530692
|
[
"MIT"
] | 1 |
2021-01-18T21:43:05.000Z
|
2021-01-18T21:43:05.000Z
|
greenbounty/bounties/migrations/0001_initial.py
|
Thinkr3/green-bounty
|
c74fe79121d211728c9f70ffd87e239c8ba5d131
|
[
"MIT"
] | 1 |
2021-01-18T06:35:07.000Z
|
2021-01-18T06:35:07.000Z
|
greenbounty/bounties/migrations/0001_initial.py
|
Thinkr3/green-bounty
|
c74fe79121d211728c9f70ffd87e239c8ba5d131
|
[
"MIT"
] | 2 |
2021-01-18T06:22:50.000Z
|
2021-01-18T06:24:22.000Z
|
# Generated by Django 3.1.4 on 2021-01-17 19:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
| 40.042553 | 144 | 0.584485 |
d5c06f16c3fcc96993938e0c35fe7c62d8dfa422
| 8,621 |
py
|
Python
|
nova/tests/virt/docker/test_driver.py
|
osrg/nova
|
14b6bc655145c832bd9c822e48f877818e0e53ff
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/virt/docker/test_driver.py
|
osrg/nova
|
14b6bc655145c832bd9c822e48f877818e0e53ff
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/virt/docker/test_driver.py
|
osrg/nova
|
14b6bc655145c832bd9c822e48f877818e0e53ff
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2013 dotCloud, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import socket
import mock
from nova import context
from nova import exception
from nova.openstack.common import jsonutils
from nova.openstack.common import units
from nova import test
from nova.tests import utils
import nova.tests.virt.docker.mock_client
from nova.tests.virt.test_virt_drivers import _VirtDriverTestCase
from nova.virt.docker import hostinfo
from nova.virt.docker import network
| 40.85782 | 79 | 0.629741 |
d5c1a9c69d580b85cf1676ca01e443acef7eb239
| 9,048 |
py
|
Python
|
pyx/tests/test_http.py
|
l04m33/pyx
|
b70efec605832ba3c7079e991584db3f5d1da8cb
|
[
"MIT"
] | 2 |
2015-08-25T11:31:42.000Z
|
2015-10-16T11:30:15.000Z
|
pyx/tests/test_http.py
|
l04m33/pyx
|
b70efec605832ba3c7079e991584db3f5d1da8cb
|
[
"MIT"
] | null | null | null |
pyx/tests/test_http.py
|
l04m33/pyx
|
b70efec605832ba3c7079e991584db3f5d1da8cb
|
[
"MIT"
] | null | null | null |
import unittest
import unittest.mock as mock
import asyncio
import pyx.http as http
| 33.511111 | 92 | 0.59527 |
d5c40e739be914cd8694a4a6735e497e975d7778
| 1,791 |
py
|
Python
|
tests/test_webdriver_chrome.py
|
kidosoft/splinter
|
6d5052fd73c0a626299574cea76924e367c67faa
|
[
"BSD-3-Clause"
] | 1 |
2016-09-21T19:32:47.000Z
|
2016-09-21T19:32:47.000Z
|
tests/test_webdriver_chrome.py
|
kidosoft/splinter
|
6d5052fd73c0a626299574cea76924e367c67faa
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_webdriver_chrome.py
|
kidosoft/splinter
|
6d5052fd73c0a626299574cea76924e367c67faa
|
[
"BSD-3-Clause"
] | 1 |
2019-12-02T15:19:07.000Z
|
2019-12-02T15:19:07.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2013 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import os
import unittest
from splinter import Browser
from .fake_webapp import EXAMPLE_APP
from .base import WebDriverTests
from selenium.common.exceptions import WebDriverException
class ChromeBrowserTest(WebDriverTests, unittest.TestCase):
    # Class header reconstructed for syntactic validity; the original header
    # and fixtures were lost in extraction, so the name is an assumption.

    def test_attach_file(self):
        "should provide a way to change file field value"
        file_path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)),
            'mockfile.txt'
        )
        self.browser.attach_file('file', file_path)
        self.browser.find_by_name('upload').click()

        html = self.browser.html
        self.assertIn('text/plain', html)
        # .encode('utf-8') removed: browser.html is text, so compare str with str
        self.assertIn(open(file_path).read(), html)
class ChromeBrowserFullscreenTest(WebDriverTests, unittest.TestCase):
    def setUp(self):
        # Browser creation reconstructed (an assumption): the original setup
        # code was lost in extraction.
        self.browser = Browser('chrome', fullscreen=True)
        self.browser.visit(EXAMPLE_APP)
def test_should_support_with_statement(self):
with Browser('chrome', fullscreen=True) as internet:
pass
| 25.225352 | 69 | 0.672808 |
d5c480f55405e4b344842fed3a1082b875de03dd
| 1,349 |
py
|
Python
|
main.py
|
DuskXi/ArkX
|
7b416ae0c4ec2b383c6f414ed475930dd228909f
|
[
"Apache-2.0"
] | 2 |
2022-02-18T03:08:38.000Z
|
2022-03-03T04:20:08.000Z
|
main.py
|
DuskXi/ArkX
|
7b416ae0c4ec2b383c6f414ed475930dd228909f
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
DuskXi/ArkX
|
7b416ae0c4ec2b383c6f414ed475930dd228909f
|
[
"Apache-2.0"
] | null | null | null |
import os
import json
from File.file import File
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
if __name__ == "__main__":
main()
| 30.659091 | 127 | 0.681987 |
d5c5f4f5c79da67180264a12457b76158e4ccc4b
| 4,814 |
py
|
Python
|
tests/test_simple.py
|
cprogrammer1994/miniglm
|
696764ff200dd106dd533264ff45a060d5f7b230
|
[
"MIT"
] | 4 |
2017-11-03T14:48:52.000Z
|
2019-03-07T03:48:11.000Z
|
tests/test_simple.py
|
cprogrammer1994/miniglm
|
696764ff200dd106dd533264ff45a060d5f7b230
|
[
"MIT"
] | 2 |
2017-11-27T15:40:01.000Z
|
2021-01-30T08:40:51.000Z
|
tests/test_simple.py
|
cprogrammer1994/miniglm
|
696764ff200dd106dd533264ff45a060d5f7b230
|
[
"MIT"
] | 3 |
2017-11-27T15:25:07.000Z
|
2021-03-02T10:31:30.000Z
|
import struct
import numpy as np
import pytest
import miniglm
| 28.826347 | 91 | 0.623598 |
d5c61844c85a34a814f44efd7ddfec47f1e2a5e5
| 1,131 |
py
|
Python
|
flaskbb/plugins/news/views.py
|
konstantin1985/forum
|
7d4de24ccc932e9764699d89c8cc9d210b7fac7f
|
[
"BSD-3-Clause"
] | null | null | null |
flaskbb/plugins/news/views.py
|
konstantin1985/forum
|
7d4de24ccc932e9764699d89c8cc9d210b7fac7f
|
[
"BSD-3-Clause"
] | null | null | null |
flaskbb/plugins/news/views.py
|
konstantin1985/forum
|
7d4de24ccc932e9764699d89c8cc9d210b7fac7f
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from flask import Blueprint, redirect
from flaskbb.utils.helpers import render_template
from .forms import AddForm, DeleteForm
from .models import MyPost
from flaskbb.extensions import db
news = Blueprint("news", __name__, template_folder="templates")
| 26.928571 | 72 | 0.660477 |
d5c64f687d9f59ed689fc14b8df6d5ee61f23931
| 23,742 |
py
|
Python
|
stix_shifter_modules/aws_athena/tests/stix_translation/test_aws_athena_json_to_stix.py
|
nkhetia31/stix-shifter
|
ace07581cb227fd35e450b2f8871475227a041d0
|
[
"Apache-2.0"
] | 33 |
2018-05-25T17:07:28.000Z
|
2019-09-30T10:08:53.000Z
|
stix_shifter_modules/aws_athena/tests/stix_translation/test_aws_athena_json_to_stix.py
|
nkhetia31/stix-shifter
|
ace07581cb227fd35e450b2f8871475227a041d0
|
[
"Apache-2.0"
] | 54 |
2018-06-01T18:17:24.000Z
|
2019-09-30T18:36:15.000Z
|
stix_shifter_modules/aws_athena/tests/stix_translation/test_aws_athena_json_to_stix.py
|
subbyte/stix-shifter
|
36d71c172a5fc5b97d872e623753b0dd1bf4fe6c
|
[
"Apache-2.0"
] | 37 |
2018-07-24T13:29:46.000Z
|
2019-09-29T19:06:27.000Z
|
from stix_shifter_utils.stix_translation.src.json_to_stix import json_to_stix_translator
from stix_shifter_utils.stix_translation.src.utils.transformer_utils import get_module_transformers
from stix_shifter_modules.aws_athena.entry_point import EntryPoint
import unittest
MODULE = "aws_athena"
entry_point = EntryPoint()
map_data = entry_point.get_results_translator().map_data
data_source = {
"type": "identity",
"id": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"name": "aws_athena",
"identity_class": "events"
}
options = {}
def test_vpc_flow_network_json_to_stix(self):
"""to test network stix object properties"""
data = {
"vpcflow": {
"account": 979326520502,
"interfaceid": "eni-04b762de832716892",
"sourceaddress": "89.248.172.85",
"destinationaddress": "172.31.62.249",
"sourceport": 58387,
"destinationport": 51289,
"protocol": "tcp",
"starttime": 1592547796,
"endtime": 1592547798,
"action": "REJECT",
"date": "2020-06-19",
"logstatus": "OK",
"numbytes": 40,
"region": "us-east-1",
"version": 2
}
}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
network_obj = TestAwsResultsToStix.get_first_of_type(objects.values(), 'network-traffic')
assert network_obj is not None, 'network-traffic object type not found'
assert network_obj.keys() == {'type', 'src_ref', 'dst_ref', 'src_port', 'dst_port', 'protocols', 'start', 'end'}
assert network_obj['type'] == 'network-traffic'
assert network_obj['src_ref'] == '1'
assert network_obj['dst_ref'] == '4'
assert network_obj['src_port'] == 58387
assert network_obj['dst_port'] == 51289
assert network_obj['protocols'] == ['tcp']
assert network_obj['start'] == '2020-06-19T06:23:16.000Z'
assert network_obj['end'] == '2020-06-19T06:23:18.000Z'
def test_vpc_flow_custom_attr_json_to_stix(self):
"""to test network stix object properties"""
data = {
"vpcflow": {
"account": 979326520502,
"interfaceid": "eni-04b762de832716892",
"sourceaddress": "89.248.172.85",
"destinationaddress": "172.31.62.249",
"sourceport": 58387,
"destinationport": 51289,
"protocol": "tcp",
"starttime": 1592547796,
"endtime": 1592547798,
"action": "REJECT",
"date": "2020-06-19",
"logstatus": "OK",
"numbytes": 40,
"region": "us-east-1",
"version": 2
}
}
options = {"unmapped_fallback": True}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
custom_object = TestAwsResultsToStix.get_first_of_type(objects.values(), 'x-aws-athena')
assert custom_object.keys() == {'type', 'interfaceid', 'date', 'logstatus', 'numbytes', 'region', 'version'}
assert custom_object['date'] == '2020-06-19'
assert custom_object['logstatus'] == 'OK'
assert custom_object['numbytes'] == 40
assert custom_object['region'] == 'us-east-1'
assert custom_object['version'] == 2
def test_guardduty_network_json_to_stix(self):
"""to test network stix object properties"""
data = {
"guardduty": {
"accountid": 979326520502,
"region": "us-east-1",
"type": "UnauthorizedAccess:EC2/SSHBruteForce",
"resource_instancedetails_networkinterfaces_0_privatednsname": "ip-172-31-60-104.ec2.internal",
"resource_instancedetails_networkinterfaces_0_privateipaddress": "172.31.60.104",
"resource_instancedetails_networkinterfaces_0_subnetid": "subnet-ea9d6be4",
"resource_instancedetails_networkinterfaces_0_publicdnsname": "ec2-18-210-22-128.compute-1."
"amazonaws.com",
"resource_instancedetails_networkinterfaces_0_vpcid": "vpc-10db926a",
"resource_instancedetails_networkinterfaces_0_publicip": "18.210.22.128",
"resource_instancedetails_networkinterfaces_0_networkinterfaceid": "eni-0203098cca62c3f21",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupid": "sg-018edb43fcc81525f",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupname": "launch-wizard-13",
"resource_instancedetails_imageid": "ami-0015fcaa5516c75ed",
"resource_instancedetails_instanceid": "i-031cb81e1f32a36e1",
"resource_instancedetails_availabilityzone": "us-east-1f",
"service_eventfirstseen": "2020-07-31T06:19:09Z",
"service_action_networkconnectionaction_protocol": "TCP",
"service_action_networkconnectionaction_remoteportdetails_port": "38420",
"service_action_networkconnectionaction_remoteipdetails_country_countryname": "Sweden",
"service_action_networkconnectionaction_remoteipdetails_ipaddressv4": "85.224.242.94",
"service_action_networkconnectionaction_remoteipdetails_city_cityname": "rebro",
"service_action_networkconnectionaction_localportdetails_port": "22",
"service_eventlastseen": "2020-09-12T09:19:40Z",
"severity": 2,
"title": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1.",
"arn": "arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed494f3b7ca56acdc74df/finding"
"/7ab9d1cb6248e05a0e419a79528761cb",
"createdat": "2020-07-31T06:37:13.745Z",
"description": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1. "
"Brute force attacks are used to gain unauthorized access to your instance by "
"guessing the SSH password.",
"finding_id": "7ab9d1cb6248e05a0e419a79528761cb",
"partition": "aws",
"resource": {
"instancedetails": {
"imagedescription": "Provided by Red Hat, Inc.",
"instancestate": "running",
"instancetype": "t2.large",
"launchtime": "2020-09-11T23:16:03Z",
"tags": {
"0": {
"key": "Name",
"value": "ArcSight Logger"
}
}
},
"resourcetype": "Instance"
},
"schemaversion": 2.0,
"service": {
"action": {
"actiontype": "NETWORK_CONNECTION",
"networkconnectionaction": {
"connectiondirection": "INBOUND",
"localportdetails": {
"portname": "SSH"
},
"remoteipdetails": {
"geolocation": {
"lat": "59.2741",
"lon": "15.2066"
},
"organization": {
"asn": "2119",
"asnorg": "Telenor Norge AS",
"isp": "Telenor Sverige AB",
"org": "Telenor Sverige AB"
}
},
"remoteportdetails": {
"portname": "Unknown"
}
}
},
"count": "20",
"detectorid": "6ab6e6ee780ed494f3b7ca56acdc74df",
"resourcerole": "TARGET",
"servicename": "guardduty"
},
"updatedat": "2020-09-12T09:25:34.086Z"
}
}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
network_obj = TestAwsResultsToStix.get_first_of_type(objects.values(), 'network-traffic')
assert network_obj is not None, 'network-traffic object type not found'
assert network_obj.keys() == {'type', 'dst_port', 'src_ref', 'dst_ref', 'src_port', 'protocols'}
assert network_obj['type'] == 'network-traffic'
assert network_obj['dst_port'] == 38420
assert network_obj['src_ref'] == '3'
assert network_obj['dst_ref'] == '9'
assert network_obj['src_port'] == 22
assert network_obj['protocols'] == ['tcp']
def test_guardduty_custom_attr_json_to_stix(self):
"""to test network stix object properties"""
data = {
"guardduty": {
"accountid": 979326520502,
"region": "us-east-1",
"type": "UnauthorizedAccess:EC2/SSHBruteForce",
"resource_instancedetails_networkinterfaces_0_privatednsname": "ip-172-31-60-104.ec2.internal",
"resource_instancedetails_networkinterfaces_0_privateipaddress": "172.31.60.104",
"resource_instancedetails_networkinterfaces_0_subnetid": "subnet-ea9d6be4",
"resource_instancedetails_networkinterfaces_0_publicdnsname": "ec2-18-210-22-128.compute-1."
"amazonaws.com",
"resource_instancedetails_networkinterfaces_0_vpcid": "vpc-10db926a",
"resource_instancedetails_networkinterfaces_0_publicip": "18.210.22.128",
"resource_instancedetails_networkinterfaces_0_networkinterfaceid": "eni-0203098cca62c3f21",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupid": "sg-018edb43fcc81525f",
"resource_instancedetails_networkinterfaces_0_securitygroups_0_groupname": "launch-wizard-13",
"resource_instancedetails_imageid": "ami-0015fcaa5516c75ed",
"resource_instancedetails_instanceid": "i-031cb81e1f32a36e1",
"resource_instancedetails_availabilityzone": "us-east-1f",
"service_eventfirstseen": "2020-07-31T06:19:09Z",
"service_action_networkconnectionaction_protocol": "TCP",
"service_action_networkconnectionaction_remoteportdetails_port": "38420",
"service_action_networkconnectionaction_remoteipdetails_country_countryname": "Sweden",
"service_action_networkconnectionaction_remoteipdetails_ipaddressv4": "85.224.242.94",
"service_action_networkconnectionaction_remoteipdetails_city_cityname": "rebro",
"service_action_networkconnectionaction_localportdetails_port": "22",
"service_eventlastseen": "2020-09-12T09:19:40Z",
"severity": 2,
"title": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1.",
"arn": "arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed494f3b7ca56acdc74df/finding/"
"7ab9d1cb6248e05a0e419a79528761cb",
"createdat": "2020-07-31T06:37:13.745Z",
"description": "85.224.242.94 is performing SSH brute force attacks against i-031cb81e1f32a36e1."
" Brute force attacks are used to gain unauthorized access to your instance by guessing "
"the SSH password.",
"finding_id": "7ab9d1cb6248e05a0e419a79528761cb",
"partition": "aws",
"resource": {
"instancedetails": {
"imagedescription": "Provided by Red Hat, Inc.",
"instancestate": "running",
"instancetype": "t2.large",
"launchtime": "2020-09-11T23:16:03Z",
"tags": {
"0": {
"key": "Name",
"value": "ArcSight Logger"
}
}
},
"resourcetype": "Instance"
},
"schemaversion": 2.0,
"service": {
"action": {
"actiontype": "NETWORK_CONNECTION",
"networkconnectionaction": {
"connectiondirection": "INBOUND",
"localportdetails": {
"portname": "SSH"
},
"remoteipdetails": {
"geolocation": {
"lat": "59.2741",
"lon": "15.2066"
},
"organization": {
"asn": "2119",
"asnorg": "Telenor Norge AS",
"isp": "Telenor Sverige AB",
"org": "Telenor Sverige AB"
}
},
"remoteportdetails": {
"portname": "Unknown"
}
}
},
"count": "20",
"detectorid": "6ab6e6ee780ed494f3b7ca56acdc74df",
"resourcerole": "TARGET",
"servicename": "guardduty"
},
"updatedat": "2020-09-12T09:25:34.086Z"
}
}
options = {"unmapped_fallback": True}
result_bundle = json_to_stix_translator.convert_to_stix(
data_source, map_data, [data], get_module_transformers(MODULE), options)
result_bundle_objects = result_bundle['objects']
result_bundle_identity = result_bundle_objects[0]
assert result_bundle_identity['type'] == data_source['type']
observed_data = result_bundle_objects[1]
assert 'objects' in observed_data
objects = observed_data['objects']
custom_object = TestAwsResultsToStix.get_first_of_type(objects.values(), 'x-aws-athena')
assert custom_object.keys() == {'type', 'service_action_networkconnectionaction_remoteipdetails_country_countryname',
'finding_id', 'arn', 'createdat', 'partition', 'resource',
'schemaversion', 'service', 'updatedat'}
assert custom_object['arn'] == 'arn:aws:guardduty:us-east-1:979326520502:detector/6ab6e6ee780ed' \
'494f3b7ca56acdc74df/finding/7ab9d1cb6248e05a0e419a79528761cb'
assert custom_object['finding_id'] == '7ab9d1cb6248e05a0e419a79528761cb'
assert custom_object['createdat'] == '2020-07-31T06:37:13.745Z'
assert custom_object['partition'] == 'aws'
assert custom_object['schemaversion'] == 2.0
assert custom_object['updatedat'] == '2020-09-12T09:25:34.086Z'
| 52.18022 | 126 | 0.539087 |
d5c68966a759ee86d163e95dee1679657c063de3
| 2,236 |
py
|
Python
|
Python Spider/xpath/03 login.py
|
CodingGorit/Coding-with-Python
|
b0f1d5d704b816a85b0ae57b46d00314de2a67b9
|
[
"Apache-2.0"
] | 1 |
2020-01-31T15:57:29.000Z
|
2020-01-31T15:57:29.000Z
|
Python Spider/xpath/03 login.py
|
CodingGorit/Coding-with-Python
|
b0f1d5d704b816a85b0ae57b46d00314de2a67b9
|
[
"Apache-2.0"
] | null | null | null |
Python Spider/xpath/03 login.py
|
CodingGorit/Coding-with-Python
|
b0f1d5d704b816a85b0ae57b46d00314de2a67b9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#file: 03 login.py
#@author: Gorit
#@contact: [email protected]
#@time: 2020/1/20 12:44
import requests
from lxml import etree
#
obj = lMonKey()
| 27.268293 | 175 | 0.549195 |
d5c72a3c1f9827cd7d71f3da809f2313db6f0a32
| 9,730 |
py
|
Python
|
src/gui/MultiplayerPlayerInfo.py
|
fireclawthefox/AnkandoraLight
|
05b71e1a2919141cce02cb1aade95fbac682614b
|
[
"BSD-2-Clause"
] | 3 |
2020-07-31T10:27:06.000Z
|
2022-01-11T20:28:55.000Z
|
src/gui/MultiplayerPlayerInfo.py
|
fireclawthefox/AnkandoraLight
|
05b71e1a2919141cce02cb1aade95fbac682614b
|
[
"BSD-2-Clause"
] | null | null | null |
src/gui/MultiplayerPlayerInfo.py
|
fireclawthefox/AnkandoraLight
|
05b71e1a2919141cce02cb1aade95fbac682614b
|
[
"BSD-2-Clause"
] | 1 |
2020-07-30T08:23:28.000Z
|
2020-07-30T08:23:28.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file was created using the DirectGUI Designer
from direct.gui import DirectGuiGlobals as DGG
from direct.gui.DirectFrame import DirectFrame
from direct.gui.DirectLabel import DirectLabel
from direct.gui.DirectButton import DirectButton
from direct.gui.DirectOptionMenu import DirectOptionMenu
from panda3d.core import (
LPoint3f,
LVecBase3f,
LVecBase4f,
TextNode
)
| 35.126354 | 104 | 0.528777 |
d5c7e9662e071c24633307f69bc18856ffa49ecf
| 634 |
py
|
Python
|
publications/time_mag.py
|
mkoo21/rss-review-scraper
|
4adde8586ce55d7bb211bcfbb9bcccd1edc8b6a5
|
[
"BSD-3-Clause"
] | null | null | null |
publications/time_mag.py
|
mkoo21/rss-review-scraper
|
4adde8586ce55d7bb211bcfbb9bcccd1edc8b6a5
|
[
"BSD-3-Clause"
] | 1 |
2021-06-01T23:47:57.000Z
|
2021-06-01T23:47:57.000Z
|
publications/time_mag.py
|
mkoo21/rss-review-scraper
|
4adde8586ce55d7bb211bcfbb9bcccd1edc8b6a5
|
[
"BSD-3-Clause"
] | null | null | null |
from . import FROM_FEED_PUBLISHED_TODAY, STRINGIFY
| 28.818182 | 93 | 0.594637 |
d5c8ad01f8962aad9216b71e8846b60294d68306
| 3,017 |
py
|
Python
|
2020/21/code.py
|
irobin591/advent-of-code-2019
|
279c28a2863558bd014b289802fff4b444c5d6cf
|
[
"MIT"
] | null | null | null |
2020/21/code.py
|
irobin591/advent-of-code-2019
|
279c28a2863558bd014b289802fff4b444c5d6cf
|
[
"MIT"
] | null | null | null |
2020/21/code.py
|
irobin591/advent-of-code-2019
|
279c28a2863558bd014b289802fff4b444c5d6cf
|
[
"MIT"
] | null | null | null |
# Advent of Code 2020
# Day 21
# Author: irobin591
import os
import doctest
import re
re_entry = re.compile(r'^([a-z ]+) \(contains ([a-z, ]*)\)$')
with open(os.path.join(os.path.dirname(__file__), "input.txt"), 'r') as input_file:
input_data = input_file.read().strip().split('\n')
def part1(input_data):
"""
>>> part1(open(os.path.join(os.path.dirname(__file__), "test_part1.txt"), 'r').read().strip().split('\\n'))
5
"""
# dict['allergen'] = ['asdfa', 'agbsfb']
allergens = {}
ingredients = []
# map strings to allergens
for entry in input_data:
r = re_entry.match(entry)
if not r:
            raise RuntimeError("unexpected input line: " + entry)
contents = set(r.group(1).split(' '))
ingredients.extend(contents)
for allergen in r.group(2).split(', '):
if allergen not in allergens:
allergens[allergen] = contents
else:
# only keep already added ingredients
allergens[allergen] = [ingredient for ingredient in contents if ingredient in allergens[allergen]]
# print(allergens)
# print(ingredients)
ingredients_with_allergens = set([y for x in allergens.values() for y in x])
# print(list(filter(lambda i: i not in ingredients_with_allergens, ingredients)))
return len(list(filter(lambda i: i not in ingredients_with_allergens, ingredients)))
def part2(input_data):
"""
>>> part2(open(os.path.join(os.path.dirname(__file__), "test_part1.txt"), 'r').read().strip().split('\\n'))
'mxmxvkd,sqjhc,fvjkl'
"""
# dict['allergen'] = ['asdfa', 'agbsfb']
allergens = {}
ingredients = []
# map strings to allergens
for entry in input_data:
r = re_entry.match(entry)
if not r:
            raise RuntimeError("unexpected input line: " + entry)
contents = set(r.group(1).split(' '))
ingredients.extend(contents)
for allergen in r.group(2).split(', '):
if allergen not in allergens:
allergens[allergen] = list(contents)
else:
# only keep already added ingredients
allergens[allergen] = [ingredient for ingredient in contents if ingredient in allergens[allergen]]
# print(allergens)
# (allergen, ingredient)
assigned_allergens = []
while sum([len(ingreds) for ingreds in allergens.values()]) > 0:
for allergen in allergens:
if len(allergens[allergen]) == 1:
ingredient = allergens[allergen][0]
assigned_allergens.append((allergen, ingredient))
for allergen2 in allergens:
if ingredient in allergens[allergen2]:
allergens[allergen2].remove(ingredient)
assigned_allergens.sort(key=lambda x: x[0])
return ",".join([x[1] for x in assigned_allergens])
if __name__ == "__main__":
doctest.testmod()
print("Part One: {}".format(part1(input_data)))
print("Part Two: {}".format(part2(input_data)))
pass
| 30.785714 | 114 | 0.599271 |
d5c9c3dcfd93144a733bdffa2a7d7a7dc364d51d
| 2,807 |
py
|
Python
|
tests/test_html_escaping.py
|
copart/pandoc-mustache
|
f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa
|
[
"CC0-1.0"
] | 43 |
2017-12-27T05:57:00.000Z
|
2022-03-18T10:07:28.000Z
|
tests/test_html_escaping.py
|
copart/pandoc-mustache
|
f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa
|
[
"CC0-1.0"
] | 10 |
2018-02-07T11:20:37.000Z
|
2021-04-22T21:44:19.000Z
|
tests/test_html_escaping.py
|
copart/pandoc-mustache
|
f6ace29cd0c8d6b4d8f182eedcf36ad38a2412fa
|
[
"CC0-1.0"
] | 8 |
2018-11-05T13:10:35.000Z
|
2021-08-30T18:14:02.000Z
|
"""
Test that escaping characters for HTML is disabled.
"""
import os, subprocess
| 28.642857 | 139 | 0.617385 |
d5cb7cb45edf1a90b51258da74fc6a1d2b6758fa
| 2,761 |
py
|
Python
|
app.py
|
iandees/microdata2osm
|
1505b8072880055033ddbb85626fcdb857c97d4e
|
[
"MIT"
] | 1 |
2019-11-05T16:02:17.000Z
|
2019-11-05T16:02:17.000Z
|
app.py
|
iandees/microdata2osm
|
1505b8072880055033ddbb85626fcdb857c97d4e
|
[
"MIT"
] | null | null | null |
app.py
|
iandees/microdata2osm
|
1505b8072880055033ddbb85626fcdb857c97d4e
|
[
"MIT"
] | null | null | null |
from flask import Flask, jsonify, request
from w3lib.html import get_base_url
import extruct
import requests
app = Flask(__name__)
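# Illustrative sketch of the kind of endpoint this service exposes (the route
# path, parameter name and response shape are assumptions; the original
# handlers were lost in extraction):
@app.route('/extract')
def extract():
    url = request.args.get('url')
    response = requests.get(url)
    base_url = get_base_url(response.text, response.url)
    data = extruct.extract(response.text, base_url=base_url)
    return jsonify(data)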
| 25.803738 | 75 | 0.589279 |
d5cdc3a0f5e46ad0ab740a282e0265f0e1bb27d5
| 702 |
py
|
Python
|
dags/simple_python_taskflow_api.py
|
davemasino/airflow101
|
f940e169b9c562e3834a201827b615744a99b86d
|
[
"Apache-2.0"
] | null | null | null |
dags/simple_python_taskflow_api.py
|
davemasino/airflow101
|
f940e169b9c562e3834a201827b615744a99b86d
|
[
"Apache-2.0"
] | null | null | null |
dags/simple_python_taskflow_api.py
|
davemasino/airflow101
|
f940e169b9c562e3834a201827b615744a99b86d
|
[
"Apache-2.0"
] | null | null | null |
"""
A simple Python DAG using the Taskflow API.
"""
import logging
import time
from datetime import datetime
from airflow import DAG
from airflow.decorators import task
log = logging.getLogger(__name__)
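# Task bodies reconstructed from their names and the imports above; the
# originals were lost in extraction, so treat these as plausible sketches.
@task
def say_hello():
    """Log a greeting."""
    log.info('hello!')


@task
def sleep_for_1():
    """Sleep for one second."""
    time.sleep(1)
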
with DAG(
dag_id='simple_python_taskflow_api',
schedule_interval=None,
start_date=datetime(2021, 1, 1),
catchup=False,
tags=['airflow101'],
) as dag:
hello_task = say_hello()
sleeping_task = sleep_for_1()
hello_task >> sleeping_task
| 20.057143 | 43 | 0.665242 |
d5cdc4a618ee4e3bc14a1bf765626931e9530f36
| 1,744 |
py
|
Python
|
pyunmarked/roylenichols.py
|
kenkellner/pyunmarked
|
485bd96b4ca12a019b478fc19f68f577279ac9b8
|
[
"MIT"
] | null | null | null |
pyunmarked/roylenichols.py
|
kenkellner/pyunmarked
|
485bd96b4ca12a019b478fc19f68f577279ac9b8
|
[
"MIT"
] | null | null | null |
pyunmarked/roylenichols.py
|
kenkellner/pyunmarked
|
485bd96b4ca12a019b478fc19f68f577279ac9b8
|
[
"MIT"
] | null | null | null |
from . import model
import numpy as np
from scipy import special, stats
| 37.913043 | 91 | 0.544151 |
d5cdf640db99a0e2d2dcf804807be669d9939f1e
| 75,933 |
py
|
Python
|
proc_chords_xarray.py
|
pgriewank/ASR_tools
|
306a7d92725888485a35f8824433ad7b0451b569
|
[
"MIT"
] | null | null | null |
proc_chords_xarray.py
|
pgriewank/ASR_tools
|
306a7d92725888485a35f8824433ad7b0451b569
|
[
"MIT"
] | null | null | null |
proc_chords_xarray.py
|
pgriewank/ASR_tools
|
306a7d92725888485a35f8824433ad7b0451b569
|
[
"MIT"
] | null | null | null |
#Contains the functions needed to process both chords and regularized beards
# proc_chords is used for chords
#proc_beard_regularize for generating beards
#proc_pdf saves pdfs of a variable below cloud base
#Both have a large overlap, but I split them in two to keep the one script from getting too confusing.
import numpy as np
import math
from netCDF4 import Dataset
import os
import time as ttiimmee
from scipy.interpolate import interp1d
from scipy.interpolate import interp2d
#from scipy.interpolate import griddata
#from mpl_toolkits.axes_grid1 import make_axes_locatable
import pickle
import sys
#sys.path.insert(0, "/home/pgriewank/code/2019-chords-plumes/")
#from unionfind import UnionFind
from cusize_functions import *
#import matplotlib.pyplot as plt
import pandas as pd
import gc
import glob
import xarray as xr
#turned into a function
#removed the possibility to loop over multiple dates, if you want to do that call the function repeatedly
#Full list of variables to analyze is unclear, I will try to include everything available, but this might break the memory bank
#want to keep the automatic x and y calculation
#Scaling shouldn't be needed, as all chord properties should be independent of wind direction (right?)
#Similarly, no basedefinition is needed, all values are relative to cloud base
#Should be able to work for any variable in the column output, or for any 3D variable as long as it is named the same as the file.
#Changing 3D output
#Default is now to always go over x and y directions
#TODO
#plot_flag disabled for the mean time
#turned into a function
#removed the possibility to loop over multiple dates, if you want to do that call the function repeatedly
#Should be able to work for any variable in the column output, or for any 3D variable as long as it is named the same as the file.
#If the input data is a 3D field it will always go over x and y directions
#Two different scale_flags added to rotate the curtain to point upwind.
#TODO
#plot_flag disabled for the mean time
#A simple script which calculates a histogram below the cloud base and saves it
#I will try to keep it at least somewhat general with a flexible variable
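# Minimal sketch of the proc_pdf step described above (function name, binning
# and output handling are assumptions, not the original implementation):
def proc_pdf_sketch(var_below_cloud_base, bins=100, outfile='pdf.pkl'):
    # histogram of the chosen variable sampled below cloud base
    counts, edges = np.histogram(var_below_cloud_base, bins=bins, density=True)
    with open(outfile, 'wb') as f:
        pickle.dump((counts, edges), f)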
| 40.605882 | 218 | 0.563326 |
d5ce012afb2ebb7c4522ad96e38d4259432b472d
| 1,656 |
py
|
Python
|
expression-atlas-wf/scripts/dmel_tau_housekeeping.py
|
jfear/larval_gonad
|
624a71741864b74e0372f89bdcca578e5cca3722
|
[
"MIT"
] | 1 |
2019-09-13T13:24:18.000Z
|
2019-09-13T13:24:18.000Z
|
expression-atlas-wf/scripts/dmel_tau_housekeeping.py
|
jfear/larval_gonad
|
624a71741864b74e0372f89bdcca578e5cca3722
|
[
"MIT"
] | 65 |
2019-07-24T16:23:08.000Z
|
2020-03-06T22:18:47.000Z
|
expression-atlas-wf/scripts/dmel_tau_housekeeping.py
|
jfear/larval_gonad
|
624a71741864b74e0372f89bdcca578e5cca3722
|
[
"MIT"
] | 1 |
2021-06-02T19:09:35.000Z
|
2021-06-02T19:09:35.000Z
|
"""D. mel housekeeping genes based on tau.
Uses the intersection of w1118 and orgR to create a list of
D. mel housekeeping genes.
"""
import os
from functools import partial
import pandas as pd
from larval_gonad.io import pickle_load, pickle_dump
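# Sketch of the missing main() (reconstructed from the module docstring and the
# snakemake inputs below; key names, the union of sexes and the output target
# are assumptions — the `snakemake` object is injected by Snakemake at runtime):
def main():
    annot = pickle_load(snakemake.input.annot)  # YOgn -> D. mel ortholog map
    male = set.intersection(*(set(pickle_load(f)) for f in snakemake.input.male))
    female = set.intersection(*(set(pickle_load(f)) for f in snakemake.input.female))
    housekeeping = sorted(annot.get(g, g) for g in male | female)
    pickle_dump(housekeeping, snakemake.output[0])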
if __name__ == "__main__":
if os.getenv("SNAKE_DEBUG", False):
from larval_gonad.debug import snakemake_debug
snakemake = snakemake_debug(
workdir="expression-atlas-wf",
input=dict(
male=[
"../output/expression-atlas-wf/tau_housekeeping/w1118_male.pkl",
"../output/expression-atlas-wf/tau_housekeeping/orgR_male.pkl",
],
female=[
"../output/expression-atlas-wf/tau_housekeeping/w1118_female.pkl",
"../output/expression-atlas-wf/tau_housekeeping/orgR_female.pkl",
],
annot="../output/expression-atlas-wf/YOgn_to_dmel_ortholog/dmel.pkl",
),
)
main()
| 30.109091 | 96 | 0.630435 |
d5ce93a21169fedfe3df6edeca6f8d5d29633b0f
| 2,226 |
py
|
Python
|
api-server/server/core/key.py
|
TK-IBM-Call-for-Code-Challange-2021/call-for-code-challenge-2021
|
7a3d78d4067303d61c4a25d45c0671ae7e984222
|
[
"MIT"
] | 75 |
2020-07-22T15:24:56.000Z
|
2022-03-30T08:34:06.000Z
|
api-server/server/core/key.py
|
TK-IBM-Call-for-Code-Challange-2021/call-for-code-challenge-2021
|
7a3d78d4067303d61c4a25d45c0671ae7e984222
|
[
"MIT"
] | null | null | null |
api-server/server/core/key.py
|
TK-IBM-Call-for-Code-Challange-2021/call-for-code-challenge-2021
|
7a3d78d4067303d61c4a25d45c0671ae7e984222
|
[
"MIT"
] | 34 |
2020-07-23T02:54:03.000Z
|
2022-03-29T09:51:21.000Z
|
"""
Api Key validation
"""
from typing import Optional
from fastapi.security.api_key import APIKeyHeader
from fastapi import HTTPException, Security, Depends
from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN
from server.core.security import verify_key
from server.db.mongodb import AsyncIOMotorClient, get_database
from server.models.user import User
from server.db.crud.user import get_user_by_email
from pydantic import EmailStr
api_key_scheme = APIKeyHeader(name="X-API-KEY", auto_error=False)
email_scheme = APIKeyHeader(name="X-EMAIL-ID", auto_error=False)
| 33.223881 | 92 | 0.666667 |
d5cee84d7663e55b77b23428667b37ccfb80fbf9
| 1,253 |
py
|
Python
|
scripts/kconfig-split.py
|
Osirium/linuxkit
|
b710224cdf9a8425a7129cdcb84fc1af00f926d7
|
[
"Apache-2.0"
] | 7,798 |
2017-04-18T15:19:24.000Z
|
2022-03-30T19:34:42.000Z
|
scripts/kconfig-split.py
|
Osirium/linuxkit
|
b710224cdf9a8425a7129cdcb84fc1af00f926d7
|
[
"Apache-2.0"
] | 1,673 |
2017-04-18T16:15:20.000Z
|
2022-03-31T06:14:17.000Z
|
scripts/kconfig-split.py
|
Osirium/linuxkit
|
b710224cdf9a8425a7129cdcb84fc1af00f926d7
|
[
"Apache-2.0"
] | 1,099 |
2017-04-18T15:19:33.000Z
|
2022-03-31T20:23:20.000Z
|
#!/usr/bin/env python
# This is a slightly modified version of ChromiumOS' splitconfig
# https://chromium.googlesource.com/chromiumos/third_party/kernel/+/stabilize-5899.B-chromeos-3.14/chromeos/scripts/splitconfig
"""See this page for more details:
http://dev.chromium.org/chromium-os/how-tos-and-troubleshooting/kernel-configuration
"""
import os
import re
import sys
allconfigs = {}
# Parse config files
for config in sys.argv[1:]:
allconfigs[config] = set()
for line in open(config):
m = re.match("#*\s*CONFIG_(\w+)[\s=](.*)$", line)
if not m:
continue
option, value = m.groups()
allconfigs[config].add((option, value))
# Split out common config options
common = allconfigs.values()[0].copy()
for config in allconfigs.keys():
common &= allconfigs[config]
for config in allconfigs.keys():
allconfigs[config] -= common
allconfigs["common"] = common
# Generate new splitconfigs
for config in allconfigs.keys():
f = open("split-" + config, "w")
for option, value in sorted(list(allconfigs[config])):
if value == "is not set":
print >>f, "# CONFIG_%s %s" % (option, value)
else:
print >>f, "CONFIG_%s=%s" % (option, value)
f.close()
| 27.844444 | 127 | 0.651237 |
d5cef9720c8cb2b94870da749da3f4cf31757f01
| 1,631 |
py
|
Python
|
src/synapse/azext_synapse/vendored_sdks/azure_synapse/models/livy_statement_output.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 2 |
2021-06-05T17:51:26.000Z
|
2021-11-17T11:17:56.000Z
|
src/synapse/azext_synapse/vendored_sdks/azure_synapse/models/livy_statement_output.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 3 |
2020-05-27T20:16:26.000Z
|
2020-07-23T19:46:49.000Z
|
src/synapse/azext_synapse/vendored_sdks/azure_synapse/models/livy_statement_output.py
|
Mannan2812/azure-cli-extensions
|
e2b34efe23795f6db9c59100534a40f0813c3d95
|
[
"MIT"
] | 5 |
2020-05-09T17:47:09.000Z
|
2020-10-01T19:52:06.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
| 33.285714 | 76 | 0.563458 |
d5d04044860f90c923e15fee006637515d70252d
| 6,215 |
py
|
Python
|
src/main.py
|
mafshar/sub-puppo
|
20fe5bf3ca3d250d846c545085f748e706c4a33e
|
[
"MIT"
] | 1 |
2018-03-02T04:24:33.000Z
|
2018-03-02T04:24:33.000Z
|
src/main.py
|
mafshar/sub-puppo
|
20fe5bf3ca3d250d846c545085f748e706c4a33e
|
[
"MIT"
] | null | null | null |
src/main.py
|
mafshar/sub-puppo
|
20fe5bf3ca3d250d846c545085f748e706c4a33e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
'''
Notes:
- Weak implies weakly supervised learning (4 classes)
- Strong implies strongly (fully) supervised learning (10 classes)
- frame number is set to 22ms (default); that is the "sweet spot" based on DSP literature
- sampling rate is 16kHz (for the MFCC of each track)
- Accuracy increases as the test set gets smaller, which implies that a lot of these machine learning models are heavily data-driven (i.e. feed more data for more performance boosts)
- Currently, optimal benchmark results are achieved with a test set size of 10 percent of the total data
- An illustrative sketch of the 22ms / 16kHz framing follows the module constants below
'''
import os
import glob
import sys
import time
import warnings
warnings.filterwarnings("ignore")
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from processing import mfcc_processing, datasets
from deep_models import models
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.cluster import KMeans
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import normalize
input_path = './data/genres/'
mfcc_path = './data/processed/mfcc/'
have_mfccs = True
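def mfcc_sketch(track_path):
    '''Hedged illustration of the framing described in the module docstring;
    librosa is an assumed stand-in here, the project itself uses its own
    mfcc_processing module.'''
    import librosa
    y, sr = librosa.load(track_path, sr=16000)  # resample to 16kHz
    n_fft = int(0.022 * sr)  # ~22ms analysis frames
    return librosa.feature.mfcc(y=y, sr=sr, n_fft=n_fft, hop_length=n_fft // 2)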
if __name__ == '__main__':
mfccs = None
data = None
if not have_mfccs:
have_mfccs = True
print 'calculating mfccs...'
mfccs = mfcc_processing.write_mfccs(input_path, mfcc_path, True)
else :
print 'retrieving mfccs...'
mfccs = mfcc_processing.read_mfccs(mfcc_path, True)
weak = False
if weak:
data = mfcc_processing.featurize_data(mfccs, weak=True, verbose=True)
print
svm_classifier(data, test_size=0.10, weak=True, verbose=True)
print
knn_classifier(data, test_size=0.10, weak=True, verbose=True)
print
mfcc_nn_model(num_epochs=10, test_size=0.10, weak=True, verbose=True)
else:
data = mfcc_processing.featurize_data(mfccs, weak=False, verbose=True)
print
svm_classifier(data, test_size=0.10, weak=False, verbose=True)
print
knn_classifier(data, test_size=0.10, weak=False, verbose=True)
print
mfcc_nn_model(num_epochs=10, test_size=0.10, weak=False, verbose=True)
| 32.710526 | 187 | 0.665809 |
d5d07c6912264faadbd6b41b6918a6a30e91f2bc
| 8,638 |
py
|
Python
|
plugins/Operations/Crypto/blowfish_encrypt_dialog.py
|
nmantani/FileInsight-plugins
|
a6b036672e4c72ed06678729a86293212b7213db
|
[
"BSD-2-Clause",
"CC0-1.0",
"MIT"
] | 120 |
2015-02-28T14:49:12.000Z
|
2022-03-27T07:13:24.000Z
|
plugins/Operations/Crypto/blowfish_encrypt_dialog.py
|
nmantani/FileInsight-plugins
|
a6b036672e4c72ed06678729a86293212b7213db
|
[
"BSD-2-Clause",
"CC0-1.0",
"MIT"
] | null | null | null |
plugins/Operations/Crypto/blowfish_encrypt_dialog.py
|
nmantani/FileInsight-plugins
|
a6b036672e4c72ed06678729a86293212b7213db
|
[
"BSD-2-Clause",
"CC0-1.0",
"MIT"
] | 17 |
2016-04-04T15:53:03.000Z
|
2021-12-10T18:07:59.000Z
|
#
# Blowfish encrypt - Encrypt selected region with Blowfish
#
# Copyright (c) 2019, Nobutaka Mantani
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import binascii
import re
import sys
import time
import tkinter
import tkinter.ttk
import tkinter.messagebox
try:
import Cryptodome.Cipher.Blowfish
import Cryptodome.Util.Padding
except ImportError:
exit(-1) # PyCryptodome is not installed
# Print selected items
# Receive data
data = sys.stdin.buffer.read()
# Create input dialog
root = tkinter.Tk()
root.title("Blowfish encrypt")
root.protocol("WM_DELETE_WINDOW", (lambda r=root: r.quit()))
label_mode = tkinter.Label(root, text="Mode:")
label_mode.grid(row=0, column=0, padx=5, pady=5, sticky="w")
combo_mode = tkinter.ttk.Combobox(root, width=5, state="readonly")
combo_mode["values"] = ("ECB", "CBC", "CFB", "OFB", "CTR")
combo_mode.current(0)
combo_mode.grid(row=0, column=1, padx=5, pady=5, sticky="w")
label_key_type = tkinter.Label(root, text="Key type:")
label_key_type.grid(row=1, column=0, padx=5, pady=5, sticky="w")
combo_key_type = tkinter.ttk.Combobox(root, width=5, state="readonly")
combo_key_type["values"] = ("Text", "Hex")
combo_key_type.current(0)
combo_key_type.grid(row=1, column=1, padx=5, pady=5)
label_key = tkinter.Label(root, text="Key:")
label_key.grid(row=1, column=2, padx=5, pady=5, sticky="w")
entry_key = tkinter.Entry(width=32)
entry_key.grid(row=1, column=3, padx=5, pady=5, sticky="w")
entry_key.focus() # Focus to this widget
label_iv_type = tkinter.Label(root, text="IV type:")
label_iv_type.grid(row=2, column=0, padx=5, pady=5, sticky="w")
combo_iv_type = tkinter.ttk.Combobox(root, width=5, state="readonly")
combo_iv_type["values"] = ("Text", "Hex")
combo_iv_type.current(0)
combo_iv_type.grid(row=2, column=1, padx=5, pady=5)
label_iv = tkinter.Label(root, text="IV:")
label_iv.grid(row=2, column=2, padx=5, pady=5, sticky="w")
entry_iv = tkinter.Entry(width=32)
entry_iv.grid(row=2, column=3, padx=5, pady=5, sticky="w")
button = tkinter.Button(root, text="OK", command=(lambda data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei)))
button.grid(row=3, column=0, padx=5, pady=5, columnspan=4)
label_ctr = tkinter.Label(root, text="Note:\nThe first seven bytes of IV are used as the nonce and the last one\nbyte is used as the initial value of the counter (compatible with\nCyberChef).", justify="left")
label_ctr.grid(row=4, column=0, padx=5, pady=5, columnspan=4, sticky="w")
label_ctr.grid_remove()
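# A sketch of the CTR construction described in the note above, assumed to
# mirror the stripped encrypt() helper: Blowfish has an 8-byte block, so a
# 7-byte nonce plus a 1-byte initial counter value fills one counter block.
def make_ctr_cipher(key, iv):
    return Cryptodome.Cipher.Blowfish.new(
        key, Cryptodome.Cipher.Blowfish.MODE_CTR,
        nonce=iv[:7], initial_value=iv[7])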
# Set callback functions
combo_mode.bind('<<ComboboxSelected>>', lambda event, root=root, cm=combo_mode, cit=combo_iv_type, ei=entry_iv, lc=label_ctr: combo_mode_selected(root, cm, cit, ei, lc))
combo_mode.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei))
combo_key_type.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei))
entry_key.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei))
combo_iv_type.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei))
entry_iv.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei))
button.bind("<Return>", lambda event, data=data, root=root, cm=combo_mode, ckt=combo_key_type, ek=entry_key, cit=combo_iv_type, ei=entry_iv: encrypt(data, root, cm, ckt, ek, cit, ei))
# These are disabled in the initial state (ECB mode)
combo_iv_type.configure(state = "disabled")
entry_iv.configure(state = "disabled")
# Adjust window position
sw = root.winfo_screenwidth()
sh = root.winfo_screenheight()
root.update_idletasks() # Necessary to get width and height of the window
ww = root.winfo_width()
wh = root.winfo_height()
root.geometry('+%d+%d' % ((sw/2) - (ww/2), (sh/2) - (wh/2)))
root.mainloop()
exit(1) # Not decrypted
| 44.297436 | 210 | 0.678629 |
d5d16bd87f7bfb96643e0e75dbd1d494645de558
| 5,734 |
py
|
Python
|
dns/rdtypes/IN/IPSECKEY.py
|
preo/dnspython
|
465785f85f87508209117264c677080e901e957c
|
[
"0BSD"
] | null | null | null |
dns/rdtypes/IN/IPSECKEY.py
|
preo/dnspython
|
465785f85f87508209117264c677080e901e957c
|
[
"0BSD"
] | null | null | null |
dns/rdtypes/IN/IPSECKEY.py
|
preo/dnspython
|
465785f85f87508209117264c677080e901e957c
|
[
"0BSD"
] | null | null | null |
# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import cStringIO
import struct
import dns.exception
import dns.inet
import dns.name
| 38.743243 | 81 | 0.591734 |
d5d20f7a81be3ee7ffae45e074584da66ec78259
| 210 |
py
|
Python
|
multistream_select/__init__.py
|
Projjol/py-multistream-select
|
624becaaeefa0a76d6841e27fbf7dea3240d2fe0
|
[
"MIT"
] | null | null | null |
multistream_select/__init__.py
|
Projjol/py-multistream-select
|
624becaaeefa0a76d6841e27fbf7dea3240d2fe0
|
[
"MIT"
] | null | null | null |
multistream_select/__init__.py
|
Projjol/py-multistream-select
|
624becaaeefa0a76d6841e27fbf7dea3240d2fe0
|
[
"MIT"
] | null | null | null |
__version = '0.1.0'
__all__ = ['MultiStreamSelect', 'hexify']
__author__ = 'Natnael Getahun ([email protected])'
__name__ = 'multistream'
from .multistream import MultiStreamSelect
from .utils import hexify
| 26.25 | 52 | 0.766667 |
d5d2163f998824781f4cf67aa89ebfc2260b9f51
| 42,648 |
py
|
Python
|
python/input_reader.py
|
dagesundholm/DAGE
|
0d0ef1d3e74ba751ca4d288db9f1ac7f9a822138
|
[
"MIT"
] | 3 |
2018-03-29T08:48:57.000Z
|
2020-02-16T22:40:22.000Z
|
python/input_reader.py
|
dagesundholm/DAGE
|
0d0ef1d3e74ba751ca4d288db9f1ac7f9a822138
|
[
"MIT"
] | null | null | null |
python/input_reader.py
|
dagesundholm/DAGE
|
0d0ef1d3e74ba751ca4d288db9f1ac7f9a822138
|
[
"MIT"
] | 1 |
2019-04-08T14:40:57.000Z
|
2019-04-08T14:40:57.000Z
|
"""---------------------------------------------------------------------------------*
* Copyright (c) 2010-2018 Pauli Parkkinen, Eelis Solala, Wen-Hua Xu, *
* Sergio Losilla, Elias Toivanen, Jonas Juselius *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy *
* of this software and associated documentation files (the "Software"), to deal *
* in the Software without restriction, including without limitation the rights *
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell *
* copies of the Software, and to permit persons to whom the Software is *
* furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in all*
* copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, *
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE *
* SOFTWARE. *
*----------------------------------------------------------------------------------"""
# Input file reader
import os
import sys
import xml.etree.ElementTree as ET
import numpy, ast
from .generate_objects import SettingsGenerator
from collections import OrderedDict
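# The InputXML class body is not part of this extract; a minimal sketch of
# its assumed interface (not the original implementation):
class InputXML(object):

    def __init__(self, filename=None, definition_filename=None):
        self.tree = ET.parse(filename)
        self.definition = (ET.parse(definition_filename)
                           if definition_filename else None)

    def prepare(self):
        """Flatten the parsed XML into keyword arguments for the Fortran
        interface; the real mapping is driven by input_parameters.xml."""
        return OrderedDict((child.tag, child.text)
                           for child in self.tree.getroot())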
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Give the input file name as an input.")
else:
inp = InputXML(filename = sys.argv[1], definition_filename = os.path.dirname(os.path.realpath(__file__))+"/input_parameters.xml")
import dage_fortran
dage_fortran.python_interface.run(**inp.prepare())
| 47.020948 | 179 | 0.559487 |
d5d27a9aec4e8518393324c6681b93cf4f6993a5
| 506 |
py
|
Python
|
tests/test_mate_hashes_methods.py
|
MacHu-GWU/pathlib_mate-project
|
5b8f5441e681730d02209211cce7f46986147418
|
[
"MIT"
] | 9 |
2017-09-07T21:21:43.000Z
|
2020-10-11T09:47:24.000Z
|
tests/test_mate_hashes_methods.py
|
MacHu-GWU/pathlib_mate-project
|
5b8f5441e681730d02209211cce7f46986147418
|
[
"MIT"
] | 2 |
2018-10-16T14:30:26.000Z
|
2020-12-05T02:40:46.000Z
|
tests/test_mate_hashes_methods.py
|
MacHu-GWU/pathlib_mate-project
|
5b8f5441e681730d02209211cce7f46986147418
|
[
"MIT"
] | 2 |
2017-09-05T14:06:01.000Z
|
2021-06-29T15:31:13.000Z
|
# -*- coding: utf-8 -*-
import pytest
from pathlib_mate.pathlib2 import Path
if __name__ == "__main__":
import os
basename = os.path.basename(__file__)
pytest.main([basename, "-s", "--tb=native"])
| 23 | 59 | 0.592885 |
d5d2a60bb0dcf9c3c7f564f0707f97c252020d5c
| 4,183 |
py
|
Python
|
tools/lib/auth.py
|
shoes22/openpilot
|
a965de3c96a53b67d106cfa775e3407db82dd0e1
|
[
"MIT"
] | 121 |
2019-03-27T06:34:51.000Z
|
2021-06-15T14:37:29.000Z
|
tools/lib/auth.py
|
shoes22/openpilot
|
a965de3c96a53b67d106cfa775e3407db82dd0e1
|
[
"MIT"
] | 54 |
2019-04-11T08:51:58.000Z
|
2021-06-13T17:04:22.000Z
|
tools/lib/auth.py
|
shoes22/openpilot
|
a965de3c96a53b67d106cfa775e3407db82dd0e1
|
[
"MIT"
] | 139 |
2019-07-16T07:25:05.000Z
|
2021-06-09T11:27:53.000Z
|
#!/usr/bin/env python3
"""
Usage::
usage: auth.py [-h] [{google,apple,github,jwt}] [jwt]
Login to your comma account
positional arguments:
{google,apple,github,jwt}
jwt
optional arguments:
-h, --help show this help message and exit
Examples::
./auth.py # Log in with Google account
./auth.py github # Log in with GitHub account
./auth.py jwt ey......hw # Log in with a JWT from https://jwt.comma.ai, for use in CI
"""
import argparse
import sys
import pprint
import webbrowser
from http.server import BaseHTTPRequestHandler, HTTPServer
from typing import Any, Dict
from urllib.parse import parse_qs, urlencode
from tools.lib.api import APIError, CommaApi, UnauthorizedError
from tools.lib.auth_config import set_token, get_token
PORT = 3000
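# The redirect-server classes are missing from this extract; a minimal
# assumed implementation that captures the OAuth callback query string:
class ClientRedirectServer(HTTPServer):
  query_params: Dict[str, Any] = {}

class ClientRedirectHandler(BaseHTTPRequestHandler):
  def do_GET(self):
    # Stash the redirect's query parameters on the server object so
    # login() can poll them, then show a simple confirmation page.
    query = parse_qs(self.path.split('?', 1)[-1])
    self.server.query_params = {k: v[0] for k, v in query.items()}
    self.send_response(200)
    self.send_header('Content-type', 'text/plain')
    self.end_headers()
    self.wfile.write(b'Return to the CLI to continue')

  def log_message(self, *args):
    pass  # keep the local HTTP server quiet while polling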
def auth_redirect_link(method):
provider_id = {
'google': 'g',
'apple': 'a',
'github': 'h',
}[method]
params = {
'redirect_uri': f"https://api.comma.ai/v2/auth/{provider_id}/redirect/",
'state': f'service,localhost:{PORT}',
}
if method == 'google':
params.update({
'type': 'web_server',
'client_id': '45471411055-ornt4svd2miog6dnopve7qtmh5mnu6id.apps.googleusercontent.com',
'response_type': 'code',
'scope': 'https://www.googleapis.com/auth/userinfo.email',
'prompt': 'select_account',
})
return 'https://accounts.google.com/o/oauth2/auth?' + urlencode(params)
elif method == 'github':
params.update({
'client_id': '28c4ecb54bb7272cb5a4',
'scope': 'read:user',
})
return 'https://github.com/login/oauth/authorize?' + urlencode(params)
elif method == 'apple':
params.update({
'client_id': 'ai.comma.login',
'response_type': 'code',
'response_mode': 'form_post',
'scope': 'name email',
})
return 'https://appleid.apple.com/auth/authorize?' + urlencode(params)
else:
raise NotImplementedError(f"no redirect implemented for method {method}")
def login(method):
oauth_uri = auth_redirect_link(method)
web_server = ClientRedirectServer(('localhost', PORT), ClientRedirectHandler)
print(f'To sign in, use your browser and navigate to {oauth_uri}')
webbrowser.open(oauth_uri, new=2)
while True:
web_server.handle_request()
if 'code' in web_server.query_params:
break
elif 'error' in web_server.query_params:
print('Authentication Error: "%s". Description: "%s" ' % (
web_server.query_params['error'],
web_server.query_params.get('error_description')), file=sys.stderr)
break
try:
auth_resp = CommaApi().post('v2/auth/', data={'code': web_server.query_params['code'], 'provider': web_server.query_params['provider']})
set_token(auth_resp['access_token'])
except APIError as e:
print(f'Authentication Error: {e}', file=sys.stderr)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Login to your comma account')
parser.add_argument('method', default='google', const='google', nargs='?', choices=['google', 'apple', 'github', 'jwt'])
parser.add_argument('jwt', nargs='?')
args = parser.parse_args()
if args.method == 'jwt':
if args.jwt is None:
print("method JWT selected, but no JWT was provided")
exit(1)
set_token(args.jwt)
else:
login(args.method)
try:
me = CommaApi(token=get_token()).get('/v1/me')
print("Authenticated!")
pprint.pprint(me)
except UnauthorizedError:
print("Got invalid JWT")
exit(1)
| 28.650685 | 140 | 0.672962 |
d5d313602da6567472c45152b7f1fb43db070947
| 901 |
py
|
Python
|
datedfolder.py
|
IgorRidanovic/flapi
|
7eb35cc670a5d1a06b01fb13982ffa63345369de
|
[
"MIT"
] | 3 |
2020-09-21T13:07:05.000Z
|
2021-01-29T19:44:02.000Z
|
datedfolder.py
|
IgorRidanovic/flapi
|
7eb35cc670a5d1a06b01fb13982ffa63345369de
|
[
"MIT"
] | null | null | null |
datedfolder.py
|
IgorRidanovic/flapi
|
7eb35cc670a5d1a06b01fb13982ffa63345369de
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Create a Baselight folder with current date and time stamp.
You must refresh the Job Manager after running the script.
Copyright (c) 2020 Igor Rianovi, Igor [at] hdhead.com, www.metafide.com
'''
import flapi
from getflapi import getflapi
from datetime import datetime
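# make_dated_folder() is not included in this extract. A hedged sketch of
# the timestamp naming it performs; the FLAPI calls that actually create
# the folder inside the scene are deliberately not reproduced here.
def make_dated_folder(ip, scene_name, folder_name):
    stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    full_name = '%s %s' % (folder_name, stamp)
    # ...create full_name inside scene_name via the flapi connection...
    return full_name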
if __name__=='__main__':
conn, msg = getflapi()
print msg + '\n'
ip = 'localhost'
currentScene = 'Test01'
folderName = 'MyFolder'
make_dated_folder(ip, currentScene, folderName)
| 23.710526 | 73 | 0.662597 |
d5d51d8a99234145a06442d575334e8b8cd54c32
| 4,762 |
py
|
Python
|
elastica/wrappers/callbacks.py
|
zhidou2/PyElastica
|
0f5502bc5349ab5e5dc794d8dfc82b7c2bd69eb6
|
[
"MIT"
] | 71 |
2020-04-15T17:02:42.000Z
|
2022-03-26T04:53:51.000Z
|
elastica/wrappers/callbacks.py
|
zhidou2/PyElastica
|
0f5502bc5349ab5e5dc794d8dfc82b7c2bd69eb6
|
[
"MIT"
] | 59 |
2020-05-15T03:51:46.000Z
|
2022-03-28T13:53:01.000Z
|
elastica/wrappers/callbacks.py
|
zhidou2/PyElastica
|
0f5502bc5349ab5e5dc794d8dfc82b7c2bd69eb6
|
[
"MIT"
] | 57 |
2020-06-17T20:34:02.000Z
|
2022-03-16T08:09:54.000Z
|
__doc__ = """
CallBacks
---------
Provides the callback interface to collect data over time (see `callback_functions.py`).
"""
from elastica.callback_functions import CallBackBaseClass
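# A hedged usage sketch (not part of this wrapper module): a callback that
# records simulation state every `step_skip` steps through the
# CallBackBaseClass.make_callback(system, time, current_step) interface.
class RecordPositions(CallBackBaseClass):
    def __init__(self, step_skip, callback_params):
        CallBackBaseClass.__init__(self)
        self.every = step_skip
        self.callback_params = callback_params

    def make_callback(self, system, time, current_step):
        if current_step % self.every == 0:
            self.callback_params["time"].append(time)
            self.callback_params["position"].append(
                system.position_collection.copy())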
| 29.214724 | 94 | 0.584208 |
d5d580ea25dd4fecd8cfeb8103bdbe268c389416
| 2,961 |
py
|
Python
|
vitrage/evaluator/template_data.py
|
HoonMinJeongUm/Hunmin-vitrage
|
37d43d6b78e8b76fa6a2e83e5c739e9e4917a7b6
|
[
"Apache-2.0"
] | null | null | null |
vitrage/evaluator/template_data.py
|
HoonMinJeongUm/Hunmin-vitrage
|
37d43d6b78e8b76fa6a2e83e5c739e9e4917a7b6
|
[
"Apache-2.0"
] | null | null | null |
vitrage/evaluator/template_data.py
|
HoonMinJeongUm/Hunmin-vitrage
|
37d43d6b78e8b76fa6a2e83e5c739e9e4917a7b6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import namedtuple
ActionSpecs = namedtuple(
'ActionSpecs', ['id', 'type', 'targets', 'properties'])
EdgeDescription = namedtuple('EdgeDescription', ['edge', 'source', 'target'])
ENTITY = 'entity'
RELATIONSHIP = 'relationship'
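# Hedged usage sketch (values are hypothetical) for the tuples above:
# specs = ActionSpecs(id='set_state-1', type='set_state',
#                     targets={'target': 'host'}, properties={'state': 'ERROR'})
# edge_desc = EdgeDescription(edge=edge, source=source_vertex, target=target_vertex)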
# noinspection PyAttributeOutsideInit
| 27.933962 | 77 | 0.667342 |
d5d5b53df6261a4974bd6d3bb678fc4435a6413e
| 15,032 |
py
|
Python
|
scripts/summarize-kmer-counts.py
|
rpetit3/anthrax-metagenome-study
|
b4a6f2c4d49b57aeae898afd6a95c8f6cb437945
|
[
"MIT"
] | null | null | null |
scripts/summarize-kmer-counts.py
|
rpetit3/anthrax-metagenome-study
|
b4a6f2c4d49b57aeae898afd6a95c8f6cb437945
|
[
"MIT"
] | null | null | null |
scripts/summarize-kmer-counts.py
|
rpetit3/anthrax-metagenome-study
|
b4a6f2c4d49b57aeae898afd6a95c8f6cb437945
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
"""Parse through the simulated sequencing group specific kmer counts."""
import argparse as ap
from collections import OrderedDict
import glob
import gzip
import os
import sys
import time
import numpy as np
import multiprocessing as mp
SAMPLES = OrderedDict()
KMERS = {}
HAMMING = OrderedDict()
SAMPLE_COLS = [
'sample', 'is_bcg', 'is_ba', 'has_lethal', 'simulated_coverage', 'group',
'total_kmers', 'tp', 'tn', 'fp', 'fn',
'kmer_cov_min', 'kmer_cov_mean', 'kmer_cov_median', 'kmer_cov_max',
'non_zero_kmer_cov_min', 'non_zero_kmer_cov_mean',
'non_zero_kmer_cov_median', 'non_zero_kmer_cov_max'
]
KMER_COLS = [
'kmer', 'simulated_coverage', 'group', 'hamming_distance',
'tp', 'tn', 'fp', 'fn',
'group_kmer_cov_min',
'group_kmer_cov_mean',
'group_kmer_cov_median',
'group_kmer_cov_max',
'non_zero_group_kmer_cov_min',
'non_zero_group_kmer_cov_mean',
'non_zero_group_kmer_cov_median',
'non_zero_group_kmer_cov_max',
'outgroup_kmer_cov_min',
'outgroup_kmer_cov_mean',
'outgroup_kmer_cov_median',
'outgroup_kmer_cov_max',
'non_zero_outgroup_kmer_cov_min',
'non_zero_outgroup_kmer_cov_mean',
'non_zero_outgroup_kmer_cov_median',
'non_zero_outgroup_kmer_cov_max'
]
def get_group_status(sample, group):
"""Return if a sample is within a group or not."""
within_group = None
if group == 'ba':
within_group = True if SAMPLES[sample]['is_ba'] == 'True' else False
elif group == 'bcg':
within_group = True if SAMPLES[sample]['is_bcg'] == 'True' else False
else:
# lef
within_group = True if SAMPLES[sample]['has_lethal'] else False
return within_group
def get_coverage_stats(coverage):
"""Return summary stats of a set of coverages."""
non_zero = [c for c in coverage if c]
np_array = np.array(coverage)
non_zero_array = np.array(non_zero)
return {
'min': min(coverage) if coverage else 0,
'median': int(np.median(np_array)) if coverage else 0,
'mean': "{0:.4f}".format(np.mean(np_array)) if coverage else 0,
'max': max(coverage) if coverage else 0,
'non_zero_min': min(non_zero_array) if non_zero else 0,
'non_zero_median': int(np.median(non_zero_array)) if non_zero else 0,
'non_zero_mean': int(round(np.mean(non_zero_array))) if non_zero else 0,
'non_zero_max': max(non_zero_array) if non_zero else 0,
}
def reverse_complement(seq):
"""Reverse complement a DNA sequence."""
complement = {
'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G',
'a': 't', 't': 'a', 'g': 'c', 'c': 'g'
}
return ''.join([complement[b] for b in seq[::-1]])
def parse_counts(counts, sample, coverage, group, skip_kmers=False,
filter_kmers=False):
"""Parse kmer counts."""
within_group = get_group_status(sample, group)
sample_row = {'coverages': [], 'tp': 0, 'tn': 0, 'fp': 0, 'fn': 0}
with gzip.open(counts, 'r') as count_handle:
for line in count_handle:
kmer, count = line.decode().rstrip().split()
count = int(count)
parse = True
if filter_kmers:
parse = kmer in KMERS or reverse_complement(kmer) in KMERS
elif not skip_kmers:
if kmer not in KMERS:
kmer = reverse_complement(kmer)
if within_group:
KMERS[kmer][coverage]['group_coverages'].append(count)
if count:
KMERS[kmer][coverage]['tp'] += 1
else:
KMERS[kmer][coverage]['fn'] += 1
else:
KMERS[kmer][coverage]['outgroup_coverages'].append(count)
if count:
KMERS[kmer][coverage]['fp'] += 1
else:
KMERS[kmer][coverage]['tn'] += 1
if parse:
sample_row['coverages'].append(count)
if within_group:
if count:
sample_row['tp'] += 1
else:
sample_row['fn'] += 1
else:
if count:
sample_row['fp'] += 1
else:
sample_row['tn'] += 1
coverage_stats = get_coverage_stats(sample_row['coverages'])
SAMPLES[sample]['results'].append({
'simulated_coverage': coverage,
'within_group': within_group,
'tp': sample_row['tp'],
'tn': sample_row['tn'],
'fp': sample_row['fp'],
'fn': sample_row['fn'],
'kmer_cov_min': coverage_stats['min'],
'kmer_cov_mean': coverage_stats['mean'],
'kmer_cov_median': coverage_stats['median'],
'kmer_cov_max': coverage_stats['max'],
'non_zero_kmer_cov_min': coverage_stats['non_zero_min'],
'non_zero_kmer_cov_mean': coverage_stats['non_zero_mean'],
'non_zero_kmer_cov_median': coverage_stats['non_zero_median'],
'non_zero_kmer_cov_max': coverage_stats['non_zero_max'],
})
def parse_summary(summary):
"""Parse Summary file."""
cols = None
with open(summary, 'r') as summary_handle:
# Column Names:
# accession, gi, is_bcg, is_ba, species, genome_size, description
for line in summary_handle:
line = line.rstrip()
if line.startswith('#'):
cols = line.replace('#', '').split('\t')
else:
row = dict(zip(cols, line.split('\t')))
SAMPLES[row['accession']] = row
if row['accession'] == 'NZ_CP009941':
# NZ_CP009941 - Bacillus cereus w/ lef on chromosome
SAMPLES[row['accession']]['has_lethal'] = True
else:
SAMPLES[row['accession']]['has_lethal'] = False
SAMPLES[row['accession']]['results'] = []
def print_sample_summary(file_output):
"""Print the final per sample summaries."""
with open(file_output, 'w') as output_handle:
output_handle.write(("\t".join(SAMPLE_COLS)))
output_handle.write("\n")
for sample in SAMPLES:
if SAMPLES[sample]['results']:
for result in SAMPLES[sample]['results']:
row = {
'sample': sample,
'is_bcg': SAMPLES[sample]['is_bcg'],
'is_ba': SAMPLES[sample]['is_ba'],
'has_lethal': SAMPLES[sample]['has_lethal'],
'simulated_coverage': result['simulated_coverage'],
'group': args.group,
'within_group': result['within_group'],
'total_kmers': total_kmers,
'tp': result['tp'],
'tn': result['tn'],
'fp': result['fp'],
'fn': result['fn'],
'kmer_cov_min': result['kmer_cov_min'],
'kmer_cov_mean': result['kmer_cov_mean'],
'kmer_cov_median': result['kmer_cov_median'],
'kmer_cov_max': result['kmer_cov_max'],
'non_zero_kmer_cov_min': result['non_zero_kmer_cov_min'],
'non_zero_kmer_cov_mean': result['non_zero_kmer_cov_mean'],
'non_zero_kmer_cov_median': result['non_zero_kmer_cov_median'],
'non_zero_kmer_cov_max': result['non_zero_kmer_cov_max']
}
output_handle.write(("\t".join([
str(row[col]) for col in SAMPLE_COLS
])))
output_handle.write("\n")
def print_kmer_summary(file_output):
"""Print the final per kmer summaries."""
with open(file_output, 'w') as output_handle:
output_handle.write(("\t".join(KMER_COLS)))
output_handle.write("\n")
for kmer, coverages in KMERS.items():
for coverage in coverages:
within_group = get_coverage_stats(
KMERS[kmer][coverage]['group_coverages']
)
outgroup = get_coverage_stats(
KMERS[kmer][coverage]['outgroup_coverages']
)
row = {
'kmer': kmer,
'simulated_coverage': coverage,
'group': args.group,
'hamming_distance': HAMMING[kmer],
'tp': KMERS[kmer][coverage]['tp'],
'tn': KMERS[kmer][coverage]['tn'],
'fp': KMERS[kmer][coverage]['fp'],
'fn': KMERS[kmer][coverage]['fn'],
'group_kmer_cov_min': within_group['min'],
'group_kmer_cov_mean': within_group['mean'],
'group_kmer_cov_median': within_group['median'],
'group_kmer_cov_max': within_group['max'],
'non_zero_group_kmer_cov_min': within_group['non_zero_min'],
'non_zero_group_kmer_cov_mean': within_group['non_zero_mean'],
'non_zero_group_kmer_cov_median': within_group['non_zero_median'],
'non_zero_group_kmer_cov_max': within_group['non_zero_max'],
'outgroup_kmer_cov_min': outgroup['min'],
'outgroup_kmer_cov_mean': outgroup['mean'],
'outgroup_kmer_cov_median': outgroup['median'],
'outgroup_kmer_cov_max': outgroup['max'],
'non_zero_outgroup_kmer_cov_min': outgroup['non_zero_min'],
'non_zero_outgroup_kmer_cov_mean': outgroup['non_zero_mean'],
'non_zero_outgroup_kmer_cov_median': outgroup['non_zero_median'],
'non_zero_outgroup_kmer_cov_max': outgroup['non_zero_max'],
}
output_handle.write(("\t".join([
str(row[col]) for col in KMER_COLS
])))
output_handle.write("\n")
def read_lines(input_file):
"""Return lines in a text file as a list."""
lines = []
with open(input_file, 'r') as input_handle:
for line in input_handle:
lines.append(line.rstrip())
return lines
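# The parse_kmers/parse_filter_kmers definitions are missing from this
# extract. Minimal sketches follow; the kmer file layout (one kmer per line,
# optionally followed by a hamming distance column) is an assumption.
def parse_filter_kmers(kmers_file):
    """Register kmers used only to filter counts (sketch)."""
    for line in read_lines(kmers_file):
        KMERS[line.split()[0]] = True


def parse_kmers(kmers_file, coverages, skip_kmers=False, has_hamming=True):
    """Initialize per-kmer, per-coverage tallies (sketch)."""
    for line in read_lines(kmers_file):
        cols = line.split()
        kmer = cols[0]
        HAMMING[kmer] = int(cols[1]) if has_hamming and len(cols) > 1 else 0
        if skip_kmers:
            KMERS[kmer] = True
            continue
        KMERS[kmer] = {
            coverage: {'group_coverages': [], 'outgroup_coverages': [],
                       'tp': 0, 'tn': 0, 'fp': 0, 'fn': 0}
            for coverage in coverages
        }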
if __name__ == '__main__':
parser = ap.ArgumentParser(
prog='summarize-kmer-counts.py', conflict_handler='resolve',
description=("Summarize kmer counts of each simulation.")
)
parser.add_argument('summary', type=str, metavar="SUMMARY",
help='Summary of Bacillus genomes.')
parser.add_argument('directory', type=str, metavar="SIMUALTION_DIR",
help='Directory with group specific 31-mer counts.')
parser.add_argument('group', type=str, metavar="GROUP",
help='Which group to parse (ba, bcg or lef).')
parser.add_argument('kmers', type=str, metavar="KMERS",
help='Group specific k-mers.')
parser.add_argument('coverages', type=str, metavar="COVERAGES",
help=('Coverages to subsample to.'))
parser.add_argument('outdir', type=str, metavar="OUTDIR",
help='Directory to output to.')
parser.add_argument('--cpu', default=1, type=int, metavar="INT",
help='Number of cores to use (Default: 1)')
parser.add_argument('--single_sample', type=str, metavar="STR",
help='Process a single sample.')
parser.add_argument('--skip_kmers', action='store_true', default=False,
help='Skip kmer processing.')
parser.add_argument('--filter', action='store_true', default=False,
help='Filter counts based on input kmers.')
args = parser.parse_args()
if args.group not in ['ba', 'bcg', 'lef']:
raise Exception("GROUPS must be 'ba', 'bcg' or 'lef'")
coverages = read_lines(args.coverages)
print("Parsing Summary")
parse_summary(args.summary)
print("Parsing Kmers")
if args.filter:
print("Filtering Kmers")
args.skip_kmers = True
parse_filter_kmers(args.kmers)
else:
print("Parsing Kmers")
parse_kmers(args.kmers, coverages, skip_kmers=args.skip_kmers,
has_hamming=False if args.group == 'lef' else True)
total_kmers = len(KMERS)
current = 1
samples = list(SAMPLES.keys())
if args.single_sample:
samples = [args.single_sample]
total = len(samples)
for sample in samples:
path = "{0}/{1}".format(args.directory, sample)
if os.path.exists(path):
print("Working on {0} ({1} of {2})".format(sample, current, total))
current += 1
count_files = sorted(glob.glob(
"{0}/*-{1}.txt.gz".format(path, args.group)
))
for count_file in count_files:
coverage = os.path.basename(count_file).split('-')[1]
parse_counts(count_file, sample, coverage, args.group,
skip_kmers=args.skip_kmers,
filter_kmers=args.filter)
print("Output sample summary")
if args.single_sample:
print_sample_summary("{0}/count-summary-{1}-{2}.txt".format(
args.outdir, args.single_sample, args.group
))
else:
print_sample_summary("{0}/count-summary-sample-{1}.txt".format(
args.outdir, args.group
))
if not args.skip_kmers:
print("Output kmer summary")
if args.single_sample:
print_kmer_summary("{0}/count-summary-kmer-{1}-{2}.txt".format(
args.outdir, args.single_sample, args.group
))
else:
print_kmer_summary("{0}/count-summary-kmer-{1}.txt".format(
args.outdir, args.group
))
| 40.408602 | 87 | 0.549494 |
d5d6cadbdf0418e7793af6364477d1005bd12ded
| 327 |
py
|
Python
|
movies/exceptions.py
|
te0dor/netguru-movies
|
8e2cc4585851ad31794ec9e6a3e4dd70cc0980c5
|
[
"MIT"
] | null | null | null |
movies/exceptions.py
|
te0dor/netguru-movies
|
8e2cc4585851ad31794ec9e6a3e4dd70cc0980c5
|
[
"MIT"
] | null | null | null |
movies/exceptions.py
|
te0dor/netguru-movies
|
8e2cc4585851ad31794ec9e6a3e4dd70cc0980c5
|
[
"MIT"
] | null | null | null |
from marshmallow.exceptions import ValidationError
__all__ = ('ValidationError', 'ObjectDoesNotExist', 'CommunicationError')
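# The exported custom exceptions are not defined in this extract;
# minimal assumed definitions:
class ObjectDoesNotExist(Exception):
    """Raised when a requested object cannot be found."""

class CommunicationError(Exception):
    """Raised when an external service cannot be reached."""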
| 21.8 | 73 | 0.75841 |
d5d747b80a8ea5e6c6c092c35a44d7f1c0635eb8
| 117 |
py
|
Python
|
music_api/apps/music_app/admin.py
|
fejiroofficial/Simple_music
|
2dd9dcf8e5c7374e29dcf96987c053eebf1cba2a
|
[
"MIT"
] | null | null | null |
music_api/apps/music_app/admin.py
|
fejiroofficial/Simple_music
|
2dd9dcf8e5c7374e29dcf96987c053eebf1cba2a
|
[
"MIT"
] | 8 |
2019-12-04T23:40:12.000Z
|
2022-02-10T07:58:28.000Z
|
music_api/apps/music_app/admin.py
|
fejiroofficial/simple_music
|
2dd9dcf8e5c7374e29dcf96987c053eebf1cba2a
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Songs
admin.site.register(Songs)
# Register your models here.
| 16.714286 | 32 | 0.794872 |
d5d9540eff941a339f643e59edbea5708ee6a194
| 2,354 |
py
|
Python
|
scripts/generate_image_series.py
|
JIC-Image-Analysis/senescence-in-field
|
f310e34df377eb807423c38cf27d1ade0782f5a2
|
[
"MIT"
] | null | null | null |
scripts/generate_image_series.py
|
JIC-Image-Analysis/senescence-in-field
|
f310e34df377eb807423c38cf27d1ade0782f5a2
|
[
"MIT"
] | null | null | null |
scripts/generate_image_series.py
|
JIC-Image-Analysis/senescence-in-field
|
f310e34df377eb807423c38cf27d1ade0782f5a2
|
[
"MIT"
] | null | null | null |
# Draw image time series for one or more plots
from jicbioimage.core.image import Image
import dtoolcore
import click
from translate_labels import rack_plot_to_image_plot
from image_utils import join_horizontally, join_vertically
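# The click entry point and helpers are missing from this extract; a hedged
# sketch (dataset handling, tiling order and output name are assumptions;
# per-plot item selection via rack_plot_to_image_plot is omitted):
@click.command()
@click.argument('dataset_uri')
@click.option('--plot', 'plots', type=int, multiple=True,
              help='Plot number(s) to draw.')
def main(dataset_uri, plots):
    dataset = dtoolcore.DataSet.from_uri(dataset_uri)
    strips = []
    for plot in plots:
        # One image per timepoint for this plot, joined left to right.
        idns = sorted(dataset.identifiers)
        images = [Image.from_file(dataset.item_content_abspath(i)) for i in idns]
        strips.append(join_horizontally(images))
    series = join_vertically(strips)
    with open('series.png', 'wb') as fh:
        fh.write(series.png())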
if __name__ == '__main__':
main()
| 26.75 | 75 | 0.712404 |
d5d98de44dcdd3336c05cb1bd2a44010685446b0
| 4,505 |
py
|
Python
|
pytpp/properties/response_objects/system_status.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | 4 |
2022-02-04T23:58:55.000Z
|
2022-02-15T18:53:08.000Z
|
pytpp/properties/response_objects/system_status.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | null | null | null |
pytpp/properties/response_objects/system_status.py
|
Venafi/pytpp
|
42af655b2403b8c9447c86962abd4aaa0201f646
|
[
"MIT"
] | null | null | null |
from pytpp.properties.response_objects.dataclasses import system_status
from pytpp.tools.helpers.date_converter import from_date_string
| 42.102804 | 116 | 0.668368 |
d5d9b42548010e4777afbfec7a0536b09a13b146
| 1,883 |
py
|
Python
|
src/data/dataModule.py
|
mikkelfo/Title-prediction-from-abstract
|
45c9b64c963ae9b00c6b34a3f2b9f7c25496350e
|
[
"MIT"
] | null | null | null |
src/data/dataModule.py
|
mikkelfo/Title-prediction-from-abstract
|
45c9b64c963ae9b00c6b34a3f2b9f7c25496350e
|
[
"MIT"
] | null | null | null |
src/data/dataModule.py
|
mikkelfo/Title-prediction-from-abstract
|
45c9b64c963ae9b00c6b34a3f2b9f7c25496350e
|
[
"MIT"
] | null | null | null |
from typing import Optional
import pytorch_lightning as pl
import torch
from omegaconf import OmegaConf
from torch.utils.data import DataLoader, random_split
from transformers import T5Tokenizer
from src.data.PaperDataset import PaperDataset
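# The ArvixDataModule class body is missing from this extract; a minimal
# sketch under assumed settings (t5-small tokenizer, 90/10 train/val split,
# batch size 8, PaperDataset taking the tokenizer) -- not the original code:
class ArvixDataModule(pl.LightningDataModule):
    def __init__(self, batch_size: int = 8):
        super().__init__()
        self.batch_size = batch_size
        self.tokenizer = T5Tokenizer.from_pretrained("t5-small")

    def setup(self, stage: Optional[str] = None):
        dataset = PaperDataset(self.tokenizer)  # constructor is an assumption
        n_val = len(dataset) // 10
        self.train_set, self.val_set = random_split(
            dataset, [len(dataset) - n_val, n_val],
            generator=torch.Generator().manual_seed(42))

    def train_dataloader(self):
        return DataLoader(self.train_set, batch_size=self.batch_size, shuffle=True)

    def val_dataloader(self):
        return DataLoader(self.val_set, batch_size=self.batch_size)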
if __name__ == "__main__":
dm = ArvixDataModule()
| 32.465517 | 73 | 0.669676 |
d5d9d4fd434e21de06a534a9b7ddf3881191564e
| 10,573 |
py
|
Python
|
shs/gui/RootFrame.py
|
ansobolev/shs
|
7a5f61bd66fe1e8ae047a4d3400b055175a53f4e
|
[
"MIT"
] | 1 |
2016-06-22T13:30:25.000Z
|
2016-06-22T13:30:25.000Z
|
shs/gui/RootFrame.py
|
ansobolev/shs
|
7a5f61bd66fe1e8ae047a4d3400b055175a53f4e
|
[
"MIT"
] | 1 |
2017-12-01T04:49:45.000Z
|
2017-12-01T04:49:45.000Z
|
shs/gui/RootFrame.py
|
ansobolev/shs
|
7a5f61bd66fe1e8ae047a4d3400b055175a53f4e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import time
import subprocess
import wx
import ConfigParser
from wx.lib.mixins.listctrl import getListCtrlSelection
from wx.lib.pubsub import pub
from gui.RootGUI import RootGUI
from StepsDialog import StepsDialog
from PlotFrame import PlotFuncFrame, PlotCorrFrame
import interface
import mbox
| 39.01476 | 120 | 0.608909 |
d5da19776d7a24ff632b755eb644da772dbdd1cc
| 6,063 |
py
|
Python
|
saleor/order/migrations/0015_auto_20170206_0407.py
|
acabezasg/urpi-master
|
7c9cd0fbe6d89dad70652482712ca38b21ba6f84
|
[
"BSD-3-Clause"
] | 6 |
2019-01-06T08:39:20.000Z
|
2022-03-04T18:07:47.000Z
|
saleor/order/migrations/0015_auto_20170206_0407.py
|
valentine217/saleor
|
323963748e6a2702265ec6635b930a234abde4f5
|
[
"BSD-3-Clause"
] | 5 |
2021-03-09T16:22:37.000Z
|
2022-02-10T19:10:03.000Z
|
saleor/order/migrations/0015_auto_20170206_0407.py
|
valentine217/saleor
|
323963748e6a2702265ec6635b930a234abde4f5
|
[
"BSD-3-Clause"
] | 1 |
2020-12-26T10:25:37.000Z
|
2020-12-26T10:25:37.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-06 10:07
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_prices.models
| 45.586466 | 181 | 0.626752 |
d5dac56aef00dcc6cc7a0b56db80a25f82caafb4
| 3,357 |
py
|
Python
|
testrail_client/api/configurations.py
|
tonybearpan/testrail-lib
|
267070bd017bb1d80ac40e1b84ea40dc2c2e3956
|
[
"MIT"
] | null | null | null |
testrail_client/api/configurations.py
|
tonybearpan/testrail-lib
|
267070bd017bb1d80ac40e1b84ea40dc2c2e3956
|
[
"MIT"
] | null | null | null |
testrail_client/api/configurations.py
|
tonybearpan/testrail-lib
|
267070bd017bb1d80ac40e1b84ea40dc2c2e3956
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .base import TestRailAPIBase
| 36.48913 | 165 | 0.597557 |
d5dbc4975d95ff84c0f9a2e3857f8af1ed9103e1
| 40,713 |
py
|
Python
|
tools/proto/transaction_pb2.py
|
ctring/Detock
|
a1171a511d9cd1f79cc3a8d54ec17f759d088de4
|
[
"MIT"
] | null | null | null |
tools/proto/transaction_pb2.py
|
ctring/Detock
|
a1171a511d9cd1f79cc3a8d54ec17f759d088de4
|
[
"MIT"
] | null | null | null |
tools/proto/transaction_pb2.py
|
ctring/Detock
|
a1171a511d9cd1f79cc3a8d54ec17f759d088de4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/transaction.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='proto/transaction.proto',
package='slog',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x17proto/transaction.proto\x12\x04slog\"1\n\x0eMasterMetadata\x12\x0e\n\x06master\x18\x01 \x01(\r\x12\x0f\n\x07\x63ounter\x18\x02 \x01(\r\"\x81\x01\n\nValueEntry\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x11\n\tnew_value\x18\x02 \x01(\x0c\x12\x1b\n\x04type\x18\x03 \x01(\x0e\x32\r.slog.KeyType\x12(\n\x08metadata\x18\x04 \x01(\x0b\x32\x14.slog.MasterMetadataH\x00\x42\n\n\x08optional\"C\n\rKeyValueEntry\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12%\n\x0bvalue_entry\x18\x02 \x01(\x0b\x32\x10.slog.ValueEntry\"j\n\x14TransactionEventInfo\x12%\n\x05\x65vent\x18\x01 \x01(\x0e\x32\x16.slog.TransactionEvent\x12\x0c\n\x04time\x18\x02 \x01(\x03\x12\x0f\n\x07machine\x18\x03 \x01(\x05\x12\x0c\n\x04home\x18\x04 \x01(\x05\"\x8c\x03\n\x13TransactionInternal\x12\n\n\x02id\x18\x01 \x01(\x04\x12#\n\x04type\x18\x02 \x01(\x0e\x32\x15.slog.TransactionType\x12\x0c\n\x04home\x18\x03 \x01(\x05\x12\x1b\n\x13\x63oordinating_server\x18\x04 \x01(\r\x12\x11\n\ttimestamp\x18\x05 \x01(\x03\x12\x1b\n\x13involved_partitions\x18\x06 \x03(\r\x12\x19\n\x11\x61\x63tive_partitions\x18\x07 \x03(\r\x12\x18\n\x10involved_regions\x18\x08 \x03(\r\x12*\n\x06\x65vents\x18\t \x03(\x0b\x32\x1a.slog.TransactionEventInfo\x12\'\n\x1fmh_depart_from_coordinator_time\x18\n \x01(\x03\x12\x1e\n\x16mh_arrive_at_home_time\x18\x0b \x01(\x03\x12!\n\x19mh_enter_local_batch_time\x18\x0c \x01(\x03\x12\x1c\n\x14global_log_positions\x18\r \x03(\x03\"H\n\x11RemasterProcedure\x12\x12\n\nnew_master\x18\x01 \x01(\r\x12\x1f\n\x17is_new_master_lock_only\x18\x02 \x01(\x08\"\x19\n\tProcedure\x12\x0c\n\x04\x61rgs\x18\x01 \x03(\x0c\"1\n\nProcedures\x12#\n\nprocedures\x18\x01 \x03(\x0b\x32\x0f.slog.Procedure\"\xb1\x02\n\x0bTransaction\x12+\n\x08internal\x18\x01 \x01(\x0b\x32\x19.slog.TransactionInternal\x12 \n\x04\x63ode\x18\x02 \x01(\x0b\x32\x10.slog.ProceduresH\x00\x12+\n\x08remaster\x18\x03 \x01(\x0b\x32\x17.slog.RemasterProcedureH\x00\x12!\n\x04keys\x18\x04 \x03(\x0b\x32\x13.slog.KeyValueEntry\x12\x14\n\x0c\x64\x65leted_keys\x18\x05 \x03(\x0c\x12\'\n\x06status\x18\x06 \x01(\x0e\x32\x17.slog.TransactionStatus\x12#\n\nabort_code\x18\x07 \x01(\x0e\x32\x0f.slog.AbortCode\x12\x14\n\x0c\x61\x62ort_reason\x18\x08 \x01(\tB\t\n\x07program*L\n\x0fTransactionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0bSINGLE_HOME\x10\x01\x12\x1b\n\x17MULTI_HOME_OR_LOCK_ONLY\x10\x02*@\n\x11TransactionStatus\x12\x0f\n\x0bNOT_STARTED\x10\x00\x12\r\n\tCOMMITTED\x10\x01\x12\x0b\n\x07\x41\x42ORTED\x10\x02*7\n\tAbortCode\x12\t\n\x05OTHER\x10\x00\x12\x10\n\x0cRATE_LIMITED\x10\x01\x12\r\n\tRESTARTED\x10\x02*\x1e\n\x07KeyType\x12\x08\n\x04READ\x10\x00\x12\t\n\x05WRITE\x10\x01*\xde\x06\n\x10TransactionEvent\x12\x07\n\x03\x41LL\x10\x00\x12\x10\n\x0c\x45NTER_SERVER\x10\x01\x12\x1c\n\x18\x45XIT_SERVER_TO_FORWARDER\x10\x02\x12\x13\n\x0f\x45NTER_FORWARDER\x10\x03\x12\x1f\n\x1b\x45XIT_FORWARDER_TO_SEQUENCER\x10\x04\x12(\n$EXIT_FORWARDER_TO_MULTI_HOME_ORDERER\x10\x05\x12\x1c\n\x18\x45NTER_MULTI_HOME_ORDERER\x10\x06\x12%\n!ENTER_MULTI_HOME_ORDERER_IN_BATCH\x10\x07\x12$\n EXIT_MULTI_HOME_ORDERER_IN_BATCH\x10\x08\x12\x1b\n\x17\x45XIT_MULTI_HOME_ORDERER\x10\t\x12\x13\n\x0f\x45NTER_SEQUENCER\x10\n\x12.\n*EXPECTED_WAIT_TIME_UNTIL_ENTER_LOCAL_BATCH\x10\x0b\x12\x15\n\x11\x45NTER_LOCAL_BATCH\x10\x0c\x12\x1c\n\x18\x45NTER_SEQUENCER_IN_BATCH\x10\r\x12\x1b\n\x17\x45XIT_SEQUENCER_IN_BATCH\x10\x0e\x12\x1e\n\x1a\x45NTER_LOG_MANAGER_IN_BATCH\x10\x0f\x12\x1b\n\x17\x45NTER_LOG_MANAGER_ORDER\x10\x10\x12\x14\n\x10\x45XIT_LOG_MANAGER\x10\x11\x12\x13\n\x0f\x45NTER_SCHEDULER\x10\x12\x12\x16\n\x12\x45NTER_SCHEDULER_LO\x10\x13\x12\x16\n\x12\x45NTER_LOCK_MANAGER\x10\x14\x12\x15\n\x11\x44\x45\x41\x44LOCK_DETECTED\x10\x15\x12\x0e\n\nDISPATCHED\x10\x16\x12\x13\n\x0f\x44ISPATCHED_FAST\x10\x17\x12\x13\n\x0f\x44ISPATCHED_SLOW\x10\x18\x12\x1e\n\x1a\x44ISPATCHED_SLOW_DEADLOCKED\x10\x19\x12\x10\n\x0c\x45NTER_WORKER\x10\x1a\x12\x14\n\x10GOT_REMOTE_READS\x10\x1b\x12\x1f\n\x1bGOT_REMOTE_READS_DEADLOCKED\x10\x1c\x12\x0f\n\x0b\x45XIT_WORKER\x10\x1d\x12\x14\n\x10RETURN_TO_SERVER\x10\x1e\x12\x19\n\x15\x45XIT_SERVER_TO_CLIENT\x10\x1f\x62\x06proto3'
)
_TRANSACTIONTYPE = _descriptor.EnumDescriptor(
name='TransactionType',
full_name='slog.TransactionType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SINGLE_HOME', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MULTI_HOME_OR_LOCK_ONLY', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1252,
serialized_end=1328,
)
_sym_db.RegisterEnumDescriptor(_TRANSACTIONTYPE)
TransactionType = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONTYPE)
_TRANSACTIONSTATUS = _descriptor.EnumDescriptor(
name='TransactionStatus',
full_name='slog.TransactionStatus',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NOT_STARTED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='COMMITTED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ABORTED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1330,
serialized_end=1394,
)
_sym_db.RegisterEnumDescriptor(_TRANSACTIONSTATUS)
TransactionStatus = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONSTATUS)
_ABORTCODE = _descriptor.EnumDescriptor(
name='AbortCode',
full_name='slog.AbortCode',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='OTHER', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RATE_LIMITED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESTARTED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1396,
serialized_end=1451,
)
_sym_db.RegisterEnumDescriptor(_ABORTCODE)
AbortCode = enum_type_wrapper.EnumTypeWrapper(_ABORTCODE)
_KEYTYPE = _descriptor.EnumDescriptor(
name='KeyType',
full_name='slog.KeyType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='READ', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WRITE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1453,
serialized_end=1483,
)
_sym_db.RegisterEnumDescriptor(_KEYTYPE)
KeyType = enum_type_wrapper.EnumTypeWrapper(_KEYTYPE)
_TRANSACTIONEVENT = _descriptor.EnumDescriptor(
name='TransactionEvent',
full_name='slog.TransactionEvent',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='ALL', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_SERVER', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_SERVER_TO_FORWARDER', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_FORWARDER', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_FORWARDER_TO_SEQUENCER', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_FORWARDER_TO_MULTI_HOME_ORDERER', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_MULTI_HOME_ORDERER', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_MULTI_HOME_ORDERER_IN_BATCH', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_MULTI_HOME_ORDERER_IN_BATCH', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_MULTI_HOME_ORDERER', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_SEQUENCER', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXPECTED_WAIT_TIME_UNTIL_ENTER_LOCAL_BATCH', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_LOCAL_BATCH', index=12, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_SEQUENCER_IN_BATCH', index=13, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_SEQUENCER_IN_BATCH', index=14, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_LOG_MANAGER_IN_BATCH', index=15, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_LOG_MANAGER_ORDER', index=16, number=16,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_LOG_MANAGER', index=17, number=17,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_SCHEDULER', index=18, number=18,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_SCHEDULER_LO', index=19, number=19,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_LOCK_MANAGER', index=20, number=20,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEADLOCK_DETECTED', index=21, number=21,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISPATCHED', index=22, number=22,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISPATCHED_FAST', index=23, number=23,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISPATCHED_SLOW', index=24, number=24,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISPATCHED_SLOW_DEADLOCKED', index=25, number=25,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENTER_WORKER', index=26, number=26,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GOT_REMOTE_READS', index=27, number=27,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GOT_REMOTE_READS_DEADLOCKED', index=28, number=28,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_WORKER', index=29, number=29,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RETURN_TO_SERVER', index=30, number=30,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXIT_SERVER_TO_CLIENT', index=31, number=31,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1486,
serialized_end=2348,
)
_sym_db.RegisterEnumDescriptor(_TRANSACTIONEVENT)
TransactionEvent = enum_type_wrapper.EnumTypeWrapper(_TRANSACTIONEVENT)
UNKNOWN = 0
SINGLE_HOME = 1
MULTI_HOME_OR_LOCK_ONLY = 2
NOT_STARTED = 0
COMMITTED = 1
ABORTED = 2
OTHER = 0
RATE_LIMITED = 1
RESTARTED = 2
READ = 0
WRITE = 1
ALL = 0
ENTER_SERVER = 1
EXIT_SERVER_TO_FORWARDER = 2
ENTER_FORWARDER = 3
EXIT_FORWARDER_TO_SEQUENCER = 4
EXIT_FORWARDER_TO_MULTI_HOME_ORDERER = 5
ENTER_MULTI_HOME_ORDERER = 6
ENTER_MULTI_HOME_ORDERER_IN_BATCH = 7
EXIT_MULTI_HOME_ORDERER_IN_BATCH = 8
EXIT_MULTI_HOME_ORDERER = 9
ENTER_SEQUENCER = 10
EXPECTED_WAIT_TIME_UNTIL_ENTER_LOCAL_BATCH = 11
ENTER_LOCAL_BATCH = 12
ENTER_SEQUENCER_IN_BATCH = 13
EXIT_SEQUENCER_IN_BATCH = 14
ENTER_LOG_MANAGER_IN_BATCH = 15
ENTER_LOG_MANAGER_ORDER = 16
EXIT_LOG_MANAGER = 17
ENTER_SCHEDULER = 18
ENTER_SCHEDULER_LO = 19
ENTER_LOCK_MANAGER = 20
DEADLOCK_DETECTED = 21
DISPATCHED = 22
DISPATCHED_FAST = 23
DISPATCHED_SLOW = 24
DISPATCHED_SLOW_DEADLOCKED = 25
ENTER_WORKER = 26
GOT_REMOTE_READS = 27
GOT_REMOTE_READS_DEADLOCKED = 28
EXIT_WORKER = 29
RETURN_TO_SERVER = 30
EXIT_SERVER_TO_CLIENT = 31
_MASTERMETADATA = _descriptor.Descriptor(
name='MasterMetadata',
full_name='slog.MasterMetadata',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='master', full_name='slog.MasterMetadata.master', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='counter', full_name='slog.MasterMetadata.counter', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=82,
)
_VALUEENTRY = _descriptor.Descriptor(
name='ValueEntry',
full_name='slog.ValueEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='slog.ValueEntry.value', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='new_value', full_name='slog.ValueEntry.new_value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='slog.ValueEntry.type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metadata', full_name='slog.ValueEntry.metadata', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='optional', full_name='slog.ValueEntry.optional',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=85,
serialized_end=214,
)
_KEYVALUEENTRY = _descriptor.Descriptor(
name='KeyValueEntry',
full_name='slog.KeyValueEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='slog.KeyValueEntry.key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value_entry', full_name='slog.KeyValueEntry.value_entry', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=216,
serialized_end=283,
)
_TRANSACTIONEVENTINFO = _descriptor.Descriptor(
name='TransactionEventInfo',
full_name='slog.TransactionEventInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event', full_name='slog.TransactionEventInfo.event', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time', full_name='slog.TransactionEventInfo.time', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='machine', full_name='slog.TransactionEventInfo.machine', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='home', full_name='slog.TransactionEventInfo.home', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=285,
serialized_end=391,
)
_TRANSACTIONINTERNAL = _descriptor.Descriptor(
name='TransactionInternal',
full_name='slog.TransactionInternal',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='slog.TransactionInternal.id', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='slog.TransactionInternal.type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='home', full_name='slog.TransactionInternal.home', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='coordinating_server', full_name='slog.TransactionInternal.coordinating_server', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='slog.TransactionInternal.timestamp', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='involved_partitions', full_name='slog.TransactionInternal.involved_partitions', index=5,
number=6, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='active_partitions', full_name='slog.TransactionInternal.active_partitions', index=6,
number=7, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='involved_regions', full_name='slog.TransactionInternal.involved_regions', index=7,
number=8, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='slog.TransactionInternal.events', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mh_depart_from_coordinator_time', full_name='slog.TransactionInternal.mh_depart_from_coordinator_time', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mh_arrive_at_home_time', full_name='slog.TransactionInternal.mh_arrive_at_home_time', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mh_enter_local_batch_time', full_name='slog.TransactionInternal.mh_enter_local_batch_time', index=11,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='global_log_positions', full_name='slog.TransactionInternal.global_log_positions', index=12,
number=13, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=394,
serialized_end=790,
)
_REMASTERPROCEDURE = _descriptor.Descriptor(
name='RemasterProcedure',
full_name='slog.RemasterProcedure',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='new_master', full_name='slog.RemasterProcedure.new_master', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_new_master_lock_only', full_name='slog.RemasterProcedure.is_new_master_lock_only', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=792,
serialized_end=864,
)
_PROCEDURE = _descriptor.Descriptor(
name='Procedure',
full_name='slog.Procedure',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='args', full_name='slog.Procedure.args', index=0,
number=1, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=866,
serialized_end=891,
)
_PROCEDURES = _descriptor.Descriptor(
name='Procedures',
full_name='slog.Procedures',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='procedures', full_name='slog.Procedures.procedures', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=893,
serialized_end=942,
)
_TRANSACTION = _descriptor.Descriptor(
name='Transaction',
full_name='slog.Transaction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='internal', full_name='slog.Transaction.internal', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='code', full_name='slog.Transaction.code', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='remaster', full_name='slog.Transaction.remaster', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='keys', full_name='slog.Transaction.keys', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deleted_keys', full_name='slog.Transaction.deleted_keys', index=4,
number=5, type=12, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='slog.Transaction.status', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='abort_code', full_name='slog.Transaction.abort_code', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='abort_reason', full_name='slog.Transaction.abort_reason', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='program', full_name='slog.Transaction.program',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=945,
serialized_end=1250,
)
_VALUEENTRY.fields_by_name['type'].enum_type = _KEYTYPE
_VALUEENTRY.fields_by_name['metadata'].message_type = _MASTERMETADATA
_VALUEENTRY.oneofs_by_name['optional'].fields.append(
_VALUEENTRY.fields_by_name['metadata'])
_VALUEENTRY.fields_by_name['metadata'].containing_oneof = _VALUEENTRY.oneofs_by_name['optional']
_KEYVALUEENTRY.fields_by_name['value_entry'].message_type = _VALUEENTRY
_TRANSACTIONEVENTINFO.fields_by_name['event'].enum_type = _TRANSACTIONEVENT
_TRANSACTIONINTERNAL.fields_by_name['type'].enum_type = _TRANSACTIONTYPE
_TRANSACTIONINTERNAL.fields_by_name['events'].message_type = _TRANSACTIONEVENTINFO
_PROCEDURES.fields_by_name['procedures'].message_type = _PROCEDURE
_TRANSACTION.fields_by_name['internal'].message_type = _TRANSACTIONINTERNAL
_TRANSACTION.fields_by_name['code'].message_type = _PROCEDURES
_TRANSACTION.fields_by_name['remaster'].message_type = _REMASTERPROCEDURE
_TRANSACTION.fields_by_name['keys'].message_type = _KEYVALUEENTRY
_TRANSACTION.fields_by_name['status'].enum_type = _TRANSACTIONSTATUS
_TRANSACTION.fields_by_name['abort_code'].enum_type = _ABORTCODE
_TRANSACTION.oneofs_by_name['program'].fields.append(
_TRANSACTION.fields_by_name['code'])
_TRANSACTION.fields_by_name['code'].containing_oneof = _TRANSACTION.oneofs_by_name['program']
_TRANSACTION.oneofs_by_name['program'].fields.append(
_TRANSACTION.fields_by_name['remaster'])
_TRANSACTION.fields_by_name['remaster'].containing_oneof = _TRANSACTION.oneofs_by_name['program']
DESCRIPTOR.message_types_by_name['MasterMetadata'] = _MASTERMETADATA
DESCRIPTOR.message_types_by_name['ValueEntry'] = _VALUEENTRY
DESCRIPTOR.message_types_by_name['KeyValueEntry'] = _KEYVALUEENTRY
DESCRIPTOR.message_types_by_name['TransactionEventInfo'] = _TRANSACTIONEVENTINFO
DESCRIPTOR.message_types_by_name['TransactionInternal'] = _TRANSACTIONINTERNAL
DESCRIPTOR.message_types_by_name['RemasterProcedure'] = _REMASTERPROCEDURE
DESCRIPTOR.message_types_by_name['Procedure'] = _PROCEDURE
DESCRIPTOR.message_types_by_name['Procedures'] = _PROCEDURES
DESCRIPTOR.message_types_by_name['Transaction'] = _TRANSACTION
DESCRIPTOR.enum_types_by_name['TransactionType'] = _TRANSACTIONTYPE
DESCRIPTOR.enum_types_by_name['TransactionStatus'] = _TRANSACTIONSTATUS
DESCRIPTOR.enum_types_by_name['AbortCode'] = _ABORTCODE
DESCRIPTOR.enum_types_by_name['KeyType'] = _KEYTYPE
DESCRIPTOR.enum_types_by_name['TransactionEvent'] = _TRANSACTIONEVENT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
MasterMetadata = _reflection.GeneratedProtocolMessageType('MasterMetadata', (_message.Message,), {
'DESCRIPTOR' : _MASTERMETADATA,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.MasterMetadata)
})
_sym_db.RegisterMessage(MasterMetadata)
ValueEntry = _reflection.GeneratedProtocolMessageType('ValueEntry', (_message.Message,), {
'DESCRIPTOR' : _VALUEENTRY,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.ValueEntry)
})
_sym_db.RegisterMessage(ValueEntry)
KeyValueEntry = _reflection.GeneratedProtocolMessageType('KeyValueEntry', (_message.Message,), {
'DESCRIPTOR' : _KEYVALUEENTRY,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.KeyValueEntry)
})
_sym_db.RegisterMessage(KeyValueEntry)
TransactionEventInfo = _reflection.GeneratedProtocolMessageType('TransactionEventInfo', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONEVENTINFO,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.TransactionEventInfo)
})
_sym_db.RegisterMessage(TransactionEventInfo)
TransactionInternal = _reflection.GeneratedProtocolMessageType('TransactionInternal', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTIONINTERNAL,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.TransactionInternal)
})
_sym_db.RegisterMessage(TransactionInternal)
RemasterProcedure = _reflection.GeneratedProtocolMessageType('RemasterProcedure', (_message.Message,), {
'DESCRIPTOR' : _REMASTERPROCEDURE,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.RemasterProcedure)
})
_sym_db.RegisterMessage(RemasterProcedure)
Procedure = _reflection.GeneratedProtocolMessageType('Procedure', (_message.Message,), {
'DESCRIPTOR' : _PROCEDURE,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.Procedure)
})
_sym_db.RegisterMessage(Procedure)
Procedures = _reflection.GeneratedProtocolMessageType('Procedures', (_message.Message,), {
'DESCRIPTOR' : _PROCEDURES,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.Procedures)
})
_sym_db.RegisterMessage(Procedures)
Transaction = _reflection.GeneratedProtocolMessageType('Transaction', (_message.Message,), {
'DESCRIPTOR' : _TRANSACTION,
'__module__' : 'proto.transaction_pb2'
# @@protoc_insertion_point(class_scope:slog.Transaction)
})
_sym_db.RegisterMessage(Transaction)
# @@protoc_insertion_point(module_scope)
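# Not part of the protoc output: a minimal sketch of how the generated
# classes above might be used (field values are illustrative; the module
# path comes from the '__module__' strings registered above).
if __name__ == '__main__':
    txn = Transaction()
    txn.internal.id = 42
    txn.internal.type = SINGLE_HOME          # enum values are module-level constants
    proc = txn.code.procedures.add()         # writing 'code' selects that branch of the 'program' oneof
    proc.args.extend([b'GET', b'some-key'])
    entry = txn.keys.add()
    entry.key = b'some-key'
    entry.value_entry.type = READ

    data = txn.SerializeToString()           # round-trip through the wire format
    clone = Transaction()
    clone.ParseFromString(data)
    assert clone.internal.id == 42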
| 42.277259 | 4,079 | 0.759315 |
d5dc3b0ac30486b996b5ad01fe0ad1a247834e86
| 1,411 |
py
|
Python
|
srl/simulation_test.py
|
google/simple-reinforcement-learning
|
9bdac29427cd5c556d7ea7531b807645f043aae3
|
[
"Apache-2.0"
] | 60 |
2017-01-10T06:35:11.000Z
|
2020-12-19T07:33:40.000Z
|
srl/simulation_test.py
|
google/simple-reinforcement-learning
|
9bdac29427cd5c556d7ea7531b807645f043aae3
|
[
"Apache-2.0"
] | null | null | null |
srl/simulation_test.py
|
google/simple-reinforcement-learning
|
9bdac29427cd5c556d7ea7531b807645f043aae3
|
[
"Apache-2.0"
] | 29 |
2017-01-11T22:15:36.000Z
|
2022-03-17T02:17:37.000Z
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import unittest
from srl import movement
from srl import simulation
from srl import world
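# The test cases themselves are not included in this dump; the standard
# unittest entry point for a module like this would be:
if __name__ == '__main__':
  unittest.main()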
| 32.068182 | 74 | 0.722892 |
d5dc76ad37d386c3045e8ed5404e25dd2364d605
| 26,564 |
py
|
Python
|
src/xmltollvm.py
|
Tejvinder/thesis-ghidra
|
2e59bc48d6bb820ecf6b390e5cf5893fc6ea0216
|
[
"MIT"
] | 101 |
2019-10-22T09:48:19.000Z
|
2022-03-30T07:03:40.000Z
|
src/xmltollvm.py
|
Tejvinder/thesis-ghidra
|
2e59bc48d6bb820ecf6b390e5cf5893fc6ea0216
|
[
"MIT"
] | 4 |
2020-03-06T14:18:47.000Z
|
2021-11-05T04:10:59.000Z
|
src/xmltollvm.py
|
Tejvinder/thesis-ghidra
|
2e59bc48d6bb820ecf6b390e5cf5893fc6ea0216
|
[
"MIT"
] | 15 |
2019-10-22T13:12:39.000Z
|
2022-03-04T20:08:06.000Z
|
from llvmlite import ir
import xml.etree.ElementTree as et
int32 = ir.IntType(32)
int64 = ir.IntType(64)
int1 = ir.IntType(1)
void_type = ir.VoidType()
function_names = []
registers, functions, uniques, extracts = {}, {}, {}, {}
internal_functions = {}
memory = {}
flags = ["ZF", "CF", "OF", "SF"]
pointers = ["RSP", "RIP", "RBP", "EBP", "ESP"]
# noinspection DuplicatedCode
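# Not part of the original translator: a minimal sketch showing how the
# llvmlite ir types declared above are typically used to emit a function
# (the module and function names here are illustrative).
if __name__ == '__main__':
    module = ir.Module(name='sketch')
    fnty = ir.FunctionType(int32, (int32, int32))
    func = ir.Function(module, fnty, name='add')
    builder = ir.IRBuilder(func.append_basic_block(name='entry'))
    a, b = func.args
    builder.ret(builder.add(a, b, name='sum'))
    print(module)  # textual LLVM IR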
| 49.932331 | 118 | 0.571074 |
d5dc93546bee372b907de208f03583a6f68c3b62
| 925 |
py
|
Python
|
modules/WPSeku/modules/discovery/generic/wplisting.py
|
Farz7/Darkness
|
4f3eb5fee3d8a476d001ad319ca22bca274eeac9
|
[
"MIT"
] | 18 |
2020-04-24T06:50:23.000Z
|
2022-03-14T08:00:38.000Z
|
modules/WPSeku/modules/discovery/generic/wplisting.py
|
Farz7/Darkness
|
4f3eb5fee3d8a476d001ad319ca22bca274eeac9
|
[
"MIT"
] | null | null | null |
modules/WPSeku/modules/discovery/generic/wplisting.py
|
Farz7/Darkness
|
4f3eb5fee3d8a476d001ad319ca22bca274eeac9
|
[
"MIT"
] | 5 |
2020-06-28T16:21:22.000Z
|
2022-01-30T14:17:32.000Z
|
#!/usr/bin/env python
# -*- Coding: UTF-8 -*-
#
# WPSeku: Wordpress Security Scanner
#
# @url: https://github.com/m4ll0k/WPSeku
# @author: Momo Outaadi (M4ll0k)
import re
from lib import wphttp
from lib import wpprint
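# The check class body is not included in this dump; a hypothetical
# standalone sketch of a directory-listing probe (the target URL and the
# path list are illustrative, and the real module goes through the
# lib.wphttp/lib.wpprint helpers imported above):
if __name__ == '__main__':
    import requests
    target = 'http://example.com/'
    for path in ('wp-content/uploads/', 'wp-content/plugins/', 'wp-includes/'):
        resp = requests.get(target + path, timeout=10)
        if resp.status_code == 200 and re.search(r'Index of /', resp.text):
            print('Directory listing enabled: ' + target + path)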
| 24.342105 | 82 | 0.656216 |
d5deac526ab7e57ca5c90998d8462e6ef3d52eff
| 350 |
py
|
Python
|
tw2/jit/widgets/__init__.py
|
toscawidgets/tw2.jit
|
c5e8059975115385f225029ba5c7380673524122
|
[
"MIT"
] | 1 |
2020-01-12T05:11:24.000Z
|
2020-01-12T05:11:24.000Z
|
tw2/jit/widgets/__init__.py
|
toscawidgets/tw2.jit
|
c5e8059975115385f225029ba5c7380673524122
|
[
"MIT"
] | null | null | null |
tw2/jit/widgets/__init__.py
|
toscawidgets/tw2.jit
|
c5e8059975115385f225029ba5c7380673524122
|
[
"MIT"
] | null | null | null |
from tw2.jit.widgets.chart import (AreaChart, BarChart, PieChart)
from tw2.jit.widgets.graph import (ForceDirectedGraph, RadialGraph)
from tw2.jit.widgets.tree import (SpaceTree, HyperTree, Sunburst,
Icicle, TreeMap)
from tw2.jit.widgets.ajax import AjaxRadialGraph
from tw2.jit.widgets.sqla import SQLARadialGraph
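# Not in the original __init__: an explicit export list assembled from the
# imports above.
__all__ = [
    'AreaChart', 'BarChart', 'PieChart',
    'ForceDirectedGraph', 'RadialGraph',
    'SpaceTree', 'HyperTree', 'Sunburst', 'Icicle', 'TreeMap',
    'AjaxRadialGraph', 'SQLARadialGraph',
]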
| 43.75 | 67 | 0.742857 |
d5dfc52594a99b2ee5b9d8578f257b3fdecb0fcf
| 4,726 |
py
|
Python
|
bot.py
|
tiianprb/TikTok-Downloader-Bot
|
91b6fd64d5a151c3e439772c69850a18b7562ceb
|
[
"MIT"
] | null | null | null |
bot.py
|
tiianprb/TikTok-Downloader-Bot
|
91b6fd64d5a151c3e439772c69850a18b7562ceb
|
[
"MIT"
] | null | null | null |
bot.py
|
tiianprb/TikTok-Downloader-Bot
|
91b6fd64d5a151c3e439772c69850a18b7562ceb
|
[
"MIT"
] | null | null | null |
import json, requests, os, shlex, asyncio, uuid, shutil
from typing import Tuple
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, CallbackQuery
# Configs
API_HASH = os.environ['API_HASH']
APP_ID = int(os.environ['APP_ID'])
BOT_TOKEN = os.environ['BOT_TOKEN']
downloads = './downloads/{}/'
# Buttons
START_BUTTONS=[
[
InlineKeyboardButton('Source', url='https://github.com/X-Gorn/TikTokDL'),
InlineKeyboardButton('Project Channel', url='https://t.me/xTeamBots'),
],
[InlineKeyboardButton('Author', url='https://t.me/xgorn')],
]
DL_BUTTONS=[
[
InlineKeyboardButton('No Watermark', callback_data='nowm'),
InlineKeyboardButton('Watermark', callback_data='wm'),
],
[InlineKeyboardButton('Audio', callback_data='audio')],
]
# Running bot
xbot = Client('TikTokDL', api_id=APP_ID, api_hash=API_HASH, bot_token=BOT_TOKEN)
# Helpers
# Thanks to FridayUB
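# The helper itself is not included in this dump; given the shlex/asyncio/
# Tuple imports above, it was presumably the usual subprocess wrapper:
async def run_cmd(cmd: str) -> Tuple[str, str, int, int]:
    args = shlex.split(cmd)
    process = await asyncio.create_subprocess_exec(
        *args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    )
    stdout, stderr = await process.communicate()
    return (stdout.decode('utf-8', 'replace').strip(),
            stderr.decode('utf-8', 'replace').strip(),
            process.returncode, process.pid)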
# Start
# Downloader for tiktok
# Callbacks
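# The handler bodies are not included in this dump; a minimal sketch of the
# /start and callback-query handlers (the reply text is illustrative):
@xbot.on_message(filters.command('start') & filters.private)
async def start(client, message):
    await message.reply_text(
        'Send me a TikTok link and pick a download format.',
        reply_markup=InlineKeyboardMarkup(START_BUTTONS),
    )

@xbot.on_callback_query()
async def on_choice(client, cq: CallbackQuery):
    # 'nowm', 'wm' and 'audio' are the callback_data values set in DL_BUTTONS.
    await cq.answer('Selected: ' + cq.data)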
xbot.run()
| 33.757143 | 152 | 0.643039 |
d5e12ba6cbfd755e451e70540ba00bbbd7d6bc8c
| 24,254 |
py
|
Python
|
frontend-gui/rpanel.py
|
skyu0221/660-iot
|
d31f973c93871bfa8122f1b83364d0147d402e9e
|
[
"Apache-2.0"
] | null | null | null |
frontend-gui/rpanel.py
|
skyu0221/660-iot
|
d31f973c93871bfa8122f1b83364d0147d402e9e
|
[
"Apache-2.0"
] | 8 |
2021-03-19T01:36:06.000Z
|
2022-03-12T00:22:43.000Z
|
frontend-gui/rpanel.py
|
skyu0221/660-iot
|
d31f973c93871bfa8122f1b83364d0147d402e9e
|
[
"Apache-2.0"
] | null | null | null |
import wx
import wx.adv
import random
import util
import config
import time
import datetime
import threading
import requests
import json
from functools import partial
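# The panel classes are not included in this dump; a minimal wxPython sketch
# of the kind of panel this module defines (the class name and label are
# hypothetical):
class RequestPanel(wx.Panel):
    def __init__(self, parent):
        super().__init__(parent)
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(wx.StaticText(self, label='Ready'), 0, wx.ALL | wx.EXPAND, 5)
        self.SetSizer(sizer)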
| 34.599144 | 158 | 0.580564 |
d5e19c75c00ba0d6d2f1c4a0eb15f229a98c4904
| 7,259 |
py
|
Python
|
webapp/search.py
|
henchan/memfinity
|
3860985e29b203f0569f60eea68ffb22aaf34b1f
|
[
"MIT"
] | null | null | null |
webapp/search.py
|
henchan/memfinity
|
3860985e29b203f0569f60eea68ffb22aaf34b1f
|
[
"MIT"
] | null | null | null |
webapp/search.py
|
henchan/memfinity
|
3860985e29b203f0569f60eea68ffb22aaf34b1f
|
[
"MIT"
] | null | null | null |
"""High-level search API.
This module implements application-specific search semantics on top of
App Engine's search API. There are two chief operations: querying for
entities, and managing entities in the search facility.
Add and remove Card entities in the search facility:
insert_cards([models.Card])
delete_cards([models.Card])
Query for Card entities:
query_cards(query_string, limit=20) -> search.SearchResults
The results items will have the following fields:
user_key, user_nickname, front, back, info, tag (repeated), added,
modified, source_url
The query_string is free-form, as a user would enter it, and passes
through a custom query processor before the query is submitted to App
Engine. Notably, pass @username to restrict the query to entities
authored by username, and #tag to restrict the query to only documents
matching the given tag. Multiple @usernames or #tags result in an OR
query.
"""
import re
from google.appengine.api import search
from google.appengine.ext import ndb
QUERY_LIMIT = 20
CARD_INDEX_NAME = 'cards'
# Increase this value when _card2doc changes its format so that
# queries can determine the data available on returned documents.
CARD_DOCUMENT_VERSION = '1'
# Ensure we're under the 2000 character limit from
# https://developers.google.com/appengine/docs/python/search/query_strings
MAX_QUERY_LEN = 200
# TODO(chris): it would be better if this module didn't know about
# specific entity types, but instead defined a protocol to get
# metadata from an entity and generate a document.
def insert_cards(cards):
"""Insert or update models.Card entities in the search facility."""
# TODO(chris): should we allow more than 200 cards per call?
assert len(cards) <= 200, len(cards)
card_docs = map(_card2doc, cards)
index = search.Index(name=CARD_INDEX_NAME)
index.put(card_docs)
def delete_cards(cards):
"""Delete models.Card entities from the search facility."""
index = search.Index(name=CARD_INDEX_NAME)
card_doc_ids = map(_card2docid, cards)
index.delete(card_doc_ids)
def query_cards(query_str, limit=QUERY_LIMIT, web_safe_cursor=None,
ids_only=False, user_key=None):
"""Return the search.SearchResults for a query.
ids_only is useful because the returned document IDs are url-safe
keys for models.Card entities.
"""
if web_safe_cursor:
cursor = search.Cursor(web_safe_string=web_safe_cursor)
else:
cursor = None
index = search.Index(name=CARD_INDEX_NAME)
query_processor = _QueryProcessor(
query_str,
name_field='user_nickname',
tag_field='tag',
private_field='private',
user_key_field='user_key',
query_options=search.QueryOptions(limit=limit, cursor=cursor,
ids_only=ids_only),
user_key=user_key)
search_results = index.search(query_processor.query())
# TODO(chris): should this return partially-instantiated
# models.Card instances instead of leaking implementation details
# like we do now?
return search_results
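def _example_usage():
    """A minimal usage sketch, not called anywhere in the module: it assumes
    an App Engine context with indexed models.Card entities, and the
    free-form query string is illustrative."""
    results = query_cards('#python @alice sorting')
    for doc in results:
        card_key = ndb.Key(urlsafe=doc.doc_id)  # doc IDs are url-safe Card keys
        print('%s: %s' % (card_key.id(), doc.field('front').value))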
| 37.035714 | 81 | 0.667998 |
d5e280ff84ed8b441621c5c137faf53691c8d37c
| 3,422 |
py
|
Python
|
Bot/Bot/board.py
|
Baidi96/AI-Agent-for-Light-Rider
|
6ae0cd4ea07248751c0f015ed74123ae3dec33d1
|
[
"MIT"
] | 1 |
2019-12-18T08:24:22.000Z
|
2019-12-18T08:24:22.000Z
|
Bot/Bot/board.py
|
Baidi96/AI-Agent-for-Light-Rider
|
6ae0cd4ea07248751c0f015ed74123ae3dec33d1
|
[
"MIT"
] | null | null | null |
Bot/Bot/board.py
|
Baidi96/AI-Agent-for-Light-Rider
|
6ae0cd4ea07248751c0f015ed74123ae3dec33d1
|
[
"MIT"
] | null | null | null |
import copy
import sys
PLAYER1, PLAYER2, EMPTY, BLOCKED = [0, 1, 2, 3]
S_PLAYER1, S_PLAYER2, S_EMPTY, S_BLOCKED, = ['0', '1', '.', 'x']
CHARTABLE = [(PLAYER1, S_PLAYER1), (PLAYER2, S_PLAYER2), (EMPTY, S_EMPTY), (BLOCKED, S_BLOCKED)]
DIRS = [
((-1, 0), "up"),
((1, 0), "down"),
((0, 1), "right"),
((0, -1), "left")
]
# The information of the whole grid
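# The board class body is not included in this dump; a minimal
# reconstruction consistent with the constants above (the method set is
# an assumption):
class Board(object):
    def __init__(self, width, height):
        self.width, self.height = width, height
        self.cell = [[EMPTY] * width for _ in range(height)]

    def output(self):
        trans = dict(CHARTABLE)  # maps cell values to their display characters
        return '\n'.join(''.join(trans[v] for v in row) for row in self.cell)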
| 29.247863 | 124 | 0.504383 |
d5e2b128cd1d2cb827ad4460d329a4ebc4a12998
| 884 |
py
|
Python
|
baekjoon/1012.py
|
wonnerky/coteMaster
|
360e491e6342c1ee42ff49750b838a2ead865613
|
[
"Apache-2.0"
] | null | null | null |
baekjoon/1012.py
|
wonnerky/coteMaster
|
360e491e6342c1ee42ff49750b838a2ead865613
|
[
"Apache-2.0"
] | null | null | null |
baekjoon/1012.py
|
wonnerky/coteMaster
|
360e491e6342c1ee42ff49750b838a2ead865613
|
[
"Apache-2.0"
] | null | null | null |
import sys
sys.setrecursionlimit(10000)
T = int(input())
for _ in range(T):
M, N, K = map(int, input().split())
board = [[0] * M for _ in range(N)]
for _ in range(K):
c, r = map(int, input().split())
board[r][c] = 1
    visit = [[False] * M for _ in range(N)]

    # dfs() is called below but its definition is missing from this dump;
    # a minimal reconstruction of the intended 4-directional flood fill:
    def dfs(r, c):
        visit[r][c] = True
        for dr, dc in ((-1, 0), (1, 0), (0, 1), (0, -1)):
            nr, nc = r + dr, c + dc
            if 0 <= nr < N and 0 <= nc < M and board[nr][nc] == 1 and not visit[nr][nc]:
                dfs(nr, nc)

    cnt = 0
    for r in range(N):
        for c in range(M):
            if not visit[r][c] and board[r][c] == 1:
                cnt += 1
                dfs(r, c)
    print(cnt)  # debug dump of `visit` removed: it would corrupt the judged output
| 27.625 | 90 | 0.417421 |