Dataset schema: one row per source file; each column is listed below with its dtype and observed value range (⌀ marks a nullable column).

- hexsha: string, length 40-40
- size: int64, 5 - 2.06M
- ext: string, 11 classes
- lang: string, 1 class
- max_stars_repo_path: string, length 3-251
- max_stars_repo_name: string, length 4-130
- max_stars_repo_head_hexsha: string, length 40-78
- max_stars_repo_licenses: list, length 1-10
- max_stars_count: int64, 1-191k ⌀
- max_stars_repo_stars_event_min_datetime: string, length 24-24 ⌀
- max_stars_repo_stars_event_max_datetime: string, length 24-24 ⌀
- max_issues_repo_path: string, length 3-251
- max_issues_repo_name: string, length 4-130
- max_issues_repo_head_hexsha: string, length 40-78
- max_issues_repo_licenses: list, length 1-10
- max_issues_count: int64, 1-116k ⌀
- max_issues_repo_issues_event_min_datetime: string, length 24-24 ⌀
- max_issues_repo_issues_event_max_datetime: string, length 24-24 ⌀
- max_forks_repo_path: string, length 3-251
- max_forks_repo_name: string, length 4-130
- max_forks_repo_head_hexsha: string, length 40-78
- max_forks_repo_licenses: list, length 1-10
- max_forks_count: int64, 1-105k ⌀
- max_forks_repo_forks_event_min_datetime: string, length 24-24 ⌀
- max_forks_repo_forks_event_max_datetime: string, length 24-24 ⌀
- content: string, length 1-1.05M
- avg_line_length: float64, 1-1.02M
- max_line_length: int64, 3-1.04M
- alphanum_fraction: float64, 0-1

Each data row below is pipe-delimited in the column order above, with the `content` field reproduced verbatim between the metadata and the trailing three statistics (avg_line_length | max_line_length | alphanum_fraction).
b97f4f2077af2e6d4198d160e8fea133c49dee89 | 4,187 | py | Python | pyecharts/custom/grid.py | zilong305/pycharts | 6cf1bb7f17001a36da6a766615a78b1dbef5918f | ["MIT"] | null | null | null | pyecharts/custom/grid.py | zilong305/pycharts | 6cf1bb7f17001a36da6a766615a78b1dbef5918f | ["MIT"] | null | null | null | pyecharts/custom/grid.py | zilong305/pycharts | 6cf1bb7f17001a36da6a766615a78b1dbef5918f | ["MIT"] | null | null | null |
#!/usr/bin/env python
# coding=utf-8
from pyecharts.option import grid
| 31.961832 | 100 | 0.540482 |
b97f78c59a8296809ae879f2d6f8355b0f8c52d0 | 4,588 | py | Python | smooch/conversations.py | devinmcgloin/smooch | c9561c3e7f1546efc58daa472b70f738d0d35e13 | ["MIT"] | 3 | 2016-07-04T12:02:03.000Z | 2017-03-20T19:39:36.000Z | smooch/conversations.py | devinmcgloin/smooch | c9561c3e7f1546efc58daa472b70f738d0d35e13 | ["MIT"] | 41 | 2019-05-28T09:54:04.000Z | 2020-02-20T05:34:19.000Z | smooch/conversations.py | devinmcgloin/smooch | c9561c3e7f1546efc58daa472b70f738d0d35e13 | ["MIT"] | 2 | 2016-07-20T14:31:45.000Z | 2016-11-18T12:19:38.000Z |
import logging
from .endpoint import ask
def request_payment(user_id, message, options):
"""Note that amount is a integer which specifies the amount of cents in the transaction
Smooch will default to the currency specified in your account settings."""
if not valid_args(user_id, message, options):
logging.warning("request payment called with invalid args user_id={} message={} options={}"
.format(user_id, message, options))
return
role = "appMaker"
buttons = []
for short_text, result in options:
buttons.append({
"type": "buy",
"text": short_text,
"amount": result})
data = {"text": message,
"role": role,
"actions": buttons}
return ask('appusers/{0}/conversation/messages'.format(user_id),
data,
'post')
def send_links(user_id, message, options):
"""Sends a series of links. The options field is a dictionary in which the keys are
descriptions and values uris"""
if not valid_args(user_id, message, options):
logging.warning("send links called with invalid args user_id={} message={} options={}"
.format(user_id, message, options))
return
role = "appMaker"
buttons = []
for short_text, result in options:
buttons.append({
"type": "link",
"text": short_text,
"uri": result})
data = {"text": message,
"role": role,
"actions": buttons}
return ask('appusers/{0}/conversation/messages'.format(user_id),
data,
'post')
def send_postbacks(user_id, message, options):
"""Sends a series of options that you can listen for on your webhook. The options field is a dictionary in which the keys are
descriptions and values the postback payload. You need to set up a webhook to listen for the postback."""
if not valid_args(user_id, message, options):
logging.warning("send postback called with invalid args user_id={} message={} options={}"
.format(user_id, message, options))
return
role = "appMaker"
buttons = []
for short_text, result in options:
buttons.append({
"type": "postback",
"text": short_text,
"payload": result
})
data = {"text": message,
"role": role,
"actions": buttons}
return ask('appusers/{0}/conversation/messages'.format(user_id),
data,
'post')
def send_buttons(user_id, message, options):
"""Options is a list of tuples in which the first element is the type of the button,
second the short text, and third the result for the specified type."""
if not valid_args(user_id, message, options):
logging.warning("send buttons called with invalid args user_id={} message={} options={}"
.format(user_id, message, options))
return
role = "appMaker"
buttons = []
for text, kind, result in options:
buttons.append({
"type": kind,
"text": text,
"payload": result
})
data = {"text": message,
"role": role,
"actions": buttons}
return ask('appusers/{0}/conversation/messages'.format(user_id),
data,
'post')
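# `valid_args` is used by every sender above but is not part of this excerpt.
# A minimal sketch of what it plausibly checks (hypothetical, not from the
# original source): every argument must be present and non-empty.
def valid_args(user_id, message, options):
    return bool(user_id) and bool(message) and bool(options)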
| 30.586667 | 129 | 0.598954 |
b980ab008a2dab6e2778edec1d7d9e24b2315a73 | 1,086 | py | Python | cifar/evalit.py | Sharkbyteprojects/IRIS-ML_and_Deep-Learning | f0e053cf7a0e69019bbba36e6da3e60d76105fe9 | ["MIT"] | null | null | null | cifar/evalit.py | Sharkbyteprojects/IRIS-ML_and_Deep-Learning | f0e053cf7a0e69019bbba36e6da3e60d76105fe9 | ["MIT"] | null | null | null | cifar/evalit.py | Sharkbyteprojects/IRIS-ML_and_Deep-Learning | f0e053cf7a0e69019bbba36e6da3e60d76105fe9 | ["MIT"] | null | null | null |
import keras
from keras.models import load_model
from PIL import Image
import matplotlib.pylab as plt
import numpy as np
import zipfile
print("Extract")
zip_ref = zipfile.ZipFile("./asset.zip", 'r')
zip_ref.extractall(".")
zip_ref.close()
print("Load Model")
model=load_model("cifar-model.h5")
CIFAR_10_CLASSES=["Plane","Car","bird","cat","deer","dog","frog","horse","ship","truck"]
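# `calc` is called below but missing from this excerpt. A hypothetical
# reconstruction: load an image, fit it to the 32x32 CIFAR-10 input shape,
# predict, and report the most likely class.
def calc(path):
    img = Image.open(path).convert("RGB").resize((32, 32))
    arr = np.asarray(img, dtype="float32") / 255.0
    pred = model.predict(arr.reshape(1, 32, 32, 3))
    plt.imshow(img)
    plt.show()
    print(path, "->", CIFAR_10_CLASSES[int(np.argmax(pred))])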
print("START TEST")
calc("lkw-image.jpg")
calc("cat.jpg")
calc("frog.jpg")
calc("fog.jpg")
calc("lfog.jpg")
calc("d.jpg")
calc("b.jpg")
calc("bs.jpg")
calc("plapper.jpg")
calc("ds.jpg")
print("Complete")
print("End")
quit(0)
| 27.15 | 88 | 0.710866 |
b980be1e0d2b8db749e25a4f49c35cdddbdca9d9 | 1,650 | py | Python | tt/urls.py | samiksha-patil/Knowledge-Sharing-Platform | 22e61a659d5ad63fe656fa639dc897cbdebad4fe | ["bzip2-1.0.6"] | 1 | 2021-05-09T08:18:49.000Z | 2021-05-09T08:18:49.000Z | tt/urls.py | samiksha-patil/Knowledge-Sharing-Platform | 22e61a659d5ad63fe656fa639dc897cbdebad4fe | ["bzip2-1.0.6"] | 9 | 2021-03-19T01:11:35.000Z | 2022-03-12T00:20:13.000Z | tt/urls.py | samiksha-patil/Knowledge-Sharing-Platform | 22e61a659d5ad63fe656fa639dc897cbdebad4fe | ["bzip2-1.0.6"] | null | null | null |
"""
tt URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# Uncomment next two lines to enable admin:
from django.contrib import admin
from django.urls import path, include
from users import views as user_views
from django.contrib.auth import views as auth_views
from upload import views as upload_views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
# Uncomment the next line to enable the admin:
path('admin/', admin.site.urls),
path('', include('blog.urls')),
path('register/', user_views.register, name='register'),
path('login/',auth_views.LoginView.as_view(template_name='users/login.html'),name='login'),
path('logout/',auth_views.LogoutView.as_view(template_name='users/logout.html') ,name='logout'),
path('profile/', user_views.profile, name='profile'),
path('book/',upload_views.book_list,name='book_list'),
path('book/upload',upload_views.upload_book,name='upload_book'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 35.869565 | 100 | 0.726061 |
b9814171798d1f2ddf5247c67182a7e7e032132e | 105 | py | Python | src/git/cmd.py | danihodovic/dht | 636f54d70f8c6ca60ab48f2815b3e9e1a336d78f | ["MIT"] | 2 | 2021-01-21T15:04:32.000Z | 2021-01-21T16:23:32.000Z | src/git/cmd.py | danihodovic/dht | 636f54d70f8c6ca60ab48f2815b3e9e1a336d78f | ["MIT"] | 2 | 2020-12-30T20:34:51.000Z | 2021-01-17T20:02:02.000Z | src/git/cmd.py | danihodovic/dht | 636f54d70f8c6ca60ab48f2815b3e9e1a336d78f | ["MIT"] | null | null | null |
import os
import click
os.environ["GIT_PYTHON_REFRESH"] = "quiet"
| 9.545455 | 42 | 0.67619 |
b98238142a5e4442e3c9fdd220f6bde9274299de | 570 | py | Python | TwitterImage2JPG.py | Tymec/Playground | 5a4aaa4a88e084d8d31803485b1ec521ad49a3d1 | ["MIT"] | null | null | null | TwitterImage2JPG.py | Tymec/Playground | 5a4aaa4a88e084d8d31803485b1ec521ad49a3d1 | ["MIT"] | null | null | null | TwitterImage2JPG.py | Tymec/Playground | 5a4aaa4a88e084d8d31803485b1ec521ad49a3d1 | ["MIT"] | 1 | 2019-02-19T10:32:07.000Z | 2019-02-19T10:32:07.000Z |
import glob
import os
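# `main` is not included in this excerpt. A hypothetical sketch consistent
# with the file name: rename Twitter-style ".jpg-large" downloads in the
# current directory to plain ".jpg" (the glob pattern is an assumption).
def main():
    for path in glob.glob("*.jpg-large"):
        os.rename(path, os.path.splitext(path)[0] + ".jpg")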
if __name__ == "__main__":
main()
| 22.8 | 67 | 0.585965 |
b982943f0b8c226209550f8c7f62a0e03d0b5ff5 | 6,405 | py | Python | Data Analysis/classification.py | Riccardo95Facchini/DIL-2019 | febeda55fd647943a1b8c49b3c5192fcd69fdaf5 | ["MIT"] | null | null | null | Data Analysis/classification.py | Riccardo95Facchini/DIL-2019 | febeda55fd647943a1b8c49b3c5192fcd69fdaf5 | ["MIT"] | null | null | null | Data Analysis/classification.py | Riccardo95Facchini/DIL-2019 | febeda55fd647943a1b8c49b3c5192fcd69fdaf5 | ["MIT"] | null | null | null |
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report
#EVERY TIME THE DATASET IS RETRIEVED FROM GITHUB
input_file = 'https://raw.githubusercontent.com/lcphy/Digital-Innovation-Lab/master/bank-full.csv'
dataset = pd.read_csv(input_file, sep=';', header = 0)
dataset.head()
#DELETE NEXT CALLS DATA
dataset = dataset.drop("contact", axis=1)
dataset = dataset.drop("day", axis=1)
dataset = dataset.drop("month", axis=1)
dataset = dataset.drop("duration", axis=1)
dataset = dataset.drop("campaign", axis=1)
dataset = dataset.drop("pdays", axis=1)
dataset = dataset.drop("previous", axis=1)
dataset = dataset.drop("poutcome", axis=1)
dataset.head()
#FEATURE ENGINEERING
cleanup_nums = {"marital": {"married": 1, "single": 0, "divorced":-1},
"education": {"primary": 1, "secondary": 2, "tertiary": 3},
"default": {"yes": 1, "no": 0},
"housing": {"yes": 1, "no": 0},
"loan": {"yes": 1, "no": 0},
"y": {"yes": 1, "no": 0}}
dataset.replace(cleanup_nums, inplace=True)
dataset.head()
dataset.dtypes
dataset = dataset[dataset.job != 'unknown']
dataset = dataset[dataset.education != 'unknown']
dataset['education'] = dataset['education'].astype(int)
#CORRELATION MATRIX
plt.figure(figsize=(12,10))
cor = dataset.corr()
sns.heatmap(cor, annot=True, cmap=plt.cm.Reds)
plt.show()
#CLASSIFICATION
X = dataset.iloc[:, 0:7]
y = dataset.iloc[:, 7]
X = pd.get_dummies(X, columns=["job"], prefix=["job"])
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)
#DECISION TREE
from sklearn import tree
from sklearn.tree import DecisionTreeClassifier
clf_dt = DecisionTreeClassifier()
clt_dt = clf_dt.fit(X_train,y_train)
esito = clf_dt.predict(X_test)
target_names = ['NOT-sub', 'Subscribed']
print(classification_report(y_test, esito,target_names=target_names))
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, esito)
print(cm)
plt.hist(esito)
#RANDOM FOREST
from sklearn.ensemble import RandomForestClassifier
clf_dt = RandomForestClassifier()
clt_dt = clf_dt.fit(X_train,y_train)
esito = clf_dt.predict(X_test)
target_names = ['NOT-sub', 'Subscribed']
print(classification_report(y_test, esito,target_names=target_names))
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, esito)
print(cm)
plt.hist(esito)
# K-NEAREST NEIGHBOURS
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# TRAINING - TEST
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0)
# SCALING
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
# FITTING
from sklearn.neighbors import KNeighborsClassifier
classifier = KNeighborsClassifier(n_neighbors = 5, metric = 'minkowski', p = 2)
classifier.fit(X_train, y_train)
# PREDICTION
y_pred = classifier.predict(X_test)
# CONFUSION MATRIX
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
target_names = ['NOT-sub', 'Subscribed']
print(classification_report(y_test, y_pred,target_names=target_names))
print(cm)
plt.hist(y_pred)
#UNDERSAMPLING
from sklearn.utils import resample
dataset_sample = pd.get_dummies(dataset, columns=["job"], prefix=["job"])
#SPLIT FEATURE AND TARGET
y = dataset_sample.y
X = dataset_sample.drop('y', axis=1)
#TRAIN TEST
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0)
X = pd.concat([X_train, y_train], axis=1)
#SELECTING TARGET CLASSES
not_sub = X[X.y==0]
sub = X[X.y==1]
not_sub_downsampled = resample(not_sub,
replace = False,
n_samples = len(sub),
random_state = 27)
# COMBINE MINORITY AND DOWNSAMPLED MAJORITY
downsampled = pd.concat([not_sub_downsampled, sub])
#DECISION TREE
y_train = downsampled.y
X_train = downsampled.drop('y', axis=1)
clf_dt = DecisionTreeClassifier()
clt_dt = clf_dt.fit(X_train,y_train)
esito = clf_dt.predict(X_test)
target_names = ['NOT-sub', 'Subscribed']
print(classification_report(y_test, esito,target_names=target_names))
#RANDOM FOREST
y_train = downsampled.y
X_train = downsampled.drop('y', axis=1)
clf_dt = RandomForestClassifier()
clt_dt = clf_dt.fit(X_train,y_train)
esito = clf_dt.predict(X_test)
target_names = ['NOT-sub', 'Subscribed']
print(classification_report(y_test, esito,target_names=target_names))
#SMOTE - DECISION TREE
from imblearn.over_sampling import SMOTE
#SPLIT FEATURE TARGET
y = dataset_sample.y
X = dataset_sample.drop('y', axis=1)
#TRAIN TEST
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0)
#SMOTE
# Note: `ratio` and `fit_sample` are the older imbalanced-learn API;
# recent releases use `sampling_strategy` and `fit_resample` instead.
sm = SMOTE(random_state=27, ratio=1.0)
X_train, y_train = sm.fit_sample(X_train, y_train)
clf_dt = DecisionTreeClassifier()
#FIT
smote = clf_dt.fit(X_train,y_train)
#PREDICTION
smote_pred = smote.predict(X_test)
target_names = ['NOT-sub', 'Subscribed']
print(classification_report(y_test, smote_pred,target_names=target_names))
#SMOTE - RANDOM FOREST
from imblearn.over_sampling import SMOTE
y = dataset_sample.y
X = dataset_sample.drop('y', axis=1)
# setting up testing and training sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0)
sm = SMOTE(random_state=27, ratio=1.0)
X_train, y_train = sm.fit_sample(X_train, y_train)
clf_dt = RandomForestClassifier()
smote = clf_dt.fit(X_train,y_train)
smote_pred = smote.predict(X_test)
target_names = ['NOT-sub', 'Subscribed']
print(classification_report(y_test, smote_pred,target_names=target_names))
#RECAP on RECALL
x = np.arange(3)
plt.bar(x-0.2, [31,65,37], width=0.2, color='b', align='center', label='DT')
plt.bar(x, [18,61,32], width=0.2, color='r', align='center', label='RF')
plt.xticks(x-0.1, ['Normal','Under','Smote'])
plt.legend(loc='upper right')
#RECAP on F1
x = np.arange(3)
plt.bar(x-0.2, [31,26,32], width=0.2, color='b', align='center', label='DT')
plt.bar(x, [24,28,31], width=0.2, color='r', align='center', label='RF')
plt.xticks(x-0.1, ['Normal','Under','Smote'])
plt.legend(loc='lower right')
| 25.722892 | 98 | 0.721624 |
b982c2b4e976b723dfa3208c1bc1e4ea51b77ac9 | 5,562 | py | Python | tools/c7n_azure/tests/test_route_table.py | anastasiia-zolochevska/cloud-custodian | f25315a01bec808c16ab0e2d433d6151cf5769e4 | ["Apache-2.0"] | 2 | 2020-01-20T19:46:28.000Z | 2020-08-19T14:20:27.000Z | tools/c7n_azure/tests/test_route_table.py | anastasiia-zolochevska/cloud-custodian | f25315a01bec808c16ab0e2d433d6151cf5769e4 | ["Apache-2.0"] | 79 | 2019-03-20T12:27:06.000Z | 2019-08-14T14:07:04.000Z | tools/c7n_azure/tests/test_route_table.py | anastasiia-zolochevska/cloud-custodian | f25315a01bec808c16ab0e2d433d6151cf5769e4 | ["Apache-2.0"] | 2 | 2019-04-22T15:20:23.000Z | 2019-08-27T12:37:51.000Z |
# Copyright 2015-2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from azure_common import BaseTest, arm_template
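# The test class body is elided in this excerpt. A hypothetical minimal test
# in the cloud-custodian style (the resource name azure.routetable is an
# assumption):
class RouteTableTest(BaseTest):
    def test_route_table_schema_validate(self):
        p = self.load_policy({
            'name': 'test-azure-route-table',
            'resource': 'azure.routetable'
        }, validate=True)
        self.assertTrue(p)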
| 35.426752 | 95 | 0.538655 |
b98531b0567b9e4719006397ec461d3fa4999e4b | 11,730 | py | Python | proto/tp_artifact_1.0/build/lib/sawtooth_artifact/processor/handler.py | pkthein/sparts_all_fam | ff162e4ea8c3919a197dc0cc13fde6b32da113c7 | ["Apache-2.0"] | 1 | 2019-04-03T18:31:36.000Z | 2019-04-03T18:31:36.000Z | proto/tp_artifact_1.0/build/lib/sawtooth_artifact/processor/handler.py | pkthein/sparts_all_fam | ff162e4ea8c3919a197dc0cc13fde6b32da113c7 | ["Apache-2.0"] | null | null | null | proto/tp_artifact_1.0/build/lib/sawtooth_artifact/processor/handler.py | pkthein/sparts_all_fam | ff162e4ea8c3919a197dc0cc13fde6b32da113c7 | ["Apache-2.0"] | null | null | null |
# Copyright 2016 Intel Corporation
# Copyright 2017 Wind River
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
################################################################################
# LIBRARIES & DEPENDENCIES #
################################################################################
import hashlib
import logging
import json
from collections import OrderedDict
from sawtooth_sdk.processor.exceptions import InvalidTransaction
from sawtooth_sdk.processor.exceptions import InternalError
from sawtooth_sdk.processor.handler import TransactionHandler
LOGGER = logging.getLogger(__name__)
################################################################################
# HANDLER OBJ #
################################################################################
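# The handler class itself is elided in this excerpt. A hypothetical skeleton
# of the sawtooth TransactionHandler it would define (family metadata values
# are assumptions):
class ArtifactTransactionHandler(TransactionHandler):
    def __init__(self, namespace_prefix):
        self._namespace_prefix = namespace_prefix

    @property
    def family_name(self):
        return "artifact"

    @property
    def family_versions(self):
        return ["1.0"]

    @property
    def namespaces(self):
        return [self._namespace_prefix]

    def apply(self, transaction, context):
        # The real dispatch logic (create/amend/AddArtifact/AddURI) is not
        # part of this excerpt.
        raise NotImplementedError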
################################################################################
# HELPER FUNCTIONS #
################################################################################
def create_artifact(artifact_id, artifact_alias, artifact_name, artifact_type,
artifact_checksum, artifact_label, artifact_openchain,
prev, cur, timestamp, artifact_list=[], uri_list=[]):
"""
Constructs the payload to be stored in the state storage.
Args:
artifact_uuid (str): The uuid of the artifact
artifact_alias (str): The alias of the artifact
artifact_name (str): The name of the artifact
artifact_type (str): The type of the artifact
artifact_checksum (str): The checksum of the artifact
artifact_label (str): The label of the artifact
artifact_openchain (str): The openchain of the artifact
prev (str): The previous block id of the transaction (default "0")
cur (str): the current block id of the transaction
timestamp (str): The UTC time for when the transaction was submitted
artifact_list (list of dict):
The list of the artifact uuid associated with the artifact
(default [])
uri_list (list of dict):
The list of the uri associated with the artifact (default [])
Returns:
type: dict
The dictionary pertaining all the param is created and returned to
be stored on the state storage.
"""
return {
"uuid" : artifact_id,
"alias" : artifact_alias,
"name" : artifact_name,
"content_type" : artifact_type,
"checksum" : artifact_checksum,
"label" : artifact_label,
"openchain" : artifact_openchain,
"prev_block" : prev,
"cur_block" : cur,
"timestamp" : timestamp,
"artifact_list" : artifact_list,
"uri_list" : uri_list
}
def validate_transaction(artifact_id, action):
"""
    Performs a soft sanity check up front so that obviously invalid
    transactions fail fast instead of raising exceptions later.
Args:
artifact_id (str): The uuid of the artifact
action (str): The command to be performed
Raises:
InvalidTransaction:
If the uuid or the action are not passed in or the
action is not a valid action.
"""
if not artifact_id:
raise InvalidTransaction("Artifact ID is required")
if not action:
raise InvalidTransaction("Action is required")
if action not in ("AddArtifact", "create", "AddURI", "amend"):
raise InvalidTransaction("Invalid action: {}".format(action))
def make_artifact_address(namespace_prefix, artifact_id):
"""
Creates an artifact address which will be used to recover the associated
UUID if the artifact already exists in the state storage; or, used as a key to
store the new data into the state storage.
Args:
namespace_prefix (str):
The prefix associating with the transaction family
artifact_id (str): The uuid of the artifact
Returns:
type: str
The address-to-be, which associates the uuid and the namespace prefix.
"""
return namespace_prefix + \
hashlib.sha512(artifact_id.encode("utf-8")).hexdigest()[:64]
def _display(msg):
"""
Logs the message to the debug logger.
Args:
msg (str): The message that is to be logged into the debug logger
"""
n = msg.count("\n")
if n > 0:
msg = msg.split("\n")
length = max(len(line) for line in msg)
else:
length = len(msg)
msg = [msg]
LOGGER.debug("+" + (length + 2) * "-" + "+")
for line in msg:
LOGGER.debug("+ " + line.center(length) + " +")
LOGGER.debug("+" + (length + 2) * "-" + "+")
################################################################################
# #
################################################################################
| 39.897959 | 82 | 0.521313 |
b9877d896f97460bc5a35787da6277925368bc9f | 764 | py | Python | ReviewsCollector.py | fsandx/moodybooks | 5c13fe43849e4fa861a163c74411e9f796518bc9 | ["MIT"] | null | null | null | ReviewsCollector.py | fsandx/moodybooks | 5c13fe43849e4fa861a163c74411e9f796518bc9 | ["MIT"] | null | null | null | ReviewsCollector.py | fsandx/moodybooks | 5c13fe43849e4fa861a163c74411e9f796518bc9 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
STEP 2
Takes the list of URLs in the JSON files and downloads the HTML files to the local drive.
Start with: scrapy runspider ReviewsCollector.py
"""
import scrapy
import json
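# The spider class is elided in this excerpt. A hypothetical minimal shape
# consistent with the docstring (class name, urls.json layout, and output
# naming are assumptions):
class ReviewsCollector(scrapy.Spider):
    name = "reviews"

    def start_requests(self):
        with open("urls.json") as f:
            for url in json.load(f):
                yield scrapy.Request(url=url, callback=self.parse)

    def parse(self, response):
        filename = response.url.rstrip("/").split("/")[-1] + ".html"
        with open(filename, "wb") as f:
            f.write(response.body)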
| 29.384615 | 124 | 0.611257 |
b9887b38cf06939bc8dd710e9861e2366862482a | 3,120 | py | Python | firelight/interfaces/light.py | roshie548/firelight | 3a5af5e2a1e5784127baebcf1517ffddcaff4062 | ["MIT"] | 16 | 2021-11-29T03:05:31.000Z | 2022-01-19T05:32:45.000Z | firelight/interfaces/light.py | roshie548/firelight | 3a5af5e2a1e5784127baebcf1517ffddcaff4062 | ["MIT"] | null | null | null | firelight/interfaces/light.py | roshie548/firelight | 3a5af5e2a1e5784127baebcf1517ffddcaff4062 | ["MIT"] | null | null | null |
from abc import ABC, abstractmethod
from .color import Color
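# The interface itself is elided in this excerpt. A hypothetical sketch of an
# abstract light built on the imports above (method names are assumptions):
class Light(ABC):
    @abstractmethod
    def set_color(self, color: Color) -> None:
        """Apply the given color to the light."""

    @abstractmethod
    def turn_off(self) -> None:
        """Switch the light off."""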
| 32.842105 | 77 | 0.641026 |
b98b6f0b6e5f35ef44fd272ec1f3a99b4d72acf0 | 1,293 | py | Python | PolymorphismPYTHON/Polypy.py | cadeng23/oop-cjgustafson | cd3e5ca0e37f8b00a80516c6c8d5d6789a77d9a8 | ["MIT"] | null | null | null | PolymorphismPYTHON/Polypy.py | cadeng23/oop-cjgustafson | cd3e5ca0e37f8b00a80516c6c8d5d6789a77d9a8 | ["MIT"] | null | null | null | PolymorphismPYTHON/Polypy.py | cadeng23/oop-cjgustafson | cd3e5ca0e37f8b00a80516c6c8d5d6789a77d9a8 | ["MIT"] | null | null | null |
import random
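# The Family class is not part of this excerpt. A hypothetical sketch that
# makes the calls below runnable: first/last name plus hair color, with the
# eye color drawn at random (the candidate colors are assumptions).
class Family:
    EYE_COLORS = ["Blue", "Brown", "Green", "Hazel"]

    def __init__(self, first, last, hair):
        self.first = first
        self.last = last
        self.hair = hair
        self.eyes = random.choice(self.EYE_COLORS)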
Daughter = Family('Ashley', 'Smith', 'Brown')
Son = Family('Kevin', 'Smith', 'Brown')
print(Daughter.eyes)
print(Son.eyes)
# When the children are created, the class randomly determines which hair
# and eye colors they inherit.
| 23.089286 | 66 | 0.618716 |
b98c3a1636cff18e5244db1f52b8e6e89e2c99b5 | 1,494 | py | Python | homeassistant/components/device_tracker/owntracks.py | evancohen/home-assistant | dafc0ced6b07025c03417d8e7a2c0133b4c622fc | ["MIT"] | 14 | 2015-11-10T07:57:43.000Z | 2021-08-29T13:45:26.000Z | homeassistant/components/device_tracker/owntracks.py | evancohen/home-assistant | dafc0ced6b07025c03417d8e7a2c0133b4c622fc | ["MIT"] | null | null | null | homeassistant/components/device_tracker/owntracks.py | evancohen/home-assistant | dafc0ced6b07025c03417d8e7a2c0133b4c622fc | ["MIT"] | 8 | 2015-11-14T16:40:41.000Z | 2020-02-17T19:48:08.000Z |
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.owntracks/
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
| 27.666667 | 74 | 0.582999 |
b98c6a6e2a07073f4614093d6ae5d6469afd6835 | 48,027 | py | Python | src/models/end_to_end_event_coreference.py | luyaojie/E3C | 4b2f33da4629211fd6a3738077794f821c7f7c8b | ["MIT"] | 2 | 2022-02-20T15:13:11.000Z | 2022-03-22T03:47:21.000Z | src/models/end_to_end_event_coreference.py | luyaojie/E3C | 4b2f33da4629211fd6a3738077794f821c7f7c8b | ["MIT"] | null | null | null | src/models/end_to_end_event_coreference.py | luyaojie/E3C | 4b2f33da4629211fd6a3738077794f821c7f7c8b | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Created by Roger on 2019-09-10
# Mostly by AllenNLP
import logging
import math
from typing import Any, Dict, List, Optional, Tuple
import torch
import torch.nn.functional as F
from allennlp.data import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import FeedForward, Pruner
from allennlp.modules import Seq2SeqEncoder, TimeDistributed, TextFieldEmbedder
from allennlp.modules.seq2seq_encoders import IntraSentenceAttentionEncoder
from allennlp.modules.similarity_functions import DotProductSimilarity
from allennlp.modules.span_extractors import SelfAttentiveSpanExtractor, EndpointSpanExtractor
from allennlp.modules.token_embedders import Embedding
from allennlp.nn import util, InitializerApplicator, RegularizerApplicator
from allennlp.training.metrics import Average
from overrides import overrides
from torch.nn import BCEWithLogitsLoss
from src.metrics.event_coref_scores import EventCorefScores
from src.metrics.mention_f1 import TopSpanMentionTypeF1
from src.utils.cluster_decoding_utils import node_decode
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def _generate_valid_antecedents(num_spans_to_keep: int,
max_antecedents: int,
device: int) -> Tuple[torch.IntTensor,
torch.IntTensor,
torch.FloatTensor]:
"""
This method generates possible antecedents per span which survived the pruning
stage. This procedure is `generic across the batch`. The reason this is the case is
that each span in a batch can be coreferent with any previous span, but here we
are computing the possible `indices` of these spans. So, regardless of the batch,
the 1st span _cannot_ have any antecedents, because there are none to select from.
Similarly, each element can only predict previous spans, so this returns a matrix
of shape (num_spans_to_keep, max_antecedents), where the (i,j)-th index is equal to
(i - 1) - j if j <= i, or zero otherwise.
Parameters
----------
num_spans_to_keep : ``int``, required.
The number of spans that were kept while pruning.
max_antecedents : ``int``, required.
The maximum number of antecedent spans to consider for every span.
device: ``int``, required.
The CUDA device to use.
Returns
-------
valid_antecedent_indices : ``torch.IntTensor``
The indices of every antecedent to consider with respect to the top k spans.
Has shape ``(num_spans_to_keep, max_antecedents)``.
valid_antecedent_offsets : ``torch.IntTensor``
The distance between the span and each of its antecedents in terms of the number
of considered spans (i.e not the word distance between the spans).
Has shape ``(1, max_antecedents)``.
valid_antecedent_log_mask : ``torch.FloatTensor``
The logged mask representing whether each antecedent span is valid. Required since
different spans have different numbers of valid antecedents. For example, the first
span in the document should have no valid antecedents.
Has shape ``(1, num_spans_to_keep, max_antecedents)``.
"""
# Shape: (num_spans_to_keep, 1)
target_indices = util.get_range_vector(num_spans_to_keep, device).unsqueeze(1)
# Shape: (1, max_antecedents)
valid_antecedent_offsets = (util.get_range_vector(max_antecedents, device) + 1).unsqueeze(0)
# This is a broadcasted subtraction.
# Shape: (num_spans_to_keep, max_antecedents)
raw_antecedent_indices = target_indices - valid_antecedent_offsets
# In our matrix of indices, the upper triangular part will be negative
# because the offsets will be > the target indices. We want to mask these,
# because these are exactly the indices which we don't want to predict, per span.
# We're generating a logspace mask here because we will eventually create a
# distribution over these indices, so we need the 0 elements of the mask to be -inf
# in order to not mess up the normalisation of the distribution.
# Shape: (1, num_spans_to_keep, max_antecedents)
valid_antecedent_log_mask = (raw_antecedent_indices >= 0).float().unsqueeze(0).log()
# Shape: (num_spans_to_keep, max_antecedents)
valid_antecedent_indices = F.relu(raw_antecedent_indices.float()).long()
return valid_antecedent_indices, valid_antecedent_offsets, valid_antecedent_log_mask
| 54.514188 | 134 | 0.629271 |
b98ccbb0c859fdccad6b30924e5845122d497aa5 | 1,964 | py | Python | week2/7litersProblem.py | vietanhtran2710/ArtificialIntelligenceHomework | f4da761016d67477b50856cadf1e2560230d3f79 | ["MIT"] | 3 | 2021-09-20T08:32:23.000Z | 2021-09-25T08:11:48.000Z | week2/7litersProblem.py | vietanhtran2710/ArtificialIntelligenceHomework | f4da761016d67477b50856cadf1e2560230d3f79 | ["MIT"] | null | null | null | week2/7litersProblem.py | vietanhtran2710/ArtificialIntelligenceHomework | f4da761016d67477b50856cadf1e2560230d3f79 | ["MIT"] | null | null | null |
"""
Given 3 bottles of capacities 3, 5, and 9 liters,
count number of all possible solutions to get 7 liters
"""
current_path = [[0, 0, 0]]
CAPACITIES = (3, 5, 9)
solutions_count = 0
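# `move_to_new_state` is elided in this excerpt. A hypothetical depth-first
# search consistent with the globals above: from each state try filling,
# emptying, and pouring between bottles, and count every path state in which
# some bottle or pair of bottles holds exactly 7 liters (the exact counting
# rule is an assumption).
def move_to_new_state(state):
    global solutions_count
    if 7 in state or any(state[i] + state[j] == 7
                         for i in range(3) for j in range(i + 1, 3)):
        solutions_count += 1
    candidates = []
    for i in range(3):
        filled = list(state)
        filled[i] = CAPACITIES[i]
        emptied = list(state)
        emptied[i] = 0
        candidates += [filled, emptied]
        for j in range(3):
            if i != j and state[i] > 0:
                poured = list(state)
                amount = min(state[i], CAPACITIES[j] - state[j])
                poured[i] -= amount
                poured[j] += amount
                candidates.append(poured)
    for new_state in candidates:
        if new_state not in current_path:
            current_path.append(new_state)
            move_to_new_state(new_state)
            current_path.pop()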
if __name__ == "__main__":
try:
current_state = [0, 0, 0]
move_to_new_state(current_state)
print(solutions_count)
except KeyboardInterrupt:
print(solutions_count)
# Result: at least 44,900,799 solutions
| 35.709091 | 91 | 0.548371 |
b98d02f62eca1818cb1fb297d1c8644dd35ff288 | 8,263 | py | Python | st2common/st2common/bootstrap/rulesregistrar.py | avezraj/st2 | 519c7f6819e52fb289c440bb7d1df7b558bb9ed7 | ["Apache-2.0"] | null | null | null | st2common/st2common/bootstrap/rulesregistrar.py | avezraj/st2 | 519c7f6819e52fb289c440bb7d1df7b558bb9ed7 | ["Apache-2.0"] | null | null | null | st2common/st2common/bootstrap/rulesregistrar.py | avezraj/st2 | 519c7f6819e52fb289c440bb7d1df7b558bb9ed7 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import six
from st2common import log as logging
from st2common.constants.meta import ALLOWED_EXTS
from st2common.constants.pack import DEFAULT_PACK_NAME
from st2common.bootstrap.base import ResourceRegistrar
from st2common.models.api.rule import RuleAPI
from st2common.models.system.common import ResourceReference
from st2common.persistence.rule import Rule
from st2common.services.triggers import cleanup_trigger_db_for_rule, increment_trigger_ref_count
from st2common.exceptions.db import coditationDBObjectNotFoundError
import st2common.content.utils as content_utils
__all__ = [
'RulesRegistrar',
'register_rules'
]
LOG = logging.getLogger(__name__)
| 41.109453 | 98 | 0.57824 |
b9912797a8155d6800745fe804b93206d95de8ac | 91,819 | py | Python | sdk/costmanagement/azure-mgmt-costmanagement/azure/mgmt/costmanagement/models/_models_py3.py | aiven/azure-sdk-for-python | 8764dc07423beca46ed0b51212d81289d9e52c60 | ["MIT"] | 1 | 2021-09-07T18:43:20.000Z | 2021-09-07T18:43:20.000Z | sdk/costmanagement/azure-mgmt-costmanagement/azure/mgmt/costmanagement/models/_models_py3.py | aiven/azure-sdk-for-python | 8764dc07423beca46ed0b51212d81289d9e52c60 | ["MIT"] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z | sdk/costmanagement/azure-mgmt-costmanagement/azure/mgmt/costmanagement/models/_models_py3.py | msyyc/azure-sdk-for-python | e2dba75181f8b4336ae57e75aa391322c12c3123 | ["MIT"] | 1 | 2021-05-19T02:55:10.000Z | 2021-05-19T02:55:10.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._cost_management_client_enums import *
| 38.16251 | 498 | 0.649005 |
b9921ebf7fdd9b5fb1dd763092a97ae1888e730f | 3,860 | py | Python | test/test_simple_compression.py | jayvdb/brotlipy | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | ["MIT"] | null | null | null | test/test_simple_compression.py | jayvdb/brotlipy | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | ["MIT"] | null | null | null | test/test_simple_compression.py | jayvdb/brotlipy | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
test_simple_compression
~~~~~~~~~~~~~~~~~~~~~~~~~
Tests for compression of single chunks.
"""
import brotli
import pytest
from hypothesis import given
from hypothesis.strategies import binary, integers, sampled_from, one_of
def test_roundtrip_compression_with_files(simple_compressed_file):
"""
Roundtripping data through the compressor works correctly.
"""
with open(simple_compressed_file[0], 'rb') as f:
uncompressed_data = f.read()
assert brotli.decompress(
brotli.compress(uncompressed_data)
) == uncompressed_data
| 29.692308 | 78 | 0.615803 |
b992a4ec960bcf3e39ba5a1bb6a8cd2e68be293e | 1,987 | py | Python | wexapi/models/ticker.py | madmis/wexapi | f5b1b9b566f767bca7d8fad1f08c3d1bca42355a | ["MIT"] | 3 | 2018-06-08T12:45:04.000Z | 2018-08-02T11:09:11.000Z | wexapi/models/ticker.py | madmis/wexapi | f5b1b9b566f767bca7d8fad1f08c3d1bca42355a | ["MIT"] | null | null | null | wexapi/models/ticker.py | madmis/wexapi | f5b1b9b566f767bca7d8fad1f08c3d1bca42355a | ["MIT"] | null | null | null |
from decimal import Decimal
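# The model body is elided in this excerpt. A hypothetical minimal ticker
# value object using Decimal for exact price arithmetic (field names are
# assumptions):
class Ticker:
    def __init__(self, high, low, last, vol):
        self.high = Decimal(str(high))
        self.low = Decimal(str(low))
        self.last = Decimal(str(last))
        self.vol = Decimal(str(vol))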
| 20.27551 | 38 | 0.545546 |
b99506d26f9716e398b3a3724d393185a9900942 | 1,216 | py | Python | hard-gists/98bb452dc14e8c40e403/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | ["Apache-2.0"] | 21 | 2019-07-08T08:26:45.000Z | 2022-01-24T23:53:25.000Z | hard-gists/98bb452dc14e8c40e403/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | ["Apache-2.0"] | 5 | 2019-06-15T14:47:47.000Z | 2022-02-26T05:02:56.000Z | hard-gists/98bb452dc14e8c40e403/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | ["Apache-2.0"] | 17 | 2019-05-16T03:50:34.000Z | 2021-01-14T14:35:12.000Z |
from scryptos import *
p1 = 32581479300404876772405716877547
p2 = 27038194053540661979045656526063
p3 = 26440615366395242196516853423447
n = p1*p2*p3
e = 3
c = int(open("flag.enc", "rb").read().encode("hex"), 16)
# from User's Guide to PARI/GP, nth_root function
sqrtnall = 'sqrtnall(x,n)={my(V,r,z,r2);r=sqrtn(x,n,&z);if(!z,error("Impossible case in sqrtn"));if(type(x)=="t_INTMOD"||type(x)=="t_PADIC",r2 = r*z;n=1;while(r2!=r,r2*=z;n++));V=vector(n);V[1]=r;for(i=2,n,V[i]=V[i-1]*z);V}'
c1 = eval(parigp([sqrtnall, "Vec(liftall(sqrtnall(Mod(%d, %d), 3)))" % (c, p1)]))
c2 = eval(parigp([sqrtnall, "Vec(liftall(sqrtnall(Mod(%d, %d), 3)))" % (c, p2)]))
c3 = eval(parigp([sqrtnall, "Vec(liftall(sqrtnall(Mod(%d, %d), 3)))" % (c, p3)]))
"""
c1 = [6149264605288583791069539134541, 13404203109409336045283549715377, 13028011585706956936052628027629]
c2 = [19616973567618515464515107624812]
c3 = [13374868592866626517389128266735, 7379361747422713811654086477766, 5686385026105901867473638678946]
"""
for x in c1:
for y in c2:
for z in c3:
crt = chinese_remainder_theorem([(x, p1), (y, p2), (z, p3)])
d = hex(crt, 2)[2:].decode("hex")
if "0ctf" in d:
print d[d.find("0ctf"):].strip()
| 39.225806 | 224 | 0.663651 |
b9954284c404c9a5aed225965d5006c8735af349 | 1,717 | py | Python | musa/migrations/0001_initial.py | ccsreenidhin/Music-Web-Django | 9b8286914f9099b9ed56c712c7ca384846f189d1 | ["MIT"] | null | null | null | musa/migrations/0001_initial.py | ccsreenidhin/Music-Web-Django | 9b8286914f9099b9ed56c712c7ca384846f189d1 | ["MIT"] | null | null | null | musa/migrations/0001_initial.py | ccsreenidhin/Music-Web-Django | 9b8286914f9099b9ed56c712c7ca384846f189d1 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-29 06:43
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import musa.models
| 40.880952 | 121 | 0.633663 |
b9957182927ee0480e35dd837a4d9ee2d8587462 | 3,207 | py | Python | nuitka/codegen/LoopCodes.py | RESP3CT88/Nuitka | 0fcc25d9f00c4fc78c79a863c4b7987f573962e1 | ["Apache-2.0"] | 1 | 2021-05-25T12:48:28.000Z | 2021-05-25T12:48:28.000Z | venv/Lib/site-packages/nuitka/codegen/LoopCodes.py | matthijsvanvliet/raytracing-python | 73d692b47330ab94eedde579a51063e3a907e92b | ["MIT"] | null | null | null | venv/Lib/site-packages/nuitka/codegen/LoopCodes.py | matthijsvanvliet/raytracing-python | 73d692b47330ab94eedde579a51063e3a907e92b | ["MIT"] | null | null | null |
# Copyright 2021, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Loop codes.
Code generation for loops, breaking them, or continuing them. In Nuitka, there
are no for-loops or while-loops at this point. They have been re-formulated in
a simpler loop without a condition, and statements there-in that break under
certain conditions.
See Developer Manual for how the CPython loops are mapped to these nodes.
"""
from .CodeHelpers import generateStatementSequenceCode
from .ErrorCodes import getErrorExitBoolCode
from .ExceptionCodes import getExceptionUnpublishedReleaseCode
from .LabelCodes import getGotoCode, getLabelCode
| 34.858696 | 111 | 0.752728 |
b995831c9a98c5b05882c5bbcc4b241cd51503bd | 4,837 | py | Python | 3_module/C_BloomFilter.py | L4mborg1n1-D14610/Algoritms_and_DataStructure | f61b7434dbc600da02e8ec38648fa84beb160f17 | ["Xnet", "X11", "CECILL-B"] | null | null | null | 3_module/C_BloomFilter.py | L4mborg1n1-D14610/Algoritms_and_DataStructure | f61b7434dbc600da02e8ec38648fa84beb160f17 | ["Xnet", "X11", "CECILL-B"] | null | null | null | 3_module/C_BloomFilter.py | L4mborg1n1-D14610/Algoritms_and_DataStructure | f61b7434dbc600da02e8ec38648fa84beb160f17 | ["Xnet", "X11", "CECILL-B"] | null | null | null |
import math
from sys import exit
# (Comments reconstructed from context; the original Cyrillic text was lost
# in extraction.) Given n, the expected number of elements, and P, the target
# false-positive probability, the bit-array size is m = -(n * log2(P)) / ln(2)
# (log2 is the base-2 logarithm) and the number of hash functions is -log2(P).
# Hash functions have the form (((i + 1)*x + p(i+1)) mod M) mod m, where x is
# the key, i is the index of the hash function, p(i) is the i-th prime number,
# and M is the 31st Mersenne prime: M = 2^31 - 1 = 2 147 483 647, which is prime.
# The filter keeps k such functions; the BloomFilter class encapsulates them.
# Commands: "set n P" builds the filter and prints m and k; "add K" inserts
# key K; "search K" prints 1 if K may be present and 0 otherwise; "print"
# outputs the bit array as a 0/1 string.
Mersen_31 = 2147483647
bloom_filter = 0
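# The BloomFilter class is elided in this excerpt. A hypothetical sketch that
# matches both the formulas in the comments above and the protocol used below
# (size / hash_numbers / add / search / print):
class BloomFilter:
    def __init__(self, n, p):
        self.size = round(-n * math.log2(p) / math.log(2))
        self.hash_numbers = round(-math.log2(p))
        self.bits = 0
        self.primes = self._first_primes(self.hash_numbers)

    @staticmethod
    def _first_primes(k):
        primes, x = [], 2
        while len(primes) < k:
            if all(x % q for q in primes):
                primes.append(x)
            x += 1
        return primes

    def _hashes(self, key):
        # (((i + 1) * key + p(i + 1)) mod M) mod m
        for i, p in enumerate(self.primes, start=1):
            yield ((i * key + p) % Mersen_31) % self.size

    def add(self, key):
        for h in self._hashes(key):
            self.bits |= 1 << h

    def search(self, key):
        return all(self.bits >> h & 1 for h in self._hashes(key))

    def print(self):
        return "".join(str(self.bits >> i & 1) for i in range(self.size))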
while True:
try:
line = input().split()
if len(line) == 0:
continue
else:
if line[0] == "set":
try:
elements_number = int(line[1])
probability = float(line[2])
if (elements_number <= 0) | (probability <= 0) | (probability >= 1):
print("error")
continue
bloom_filter = BloomFilter(elements_number, probability)
if (bloom_filter.size == 0) | (bloom_filter.hash_numbers == 0):
print("error")
continue
break
except TypeError:
print("error")
continue
else:
print("error")
continue
except EOFError:
exit()
print(bloom_filter.size, bloom_filter.hash_numbers)
while True:
try:
line = input().split()
if len(line) == 0:
continue
elif line[0] == "print":
print(bloom_filter.print())
elif (line[0] == "add") & (line[1].isnumeric()):
bloom_filter.add(int(line[1]))
elif (line[0] == "search") & (line[1].isnumeric()):
print(int(bloom_filter.search(int(line[1]))))
else:
print("error")
except EOFError:
break
| 34.798561 | 116 | 0.551995 |
b996ad8d5f407e5b1769d9b50ca7be5705a211e8 | 1,937 | py | Python | pyzmq/examples/pubsub/subscriber.py | Surfndez/source-publish | c3838b303c1a0806f21cd4e8d8c207015b3ce9c8 | ["Intel"] | null | null | null | pyzmq/examples/pubsub/subscriber.py | Surfndez/source-publish | c3838b303c1a0806f21cd4e8d8c207015b3ce9c8 | ["Intel"] | 1 | 2021-01-21T17:43:33.000Z | 2021-01-21T17:43:33.000Z | pyzmq/examples/pubsub/subscriber.py | Surfndez/source-publish | c3838b303c1a0806f21cd4e8d8c207015b3ce9c8 | ["Intel"] | null | null | null |
"""A test that subscribes to NumPy arrays.
Uses REQ/REP (on PUB/SUB socket + 1) to synchronize
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2010 Brian Granger
#
# Distributed under the terms of the New BSD License. The full license is in
# the file COPYING.BSD, distributed as part of this software.
#-----------------------------------------------------------------------------
import sys
import time
import zmq
import numpy
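# `main` is elided in this excerpt. A hypothetical sketch that matches the
# module docstring: a SUB socket for the arrays plus a REQ socket on the next
# port to synchronize with the publisher (argument layout and message framing
# are assumptions).
def main():
    if len(sys.argv) != 3:
        print("usage: subscriber.py <connect_to> <array_count>")
        sys.exit(1)
    connect_to, count = sys.argv[1], int(sys.argv[2])
    ctx = zmq.Context()
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    sub.connect(connect_to)
    # Synchronize over REQ/REP on port + 1.
    host, port = connect_to.rsplit(":", 1)
    sync = ctx.socket(zmq.REQ)
    sync.connect("%s:%s" % (host, int(port) + 1))
    sync.send(b"ready")
    sync.recv()
    start = time.time()
    for _ in range(count):
        md = sub.recv_json()
        buf = sub.recv()
        numpy.frombuffer(buf, dtype=md["dtype"]).reshape(md["shape"])
    print("received %i arrays in %.2f s" % (count, time.time() - start))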
if __name__ == "__main__":
main()
| 25.826667 | 78 | 0.545173 |
b997c70668ace413cc27502883f737e007e56239 | 1,006 | py | Python | Doc/includes/sqlite3/load_extension.py | livioso/cpython | 077061a7b24917aaf31057885c69919c5a553c88 | ["PSF-2.0"] | 36 | 2019-06-07T20:44:06.000Z | 2022-03-23T06:19:43.000Z | Doc/includes/sqlite3/load_extension.py | livioso/cpython | 077061a7b24917aaf31057885c69919c5a553c88 | ["PSF-2.0"] | 49 | 2016-02-29T17:59:52.000Z | 2019-05-05T04:59:26.000Z | Doc/includes/sqlite3/load_extension.py | livioso/cpython | 077061a7b24917aaf31057885c69919c5a553c88 | ["PSF-2.0"] | 28 | 2019-06-27T04:11:27.000Z | 2022-03-11T06:27:44.000Z |
import sqlite3
con = sqlite3.connect(":memory:")
# enable extension loading
con.enable_load_extension(True)
# Load the fulltext search extension
con.execute("select load_extension('./fts3.so')")
# alternatively you can load the extension using an API call:
# con.load_extension("./fts3.so")
# disable extension loading again
con.enable_load_extension(False)
# example from SQLite wiki
con.execute("create virtual table recipe using fts3(name, ingredients)")
con.executescript("""
insert into recipe (name, ingredients) values ('broccoli stew', 'broccoli peppers cheese tomatoes');
insert into recipe (name, ingredients) values ('pumpkin stew', 'pumpkin onions garlic celery');
insert into recipe (name, ingredients) values ('broccoli pie', 'broccoli cheese onions flour');
insert into recipe (name, ingredients) values ('pumpkin pie', 'pumpkin sugar flour butter');
""")
for row in con.execute("select rowid, name, ingredients from recipe where name match 'pie'"):
print(row)
| 37.259259 | 104 | 0.744533 |
b9982b7f935a0931c3a9dc4e8ec48b12b5523acb | 22,060 | py | Python | lingvo/core/inference_graph_exporter.py | RunzheYang/lingvo | 1291e29812f9ee9836f9cacbb05db9ec6b095234 | ["Apache-2.0"] | 1 | 2021-09-02T18:04:13.000Z | 2021-09-02T18:04:13.000Z | lingvo/core/inference_graph_exporter.py | RunzheYang/lingvo | 1291e29812f9ee9836f9cacbb05db9ec6b095234 | ["Apache-2.0"] | null | null | null | lingvo/core/inference_graph_exporter.py | RunzheYang/lingvo | 1291e29812f9ee9836f9cacbb05db9ec6b095234 | ["Apache-2.0"] | null | null | null |
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility for exporting an InferenceGraph proto from model params."""
import collections
import contextlib
import re
import lingvo.compat as tf
from lingvo.core import base_model
from lingvo.core import bfloat16_variables
from lingvo.core import inference_graph_pb2
from lingvo.core import py_utils
import six
from google.protobuf import text_format
FLAGS = tf.flags.FLAGS
# InferenceDeviceOptions contains options to configure inference on the device.
# device: Device to infer on.
# retain_device_placement: If true, the specified device in the generated
# inference graph nodes will be retained. Otherwise, the specified device
# will be cleared, so that the runtime can choose automatically.
# var_options: Options on handling variables. For TPUs, variables can be
# either placed on device through 'ON_DEVICE' option, or treated as
# constants with AS_CONSTANTS.
# gen_init_op: Whether to serialize initialization ops for the device. For TPUs,
# servers can be initialized globally once, in which case this should be
# turned off to avoid tripping initialization checks.
# dtype_override: Whether to override the dtype to use for activations and
# weights in the model. Options supported are None or tf.bfloat16.
InferenceDeviceOptions = collections.namedtuple('InferenceDeviceOptions', [
'device', 'retain_device_placement', 'var_options', 'gen_init_op',
'dtype_override', 'fprop_dtype_override'
])
_CONST_GUARANTEE = None
# Marks variable as constants for compilation
def MaybeGuaranteeConstGetter(getter, name, *args, **kwargs):
global _CONST_GUARANTEE
if _CONST_GUARANTEE:
with tf.control_dependencies(None):
return tf.guarantee_const(
getter(name, *args, **kwargs), name=name + '/GuaranteeConst')
else:
return getter(name, *args, **kwargs)
def _GetVarName(v):
return v.name[:-len(':0')]
def _MakeVariableDictionary(variables):
"""Returns a dictionary with name -> tf.Variable() mapping."""
vars_dict = {}
for v in variables:
vars_dict[_GetVarName(v)] = v
return vars_dict
def IsTpu(device_options):
return device_options.device == 'tpu'
def ShouldForceBfloat16ForWeightsAndActivations(device_options):
return device_options.dtype_override == tf.bfloat16
def ShouldForceBfloat16ForActivations(device_options):
return device_options.fprop_dtype_override == tf.bfloat16
def ConvertSubgraphDictToProto(subgraphs_dict):
"""Converts dict of subgraphs/feeds/fetches to InferenceGraph.
Args:
subgraphs_dict: Dict of (fetches, feeds) where each fetches/feeds is a
NestedMap.
Returns:
Equivalent InferenceGraph.
"""
# Build the output inference graph.
inference_graph_proto = inference_graph_pb2.InferenceGraph()
for subgraph_name, tensors in subgraphs_dict.items():
fetches = tensors[0]
feeds = tensors[1]
# Rewrite fetches and feeds to map to their tensor name instead of
# Tensor instance.
named_fetches = {k: v.name for k, v in fetches.items() if v is not None}
named_feeds = {k: v.name for k, v in feeds.items() if v is not None}
# Export as subgraph.
inference_graph_proto.subgraphs[subgraph_name].fetches.update(named_fetches)
inference_graph_proto.subgraphs[subgraph_name].feeds.update(named_feeds)
return inference_graph_proto
def GetOutputOpNames(graph,
inference_graph_proto,
subgraphs=None,
preserve_colocation_nodes=True,
preserve_saver_restore_nodes=False,
preserve_extra_ops=None):
"""Gets output op names from an inference graph.
Args:
graph: The tf graph.
inference_graph_proto: an InferenceGraph proto.
subgraphs: an optional list of subgraph names. If provided, only output ops
from these subgraphs are preserved. Otherwise, all subgraphs are included.
preserve_colocation_nodes: a Python bool, default to True. Preserves nodes
colocating with the closure of output ops in the returned array.
preserve_saver_restore_nodes: a Python bool, default to False. Preserves
nodes for restoring according to inference_graph_proto.saver_def.
preserve_extra_ops: an optional list of extra op names to preserve as long
as they present in the graph.
Returns:
Array of tf op names that should be preserved in the graph.
"""
output_op_names = set()
def _GetOpName(tensor_or_op_name):
"""Returns the op name of the given node name."""
# Tensor names have format <op_name>:<output_index>. Some inference
# graphs put tensors and others put ops in the feeds/fetches (depends
# on how it is used). We differentiate here. We still do the lookup in
# the graph to sanity check (versus relying on the text manipulation).
# If this logic ever breaks, TensorFlow will raise a ValueError with
# a description of the syntax of each.
if re.search(r':[0-9]+$', tensor_or_op_name):
# Tensor-name.
t = graph.get_tensor_by_name(tensor_or_op_name)
return t.op.name
else:
op = graph.get_operation_by_name(tensor_or_op_name)
return op.name
for subgraph_name, subgraph in inference_graph_proto.subgraphs.items():
if subgraphs and subgraph_name not in subgraphs:
tf.logging.info('Skip subgraph %s.', subgraph_name)
continue
# Sometimes feeds aren't connected to any outputs but keep them in the graph
# anyways to avoid errors.
for tensor_or_op_name in (list(subgraph.feeds.values()) +
list(subgraph.fetches.values())):
output_op_names.add(_GetOpName(tensor_or_op_name))
if preserve_saver_restore_nodes:
# Only nodes for restoring is preserved. saver_def.save_tensor_name is
# skipped because it's only used for saving.
saver_def = inference_graph_proto.saver_def
for op_name in [saver_def.filename_tensor_name, saver_def.restore_op_name]:
try:
output_op_names.add(_GetOpName(op_name))
except KeyError:
tf.logging.info('Op/tensor %s not in the graph. Ignoring.' % op_name)
if not preserve_colocation_nodes and not preserve_extra_ops:
return sorted(list(output_op_names))
# We also need to preserve any nodes that are used for colocation.
# E.g., a node may have this attr:
# attr {
# key: "_class"
# value {
# list {
# s: "loc:@inference/embedding_lookup/Read/ReadVariableOp"
# }
# }
# }
#
# In this case, we need to make sure the node
# inference/embedding_lookup/Read/ReadVariableOp is not pruned.
#
# TODO(zhifengc): It's possible that it's better to fix in
# tf.graph_util.extract_sub_graph.
graph_def = tf.graph_util.extract_sub_graph(graph.as_graph_def(),
list(output_op_names))
reachable_vars = [node.name for node in graph_def.node]
for node in graph.get_operations():
if preserve_extra_ops and node.name in preserve_extra_ops:
output_op_names.add(node.name)
elif preserve_colocation_nodes and '_class' in node.node_def.attr:
for loc in node.node_def.attr['_class'].list.s:
loc = six.ensure_text(loc, 'utf-8')
if loc.startswith('loc:@'):
loc_name = loc[5:]
if loc_name not in reachable_vars:
# Skip nodes that cannot be reached from the pruned graph.
continue
output_op_names.add(node.name)
return sorted(list(output_op_names))
def _ParamExists(param_obj, param_name):
"""Tests whether param_name is contained in param_obj."""
if not param_obj:
return
for k, _ in param_obj.IterParams():
if k == param_name:
return True
return False
def _FreezeGraphFromCheckpoint(graph, saver, checkpoint, output_op_names):
"""Freezes a graph from a checkpoint.
Args:
graph: tf.Graph.
saver: The tf.Saver to use for restoration.
checkpoint: The checkpoint to restore.
output_op_names: Names of output ops.
Returns:
Resulting tf.GraphDef.
"""
sess = tf.Session(graph=graph, config=py_utils.SessionConfig())
saver.restore(sess, checkpoint)
return tf.graph_util.convert_variables_to_constants(
sess, graph.as_graph_def(), output_op_names)
def _FreezeDefaults(graph, output_op_names):
"""Default initializes a graph and freezes it.
Args:
graph: tf.Graph.
output_op_names: Names of output ops.
Returns:
Resulting tf.GraphDef.
"""
with tf.Session(graph=graph, config=py_utils.SessionConfig()) as sess:
sess.run(graph.get_operation_by_name('init_all_variables'))
return tf.graph_util.convert_variables_to_constants(sess,
graph.as_graph_def(),
output_op_names)
class InferenceGraphExporter:
"""Class for exporting inference graphs."""
| 38.100173 | 116 | 0.694334 |
b9982e3e4e7a4b4799e5780bd7629d5235cc1b40 | 1,836 | py | Python | src/preprocessing/annual_hc_by_crime_loc.py | VijayKalmath/USCrimeAnalysis | 14c96aae52547a4f7ea140395c62a621a97def50 | ["MIT"] | null | null | null | src/preprocessing/annual_hc_by_crime_loc.py | VijayKalmath/USCrimeAnalysis | 14c96aae52547a4f7ea140395c62a621a97def50 | ["MIT"] | null | null | null | src/preprocessing/annual_hc_by_crime_loc.py | VijayKalmath/USCrimeAnalysis | 14c96aae52547a4f7ea140395c62a621a97def50 | ["MIT"] | null | null | null |
#! usr/env/bin python
import glob
import numpy as np
import pandas as pd
from tqdm import tqdm
def get_place_crime_count(path:str)->pd.DataFrame:
"""
Function to return
"""
# Extracting the table name from and year from the given file path
t_name = " ".join(path[path.index("Table"):path.index("_Incidents")].split("_"))
t_year = path[path.index(".xls")-4:path.index(".xls")]
try:
# Read the Excel spreadsheet
df = pd.read_excel(path,sheet_name=t_name)
# Get the start and end indices of the interested datapoints
start = df.index[df[t_name] == "Total"][0] + 1
end = df.index[df[t_name] == "Multiple locations"][0]
# Slice the dataset
df = df.iloc[start:end,0:2]
# Reset the index for the reduced dataframe
df.reset_index(drop = True, inplace = True)
# Rename the columns
df.rename(columns={t_name: "Place", "Unnamed: 1": t_year}, inplace = True)
# Return the value
return df
except:
# If there is no such data return an empty dataframe
i_list = list(range(0,47))
return pd.DataFrame(np.nan, index= i_list, columns=['Place', t_year])
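# The original main() body is not part of this excerpt; the sketch below is an
# assumption built only from the imports above: it walks the yearly spreadsheets,
# extracts each table with get_place_crime_count, and joins the year columns on
# "Place". The data directory and output filename are placeholders.
def main():
    frames = []
    for path in tqdm(sorted(glob.glob("./data/*.xls"))):
        # Drop rows whose Place is NaN (the empty-frame fallback above).
        df = get_place_crime_count(path).dropna(subset=["Place"])
        frames.append(df.set_index("Place"))
    # Align the yearly count columns side by side on the Place index.
    result = pd.concat(frames, axis=1)
    result.to_csv("annual_hc_by_crime_loc.csv")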
if __name__ == '__main__':
main()
| 33.381818 | 84 | 0.6378 |
b998534e368ce74be309448b790e384f839c6d4a
| 1,672 |
py
|
Python
|
allennlp/tests/modules/token_embedders/bag_of_word_counts_token_embedder_test.py
|
ethanjperez/allennlp
|
e520993f16f0da7e2c40f6e44b8dc56338f46b57
|
[
"Apache-2.0"
] | 24 |
2019-09-16T00:10:54.000Z
|
2021-09-08T19:31:51.000Z
|
allennlp/tests/modules/token_embedders/bag_of_word_counts_token_embedder_test.py
|
ethanjperez/allennlp
|
e520993f16f0da7e2c40f6e44b8dc56338f46b57
|
[
"Apache-2.0"
] | null | null | null |
allennlp/tests/modules/token_embedders/bag_of_word_counts_token_embedder_test.py
|
ethanjperez/allennlp
|
e520993f16f0da7e2c40f6e44b8dc56338f46b57
|
[
"Apache-2.0"
] | 7 |
2019-09-16T02:37:31.000Z
|
2021-09-01T06:06:17.000Z
|
# pylint: disable=no-self-use,invalid-name
import numpy as np
from numpy.testing import assert_almost_equal
import torch
from allennlp.common import Params
from allennlp.data import Vocabulary
from allennlp.modules.token_embedders import BagOfWordCountsTokenEmbedder
from allennlp.common.testing import AllenNlpTestCase
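# The test class body is not part of this excerpt. Below is a hedged sketch of
# the kind of check such a test performs; the embedder's constructor defaults
# and output shape are assumptions based on its name, not verified API.
class TestBagOfWordCountsTokenEmbedder(AllenNlpTestCase):
    def test_forward_returns_count_vector_per_instance(self):
        vocab = Vocabulary()
        vocab.add_token_to_namespace("1")
        vocab.add_token_to_namespace("2")
        vocab.add_token_to_namespace("3")
        embedder = BagOfWordCountsTokenEmbedder(vocab)
        inputs = torch.from_numpy(np.array([[2, 3], [3, 3]]))
        output = embedder(inputs)
        # Assumed: one count vector of vocabulary size per input sequence.
        assert tuple(output.shape) == (2, vocab.get_vocab_size())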
| 45.189189 | 93 | 0.70634 |
b998e92d411833a80bc4657adf0243c90d5c6084
| 5,457 |
py
|
Python
|
demo/demo_shapenet.py
|
hengkaiz/meshrcnn
|
eb5b5bc0639a33e48f0fc1e0834106798cd1e3d8
|
[
"BSD-3-Clause"
] | null | null | null |
demo/demo_shapenet.py
|
hengkaiz/meshrcnn
|
eb5b5bc0639a33e48f0fc1e0834106798cd1e3d8
|
[
"BSD-3-Clause"
] | null | null | null |
demo/demo_shapenet.py
|
hengkaiz/meshrcnn
|
eb5b5bc0639a33e48f0fc1e0834106798cd1e3d8
|
[
"BSD-3-Clause"
] | null | null | null |
import argparse
import logging
import multiprocessing as mp
import logging
import os
from detectron2.evaluation import inference_context
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
from detectron2.utils.collect_env import collect_env_info
from detectron2.utils.logger import setup_logger
from fvcore.common.file_io import PathManager
from pathlib import Path
from pytorch3d.io import save_obj
from shapenet.config.config import get_shapenet_cfg
from shapenet.data.utils import imagenet_preprocess
from shapenet.modeling.heads import voxel_head
from shapenet.modeling.mesh_arch import build_model
from shapenet.utils.checkpoint import clean_state_dict
import torchvision.transforms as T
import glob
from PIL import Image
import trimesh
import pyvista as pv
import pyacvd
import numpy as np
logger = logging.getLogger('demo')
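# get_parser() and setup_cfgs() are referenced below but their bodies are not
# part of this excerpt; the minimal hedged sketches here cover only what the
# __main__ block actually uses (args.input plus a shapenet cfg). The exact
# flag names are assumptions, not the demo's real CLI.
def get_parser():
    parser = argparse.ArgumentParser(description="ShapeNet demo")
    parser.add_argument("--config-file", metavar="FILE", help="path to config file")
    parser.add_argument("--input", help="directory containing input image subfolders")
    return parser


def setup_cfgs(args):
    cfg = get_shapenet_cfg()
    cfg.merge_from_file(args.config_file)
    cfg.freeze()
    return cfg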
if __name__ == "__main__":
mp.set_start_method("spawn", force=True)
args = get_parser().parse_args()
device = torch.device("cuda:%d" % 0)
logger = setup_logger(name="demo shapenet")
logger.info("Arguments: " + str(args))
cfg = setup_cfgs(args)
    # load checkpoint and build model
if cfg.MODEL.CHECKPOINT == "":
        raise ValueError("Invalid checkpoint provided")
logger.info("Loading model from checkpoint: %s" % (cfg.MODEL.CHECKPOINT))
cp = torch.load(PathManager.get_local_path(cfg.MODEL.CHECKPOINT))
state_dict = clean_state_dict(cp["best_states"]["model"])
model = build_model(cfg)
model.load_state_dict(state_dict)
logger.info("Model loaded")
model.to(device)
sub_dir = sorted(os.listdir(args.input))
for sd in sub_dir:
curr_path = os.path.join(args.input, sd)
images = glob.glob(curr_path + "/*.png")
for img_dir in images:
# load image
transform = [T.ToTensor()]
transform.append(imagenet_preprocess())
transform = T.Compose(transform)
im_name = img_dir.split("/")[-1].split(".")[0]
with PathManager.open(img_dir, "rb") as f:
img = Image.open(f).convert("RGB")
img = transform(img)
img = img[None, :, :, :]
img = img.to(device)
with inference_context(model):
img_feats, voxel_scores, meshes_pred, P, cubified_meshes = model(img)
# Save voxel_score
voxel_odir = os.path.join(curr_path, "voxel_score")
if not Path(voxel_odir).is_dir():
os.mkdir(voxel_odir)
voxel_file = os.path.join(voxel_odir, "%s.pt" % (im_name))
torch.save(voxel_scores, voxel_file)
# Save image features
imgfeat_odir = os.path.join(curr_path, "img_feat")
if not Path(imgfeat_odir).is_dir():
os.mkdir(imgfeat_odir)
img_feat_file = os.path.join(imgfeat_odir, "%s.pt" % (im_name))
torch.save(img_feats, img_feat_file)
# Save P
p_odir = os.path.join(curr_path, "P")
if not Path(p_odir).is_dir():
os.mkdir(p_odir)
p_file = os.path.join(p_odir, "%s.pt" % (im_name))
torch.save(P, p_file)
# Save cubified mesh
cmesh_odir = os.path.join(curr_path, "cube_mesh")
if not Path(cmesh_odir).is_dir():
os.mkdir(cmesh_odir)
cube_mesh_file = os.path.join(cmesh_odir, "%s_cube.obj" % (im_name))
c_verts, c_faces = cubified_meshes[-1].get_mesh_verts_faces(0)
save_obj(cube_mesh_file, c_verts, c_faces)
# Save predicted mesh
mesh_odir = os.path.join(curr_path, "final_mesh")
if not Path(mesh_odir).is_dir():
os.mkdir(mesh_odir)
save_file = os.path.join(mesh_odir, "%s.obj" % (im_name))
verts, faces = meshes_pred[-1].get_mesh_verts_faces(0)
save_obj(save_file, verts, faces)
logger.info("Predictions saved for %s/%s" % (curr_path.split('/')[-1], im_name))
| 31.912281 | 99 | 0.637713 |
b998f6994cf6e83702b501cd661bb37f91b59317
| 7,854 |
py
|
Python
|
proglearn/voters.py
|
jshin13/progressive-learning
|
dccc70fe5f6a03d2c53c2b01fd2122d7fd2798dc
|
[
"Apache-2.0"
] | null | null | null |
proglearn/voters.py
|
jshin13/progressive-learning
|
dccc70fe5f6a03d2c53c2b01fd2122d7fd2798dc
|
[
"Apache-2.0"
] | null | null | null |
proglearn/voters.py
|
jshin13/progressive-learning
|
dccc70fe5f6a03d2c53c2b01fd2122d7fd2798dc
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
# from sklearn.ensemble import BaggingClassifier
# from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.utils.validation import (
check_X_y,
check_array,
NotFittedError,
)
from sklearn.utils.multiclass import check_classification_targets, type_of_target
from .base import BaseVoter
from tensorflow import keras
from keras import layers
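# The voter classes themselves are not part of this excerpt. Below is a
# minimal hedged sketch of a KNN-backed voter built on BaseVoter; the
# fit/vote interface mirrors proglearn's naming but is an assumption here,
# not the package's verified API.
class SimpleKNNVoter(BaseVoter):
    def __init__(self, n_neighbors=5):
        self.n_neighbors = n_neighbors

    def fit(self, X, y):
        X, y = check_X_y(X, y)
        check_classification_targets(y)
        self._knn = KNeighborsClassifier(n_neighbors=self.n_neighbors)
        self._knn.fit(X, y)
        return self

    def vote(self, X):
        # Posterior estimates over classes for each row of X.
        if not hasattr(self, "_knn"):
            raise NotFittedError("this voter has not been fitted yet")
        return self._knn.predict_proba(check_array(X))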
| 26.805461 | 99 | 0.556277 |
b999024320e50c940c8f273e6f0536039450c829
| 1,949 |
py
|
Python
|
config.py
|
jhattat/photoBooth
|
f6fe3ab418bb917792e10349597401ed34078766
|
[
"MIT"
] | null | null | null |
config.py
|
jhattat/photoBooth
|
f6fe3ab418bb917792e10349597401ed34078766
|
[
"MIT"
] | null | null | null |
config.py
|
jhattat/photoBooth
|
f6fe3ab418bb917792e10349597401ed34078766
|
[
"MIT"
] | null | null | null |
# Tumblr Setup
# Replace the values with your information
# OAuth keys can be generated from https://api.tumblr.com/console/calls/user/info
consumer_key='ShbOqI5zErQXOL7Qnd5XduXpY9XQUlBgJDpCLeq1OYqnY2KzSt' #replace with your key
consumer_secret='ulZradkbJGksjpl2MMlshAfJgEW6TNeSdZucykqeTp8jvwgnhu' #replace with your secret code
oath_token='uUcBuvJx8yhk4HJIZ39sfcYo0W4VoqcvUetR2EwcI5Sn8SLgNt' #replace with your OAuth token
oath_secret='iNJlqQJI6dwhAGmdNbMtD9u7VazmX2Rk5uW0fuIozIEjk97lz4' #replace with your OAuth secret code
tumblr_blog = 'soniaetjeremie' # replace with your tumblr account name without .tumblr.com
tagsForTumblr = "photobooth" # change to tags you want, separated with commas
#Config settings to change behavior of photo booth
monitor_w = 800 # width of the display monitor
monitor_h = 480 # height of the display monitor
file_path = '/home/pi/photobooth/pics/' # path to save images
clear_on_startup = False # True will clear previously stored photos as the program launches. False will leave all previous photos.
debounce = 0.3 # how long to debounce the button. Add more time if the button triggers too many times.
post_online = True # True to upload images. False to store locally only.
capture_count_pics = True # if true, show a photo count between taking photos. If false, do not. False is faster.
make_gifs = True # True to make an animated gif. False to post 4 jpgs into one post.
hi_res_pics = False # True to save high res pics from camera.
# If also uploading, the program will also convert each image to a smaller image before making the gif.
# False to first capture low res pics. False is faster.
# Careful, each photo costs against your daily Tumblr upload max.
camera_iso = 400 # adjust for lighting issues. Normal is 100 or 200. Sort of dark is 400. Dark is 800 max.
# available options: 100, 200, 320, 400, 500, 640, 800
| 77.96 | 130 | 0.758338 |
b9991711cbe60fa3459b0fb4cb64d023132610e8
| 896 |
py
|
Python
|
accounts/admin.py
|
GuilhemN/site-interludes
|
69873810d5b0168aa57277ba51805117e6c53874
|
[
"MIT"
] | null | null | null |
accounts/admin.py
|
GuilhemN/site-interludes
|
69873810d5b0168aa57277ba51805117e6c53874
|
[
"MIT"
] | 1 |
2022-03-24T10:41:10.000Z
|
2022-03-24T12:39:30.000Z
|
accounts/admin.py
|
GuilhemN/site-interludes
|
69873810d5b0168aa57277ba51805117e6c53874
|
[
"MIT"
] | 1 |
2022-03-23T22:30:12.000Z
|
2022-03-23T22:30:12.000Z
|
from django.contrib import admin
from django.contrib.auth.models import Group
from accounts.models import EmailUser
from shared.admin import ExportCsvMixin
# no need for groups - we only have regular users and superusers
admin.site.unregister(Group)
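# The EmailUser registration itself is not part of this excerpt; the sketch
# below is a hedged illustration of how it plausibly looks, assuming
# ExportCsvMixin contributes an "export_as_csv" admin action. The field names
# are placeholders, not the model's verified attributes.
@admin.register(EmailUser)
class EmailUserAdmin(ExportCsvMixin, admin.ModelAdmin):
    list_display = ("email", "is_active", "is_staff")
    actions = ["export_as_csv"]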
| 37.333333 | 106 | 0.753348 |
b9993aa0d134cc4869bfe49fd1ecd6dc8c6b0b96
| 23,640 |
py
|
Python
|
rotkehlchen/exchanges/coinbase.py
|
vnavascues/rotki
|
8675bdb02bf84bfccb5d59362e3ae2b7138fcd8f
|
[
"BSD-3-Clause"
] | null | null | null |
rotkehlchen/exchanges/coinbase.py
|
vnavascues/rotki
|
8675bdb02bf84bfccb5d59362e3ae2b7138fcd8f
|
[
"BSD-3-Clause"
] | null | null | null |
rotkehlchen/exchanges/coinbase.py
|
vnavascues/rotki
|
8675bdb02bf84bfccb5d59362e3ae2b7138fcd8f
|
[
"BSD-3-Clause"
] | null | null | null |
import hashlib
import hmac
import logging
import time
from json.decoder import JSONDecodeError
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
from urllib.parse import urlencode
import requests
from rotkehlchen.assets.asset import Asset
from rotkehlchen.assets.converters import asset_from_coinbase
from rotkehlchen.constants.misc import ZERO
from rotkehlchen.errors import DeserializationError, RemoteError, UnknownAsset, UnsupportedAsset
from rotkehlchen.exchanges.data_structures import AssetMovement, Trade
from rotkehlchen.exchanges.exchange import ExchangeInterface
from rotkehlchen.exchanges.utils import deserialize_asset_movement_address, get_key_if_has_val
from rotkehlchen.inquirer import Inquirer
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.serialization.deserialize import (
deserialize_asset_amount,
deserialize_asset_amount_force_positive,
deserialize_asset_movement_category,
deserialize_fee,
deserialize_timestamp_from_date,
deserialize_trade_type,
)
from rotkehlchen.typing import (
ApiKey,
ApiSecret,
AssetMovementCategory,
Fee,
Location,
Price,
Timestamp,
TradePair,
)
from rotkehlchen.user_messages import MessagesAggregator
from rotkehlchen.utils.interfaces import cache_response_timewise, protect_with_lock
from rotkehlchen.utils.serialization import rlk_jsonloads_dict
if TYPE_CHECKING:
from rotkehlchen.db.dbhandler import DBHandler
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
def trade_from_coinbase(raw_trade: Dict[str, Any]) -> Optional[Trade]:
"""Turns a coinbase transaction into a rotkehlchen Trade.
https://developers.coinbase.com/api/v2?python#buys
    If the coinbase transaction is not a trade-related transaction, returns None.
Throws:
- UnknownAsset due to Asset instantiation
- DeserializationError due to unexpected format of dict entries
        - KeyError due to dict entries missing an expected entry
"""
if raw_trade['status'] != 'completed':
# We only want to deal with completed trades
return None
if raw_trade['instant']:
raw_time = raw_trade['created_at']
else:
raw_time = raw_trade['payout_at']
timestamp = deserialize_timestamp_from_date(raw_time, 'iso8601', 'coinbase')
trade_type = deserialize_trade_type(raw_trade['resource'])
tx_amount = deserialize_asset_amount(raw_trade['amount']['amount'])
tx_asset = asset_from_coinbase(raw_trade['amount']['currency'], time=timestamp)
native_amount = deserialize_asset_amount(raw_trade['subtotal']['amount'])
native_asset = asset_from_coinbase(raw_trade['subtotal']['currency'], time=timestamp)
# in coinbase you are buying/selling tx_asset for native_asset
pair = TradePair(f'{tx_asset.identifier}_{native_asset.identifier}')
amount = tx_amount
    # The rate is how much you get/give in quote currency if you buy/sell 1 unit of base currency
rate = Price(native_amount / tx_amount)
fee_amount = deserialize_fee(raw_trade['fee']['amount'])
fee_asset = asset_from_coinbase(raw_trade['fee']['currency'], time=timestamp)
return Trade(
timestamp=timestamp,
location=Location.COINBASE,
pair=pair,
trade_type=trade_type,
amount=amount,
rate=rate,
fee=fee_amount,
fee_currency=fee_asset,
link=str(raw_trade['id']),
)
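# Example (hedged): a minimal raw_trade dict with just the keys this function
# reads; the values are made up for illustration, not real Coinbase data.
#
#   raw_trade = {
#       'status': 'completed', 'instant': False, 'resource': 'buy',
#       'created_at': '2020-01-01T00:00:00Z', 'payout_at': '2020-01-02T00:00:00Z',
#       'amount': {'amount': '0.5', 'currency': 'BTC'},
#       'subtotal': {'amount': '4000.00', 'currency': 'USD'},
#       'fee': {'amount': '10.00', 'currency': 'USD'},
#       'id': 'abc123',
#   }
#   trade = trade_from_coinbase(raw_trade)  # -> Trade with rate 8000 USD/BTC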
| 40.688468 | 98 | 0.592047 |
b9994eb6b47f29e07dc9f474ab82878fdc8ae029
| 3,533 |
py
|
Python
|
lib/python3.7/site-packages/ldap/controls/deref.py
|
aonrobot/MSC-thug-auth-provider
|
aef37ef5a000586b8502cc536244f31e08b9c2db
|
[
"Apache-2.0"
] | 1 |
2019-06-21T11:51:26.000Z
|
2019-06-21T11:51:26.000Z
|
lib/python3.7/site-packages/ldap/controls/deref.py
|
aonrobot/MSC-thug-auth-provider
|
aef37ef5a000586b8502cc536244f31e08b9c2db
|
[
"Apache-2.0"
] | 13 |
2019-07-03T21:28:31.000Z
|
2022-02-26T10:42:05.000Z
|
lib/python3.7/site-packages/ldap/controls/deref.py
|
aonrobot/MSC-thug-auth-provider
|
aef37ef5a000586b8502cc536244f31e08b9c2db
|
[
"Apache-2.0"
] | 2 |
2020-02-11T09:34:39.000Z
|
2020-11-10T14:41:32.000Z
|
# -*- coding: utf-8 -*-
"""
ldap.controls.deref - classes for the dereference control
(see https://tools.ietf.org/html/draft-masarati-ldap-deref)
See https://www.python-ldap.org/ for project details.
"""
__all__ = [
'DEREF_CONTROL_OID',
'DereferenceControl',
]
import ldap.controls
from ldap.controls import LDAPControl,KNOWN_RESPONSE_CONTROLS
import pyasn1_modules.rfc2251
from pyasn1.type import namedtype,univ,tag
from pyasn1.codec.ber import encoder,decoder
from pyasn1_modules.rfc2251 import LDAPDN,AttributeDescription,AttributeDescriptionList,AttributeValue
DEREF_CONTROL_OID = '1.3.6.1.4.1.4203.666.5.16'
# Request types
#---------------------------------------------------------------------------
# For compatibility with ASN.1 declaration in I-D
AttributeList = AttributeDescriptionList
# Response types
#---------------------------------------------------------------------------
KNOWN_RESPONSE_CONTROLS[DereferenceControl.controlType] = DereferenceControl
| 29.441667 | 102 | 0.711577 |
b999aec7c34874ef90e0f30812ac97217ce90cca
| 3,145 |
py
|
Python
|
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | 1 |
2020-03-19T20:09:00.000Z
|
2020-03-19T20:09:00.000Z
|
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | null | null | null |
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | null | null | null |
import requests
import urllib.request
import os.path
import shutil
import csv
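# The original main() body is not included in this excerpt. The sketch below
# is an assumption built only from the imports above: it downloads an image
# for each (name, url) row of a CSV manifest, skipping files that already
# exist. The manifest filename and its column layout are placeholders.
def main():
    with open("emoji.csv", newline="") as f:
        for name, url in csv.reader(f):
            target = name + ".png"
            if os.path.exists(target):
                continue  # already downloaded
            with requests.get(url, stream=True) as r, open(target, "wb") as out:
                shutil.copyfileobj(r.raw, out)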
if __name__ == '__main__':
main()
| 37.440476 | 151 | 0.574245 |
b99b1d1ec6004cbeeb91e19410dbbb1e2216c45e
| 1,478 |
py
|
Python
|
nsq/__init__.py
|
jehiah/pynsq
|
899b60a8ce77ed6c8ab899fbdfd7adbc1b450c96
|
[
"MIT"
] | 1 |
2015-05-25T00:23:53.000Z
|
2015-05-25T00:23:53.000Z
|
nsq/__init__.py
|
barkinet/pynsq
|
899b60a8ce77ed6c8ab899fbdfd7adbc1b450c96
|
[
"MIT"
] | null | null | null |
nsq/__init__.py
|
barkinet/pynsq
|
899b60a8ce77ed6c8ab899fbdfd7adbc1b450c96
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
import signal
import tornado.ioloop
import logging
from .protocol import (
Error,
unpack_response,
decode_message,
valid_topic_name,
valid_channel_name,
identify,
subscribe,
ready,
finish,
touch,
requeue,
nop,
pub,
mpub,
FRAME_TYPE_RESPONSE,
FRAME_TYPE_ERROR,
FRAME_TYPE_MESSAGE,
)
from .message import Message
from .backoff_timer import BackoffTimer
from .sync import SyncConn
from .async import AsyncConn
from .reader import Reader
from .legacy_reader import LegacyReader
from .writer import Writer
from .version import __version__ # NOQA
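# _handle_term_signal is referenced by run() below but its body is not part of
# this excerpt; a minimal hedged sketch is included so run() resolves.
# Stopping the IOLoop on SIGTERM matches run()'s intent, but the exact log
# message is an assumption.
def _handle_term_signal(sig_num, frame):
    logging.getLogger(__name__).info('TERM signal received, stopping the IOLoop')
    tornado.ioloop.IOLoop.instance().stop()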
def run():
"""
Starts any instantiated :class:`nsq.Reader` or :class:`nsq.Writer`
"""
signal.signal(signal.SIGTERM, _handle_term_signal)
tornado.ioloop.IOLoop.instance().start()
__author__ = "Matt Reiferson <[email protected]>"
__all__ = ["Reader", "Writer", "run", "BackoffTimer", "Message", "Error", "LegacyReader",
"SyncConn", "AsyncConn", "unpack_response", "decode_message",
"identify", "subscribe", "ready", "finish", "touch", "requeue", "nop", "pub", "mpub",
"valid_topic_name", "valid_channel_name",
"FRAME_TYPE_RESPONSE", "FRAME_TYPE_ERROR", "FRAME_TYPE_MESSAGE"]
| 26.392857 | 96 | 0.696211 |
b99b2da4f2ac2ca37d2ded7c72545cef1cab4228
| 5,356 |
py
|
Python
|
scripts/summaryPlot.py
|
Hespian/ParFastKer
|
5ddf1685c0652e73c889cfc64c7ec1fd827f905c
|
[
"BSD-3-Clause",
"MIT"
] | 3 |
2019-08-10T08:24:19.000Z
|
2019-08-12T07:16:03.000Z
|
scripts/summaryPlot.py
|
Hespian/ParFastKer
|
5ddf1685c0652e73c889cfc64c7ec1fd827f905c
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
scripts/summaryPlot.py
|
Hespian/ParFastKer
|
5ddf1685c0652e73c889cfc64c7ec1fd827f905c
|
[
"BSD-3-Clause",
"MIT"
] | null | null | null |
import get_data_ours
import get_data_akiba
import get_data_NearLinear
import get_data_LinearTime
import os
import matplotlib.pyplot as plt
# graphs = ["uk-2002", "arabic-2005", "gsh-2015-tpd", "uk-2005", "it-2004", "sk-2005", "uk-2007-05", "webbase-2001", "asia.osm", "road_usa", "europe.osm", "rgg_n26_s0", "RHG-100000000-nodes-2000000000-edges", "delaunay_n24", "del26"]
graphs = ["uk-2002", "arabic-2005", "gsh-2015-tpd", "uk-2005", "it-2004", "sk-2005", "uk-2007-05", "webbase-2001", "asia.osm", "road_usa", "europe.osm", "rgg_n26_s0", "delaunay_n24", "del26"]
linearTimeDir = "../../../triangle_counting_paper/MIS_sigmod_pub/results/LinearTimeKernels/logs"
partitioningDir = "../../LinearTimeKernels/partitions"
ourTimeDir = "../../results/LinearTimeKernelsScalingAll"
nearLinearDir = "../../../triangle_counting_paper/MIS_sigmod_pub/results/NearLinear"
akibaDir = "../../akiba_vertex_cover/results"
oursizeSequential = []
ourtimeSequential = []
oursizeParallel = []
ourtimeParallel = []
akibasize = []
akibatime = []
nearlinearsize = []
nearlineartime = []
lineartimesize = []
lineartimetime = []
for graph in graphs:
minsize = getAkibaTimeAndSize(graph)["size"]
mintime = getAkibaTimeAndSize(graph)["time"]
oss = getOurTimeAndSizeSequential(graph)["size"] / minsize
# print(graph + "(sequential): " + str(getOurTimeAndSizeSequential(graph)["size"]))
ots = getOurTimeAndSizeSequential(graph)["time"] / mintime
if oss > 0 and ots > 0:
oursizeSequential.append(oss)
ourtimeSequential.append(ots)
osp = getOurTimeAndSizeParallel(graph)["size"] / minsize
# print(graph + "(parallel): " + str(getOurTimeAndSizeParallel(graph)["size"]))
otp = getOurTimeAndSizeParallel(graph)["time"] / mintime
if osp > 0 and otp > 0:
oursizeParallel.append(osp)
ourtimeParallel.append(otp)
aks = getAkibaTimeAndSize(graph)["size"] / minsize
akt = getAkibaTimeAndSize(graph)["time"] / mintime
if aks > 0 and akt > 0:
akibasize.append(aks)
akibatime.append(akt)
nls = getNearLinearTimeAndSize(graph)["size"] / minsize
nlt = getNearLinearTimeAndSize(graph)["time"] / mintime
if nls > 0 and nlt > 0:
nearlinearsize.append(nls)
nearlineartime.append(nlt)
lts = getLinearTimeTimeAndSize(graph)["size"] / minsize
ltt = getLinearTimeTimeAndSize(graph)["time"] / mintime
if nls > 0 and nlt > 0:
lineartimesize.append(lts)
lineartimetime.append(ltt)
# print("We")
# print(oursizeSequential)
# print(ourtimeSequential)
# print("We (parallel)")
# print(oursizeParallel)
# print(ourtimeParallel)
# print("Akiba")
# print(akibasize)
# print(akibatime)
# print("NearLinear")
# print(nearlinearsize)
# print(nearlineartime)
# print("LinearTime")
# print(lineartimesize)
# print(lineartimetime)
plt.rc('font', size=14)
fig = plt.figure(figsize=(3.2, 2.4))
ax = fig.add_subplot(1,1,1)
plt.title("Summary", fontsize=14)
ax.set_yscale("log")
ax.set_xscale("log")
ax.scatter(ourtimeSequential, oursizeSequential, label="FastKer", marker="x", color="green")
ax.scatter(ourtimeParallel, oursizeParallel, label="ParFastKer", marker="+", color="black")
# ax.scatter(akibatime, akibasize, label="VCSolver", marker="^", edgecolors="blue", facecolors="none")
ax.scatter(nearlineartime, nearlinearsize, label="NearLinear", marker="o", edgecolors="red", facecolors="none")
ax.scatter(lineartimetime, lineartimesize, label="LinearTime", marker="^", edgecolors="magenta", facecolors="none")
plt.xlabel("time / VCSolver time")
plt.ylabel("size / VCSolver size")
plt.xticks([0.0001, 0.01, 1])
ax.legend(bbox_to_anchor=(0.35,-0.7), ncol=2, loc='lower center', frameon=False, borderaxespad=0., mode="expand")
plt.savefig("summaryplot_vcsolver_baseline.pdf", bbox_inches="tight")
# plt.show()
| 39.094891 | 234 | 0.720127 |
b99c2305beceab596bedee8ad399b6faa3216070
| 3,587 |
py
|
Python
|
bouncer/cli/base.py
|
lrnt/git-bouncer
|
3015e11a5d2c90986124de73bf1fd0f5a8563360
|
[
"MIT"
] | null | null | null |
bouncer/cli/base.py
|
lrnt/git-bouncer
|
3015e11a5d2c90986124de73bf1fd0f5a8563360
|
[
"MIT"
] | null | null | null |
bouncer/cli/base.py
|
lrnt/git-bouncer
|
3015e11a5d2c90986124de73bf1fd0f5a8563360
|
[
"MIT"
] | null | null | null |
import configparser
import sys
import inspect
from argparse import ArgumentParser, RawDescriptionHelpFormatter
| 27.381679 | 72 | 0.578199 |
b99c4d9fb380e0635cac67dff2a6820b500bf34f
| 13,728 |
py
|
Python
|
Examples/ExampleCodes_ssccoorriinngg.py
|
MahdadJafarzadeh/ssccoorriinngg
|
63c726e9e7d0f6d13032415c76b8c3bb1ff2bee3
|
[
"MIT"
] | 2 |
2020-04-28T12:50:26.000Z
|
2020-05-13T08:52:42.000Z
|
Examples/ExampleCodes_ssccoorriinngg.py
|
MahdadJafarzadeh/ssccoorriinngg
|
63c726e9e7d0f6d13032415c76b8c3bb1ff2bee3
|
[
"MIT"
] | null | null | null |
Examples/ExampleCodes_ssccoorriinngg.py
|
MahdadJafarzadeh/ssccoorriinngg
|
63c726e9e7d0f6d13032415c76b8c3bb1ff2bee3
|
[
"MIT"
] | 1 |
2020-07-14T13:48:56.000Z
|
2020-07-14T13:48:56.000Z
|
#%% Import libs
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_validate
from sklearn.metrics import make_scorer, accuracy_score, precision_score, recall_score, f1_score
import h5py
import time
from ssccoorriinngg import ssccoorriinngg
import numpy as np
from sklearn.model_selection import cross_validate
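# NOTE (assumption): several sections below instantiate ML_Depression, which
# is not imported here; treating it as the former name of the ssccoorriinngg
# class, the alias below lets those examples run unchanged.
ML_Depression = ssccoorriinngg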
#%% Picking featureset of interest and apply classification
Object = ssccoorriinngg(filename='', channel='', fs = 200, T = 30)
path = 'C:/PhD/ML in depression/'
fname = 'feat42_Fp1-Fp2_train'
feats = 'featureset'
labels = 'labels'
# Train set
X_train, y_train = Object.LoadFeatureSet(path, fname, feats, labels)
# Test set
fname = 'feat42_Fp1-Fp2_test'
X_test, y_test = Object.LoadFeatureSet(path, fname, feats, labels)
# Define the scoring criteria:
scoring = {'accuracy' : make_scorer(accuracy_score),
'precision' : make_scorer(precision_score),
'recall' : make_scorer(recall_score),
'f1_score' : make_scorer(f1_score)}
# Cross-validation using logistic Random Forests
y_pred_RF = Object.RandomForest_Modelling(X_train, y_train, X_test, y_test, scoring = scoring, n_estimators = 500, cv = 10)
Acc, Recall, prec, f1_sc = Object.multi_label_confusion_matrix(y_test, y_pred_RF)
# Cross-validation using XGBoost
y_pred_xgb = Object.XGB_Modelling(X_train, y_train,X_test, y_test, scoring, n_estimators = 1000,
cv = 10 , max_depth=3, learning_rate=.1)
Acc, Recall, prec, f1_sc = Object.multi_label_confusion_matrix(y_test, y_pred_xgb)
#%% Outcome measures
# Define required metrics here:
Metrics = ['test_accuracy', 'test_precision', 'test_recall', 'test_f1_score']
for metric in Metrics:
#RF
r1 = results_RF[metric].mean()
std1 = results_RF[metric].std()
print(f'{metric} for RF is: {round(r1*100, 2)}+- {round(std1*100, 2)}')
# xgb
r2 = results_xgb[metric].mean()
std2 = results_xgb[metric].std()
print(f'{metric} for xgb is: {round(r2*100, 2)}+- {round(std2*100, 2)}')
# SVM
r3 = results_SVM[metric].mean()
std3 = results_SVM[metric].std()
print(f'{metric} for SVM is: {round(r3*100, 2)}+- {round(std3*100, 2)}')
# LR
r4 = results_LR[metric].mean()
std4 = results_LR[metric].std()
print(f'{metric} for LR is: {round(r4*100, 2)}+- {round(std4*100, 2)}')
#%% Applying Randomized grid search to find the best config. of RF
BestParams_RandomSearch, Bestsocre_RandomSearch ,means, stds, params= Object.RandomSearchRF(X, y,
estimator = RandomForestClassifier(), scoring = scoring,
n_estimators = [int(x) for x in np.arange(10, 500, 20)],
max_features = ['log2', 'sqrt'],
max_depth = [int(x) for x in np.arange(10, 100, 30)],
min_samples_split = [2, 5, 10],
min_samples_leaf = [1, 2, 4],
bootstrap = [True, False],
n_iter = 100, cv = 10)
#%% Test feature selection methods ##
# PCA
PCA_out = Object.FeatSelect_PCA(X, y, n_components = 5)
# Boruta
ranks_Boruta, Feat_selected_Boruta = Object.FeatSelect_Boruta(X, y, max_depth = 7)
# Lasso
Feat_selected_lasso = Object.FeatSelect_LASSO(X, y, C = 1)
#ANOVA
Feat_selected_ANOVA = Object.FeatSelect_ANOVA(X,y, k = 80)
#Recruisive
ranks_rec, Feat_selected_rec = Object.FeatSelect_Recrusive(X, y, k = 20)
#### NOW TEST CLASSIFIERS WITH SELECTED FEATS
results_RF = Object.RandomForest_Modelling(Feat_selected_Boruta, y, scoring = scoring, n_estimators = 200, cv = 10)
#%% Example save featureset
path = 'P:/3013080.02/ml_project/scripts/1D_TimeSeries/features/'
Object.SaveFeatureSet(X, y, path = path, filename = 'feat42_N3')
#%% Example load features:
X, y= Object.LoadFeatureSet(path = 'P:/3013080.02/ml_project/scripts/1D_TimeSeries/features/',
fname = 'feat42_N3_fp2-M1',
feats = 'featureset',
labels = 'labels')
#%% Combining some REM and SWS epochs
Object.CombineEpochs(directory = 'P:/3013080.02/ml_project/scripts/1D_TimeSeries/train_test/',
ch = 'fp1-M2', N3_fname = 'tr90_N3_fp1-M2_fp2-M1',
REM_fname = 'tr90_fp1-M2_fp2-M1',
saving = True, fname_save = 'tr90_N3&REM_fp1-M2')
#%% How to save some results?
directory = 'P:/3013080.02/ml_project/scripts/1D_TimeSeries/results/'
fname = '42feats_N3'
with h5py.File((directory+fname + '.h5'), 'w') as wf:
# Accuracies
dset = wf.create_dataset('acc_SVM', results_SVM['test_accuracy'].shape, data = results_SVM['test_accuracy'])
dset = wf.create_dataset('acc_LR' , results_LR['test_accuracy'].shape, data = results_LR['test_accuracy'])
dset = wf.create_dataset('acc_RF' , results_RF['test_accuracy'].shape, data = results_RF['test_accuracy'])
dset = wf.create_dataset('acc_xgb', results_xgb['test_accuracy'].shape, data = results_xgb['test_accuracy'])
# Precision
dset = wf.create_dataset('prec_SVM', results_SVM['test_precision'].shape, data = results_SVM['test_precision'])
dset = wf.create_dataset('prec_LR' , results_LR['test_precision'].shape, data = results_LR['test_precision'])
dset = wf.create_dataset('prec_RF' , results_RF['test_precision'].shape, data = results_RF['test_precision'])
dset = wf.create_dataset('prec_xgb', results_xgb['test_precision'].shape, data = results_xgb['test_precision'])
# Recall
dset = wf.create_dataset('rec_SVM', results_SVM['test_recall'].shape, data = results_SVM['test_recall'])
dset = wf.create_dataset('rec_LR' , results_LR['test_recall'].shape, data = results_LR['test_recall'])
dset = wf.create_dataset('rec_RF' , results_RF['test_recall'].shape, data = results_RF['test_recall'])
dset = wf.create_dataset('rec_xgb', results_xgb['test_recall'].shape, data = results_xgb['test_recall'])
# f1-score
dset = wf.create_dataset('f1_SVM', results_SVM['test_f1_score'].shape, data = results_SVM['test_f1_score'])
dset = wf.create_dataset('f1_LR' , results_LR['test_f1_score'].shape, data = results_LR['test_f1_score'])
dset = wf.create_dataset('f1_RF' , results_RF['test_f1_score'].shape, data = results_RF['test_f1_score'])
dset = wf.create_dataset('f1_xgb', results_xgb['test_f1_score'].shape, data = results_xgb['test_f1_score'])
#%% Extracting features from more than one channel:
tic = time.time()
########### Central electrodes #############
main_path = "D:/1D_TimeSeries/raw_EEG/without artefact/train_test/"
save_path = 'P:/3013080.02/ml_project/scripts/1D_TimeSeries/features/'
fname_C_N3 = (main_path+"tr90_N3_C3-M2_C4-M1.h5")
fname_C_REM = (main_path+"tr90_REM_C3-M2_C4-M1.h5")
ch_C4 = 'C4-M1'
ch_C3 = 'C3-M2'
Object_C3_REM = ML_Depression(filename=fname_C_REM, channel = ch_C3, fs = 200, T = 30)
X_C3_REM,y_C3_REM = Object_C3_REM.FeatureExtraction()
Object_C3_REM.SaveFeatureSet(X = X_C3_REM, y=y_C3_REM, path = save_path, filename = 'feat42_C3_REM')
Object_C4_REM = ML_Depression(filename=fname_C_REM, channel = ch_C4, fs = 200, T = 30)
X_C4_REM,y_C4_REM = Object_C4_REM.FeatureExtraction()
Object_C4_REM.SaveFeatureSet(X = X_C4_REM, y=y_C4_REM, path = save_path, filename = 'feat42_C4_REM')
Object_C3_N3 = ML_Depression(filename=fname_C_N3, channel = ch_C3, fs = 200, T = 30)
X_C3_N3,y_C3_N3 = Object_C3_N3.FeatureExtraction()
Object_C3_N3.SaveFeatureSet(X = X_C3_N3, y=y_C3_N3, path = save_path, filename = 'feat42_C3_N3')
Object_C4_N3 = ML_Depression(filename=fname_C_N3, channel = ch_C4, fs = 200, T = 30)
X_C4_N3,y_C4_N3 = Object_C4_N3.FeatureExtraction()
Object_C4_N3.SaveFeatureSet(X = X_C4_N3, y=y_C4_N3, path = save_path, filename = 'feat42_C4_N3')
########### Occipital electrodes #############
main_path = "D:/1D_TimeSeries/raw_EEG/without artefact/train_test/"
fname_O_N3 = (main_path+"tr90_N3_O1-M2_O2-M1.h5")
fname_O_REM = (main_path+"tr90_REM_O1-M2_O2-M1.h5")
ch_O2 = 'O2-M1'
ch_O1 = 'O1-M2'
Object_O1_REM = ML_Depression(filename=fname_O_REM, channel = ch_O1, fs = 200, T = 30)
X_O1_REM,y_O1_REM = Object_O1_REM.FeatureExtraction()
Object_O1_REM.SaveFeatureSet(X = X_O1_REM, y=y_O1_REM, path = save_path, filename = 'feat42_O1_REM')
Object_O2_REM = ML_Depression(filename=fname_O_REM, channel = ch_O2, fs = 200, T = 30)
X_O2_REM,y_O2_REM = Object_O2_REM.FeatureExtraction()
Object_O2_REM.SaveFeatureSet(X = X_O2_REM, y=y_O2_REM, path = save_path, filename = 'feat42_O2_REM')
Object_O1_N3 = ML_Depression(filename=fname_O_N3, channel = ch_O1, fs = 200, T = 30)
X_O1_N3,y_O1_N3 = Object_O1_N3.FeatureExtraction()
Object_O1_N3.SaveFeatureSet(X = X_O1_N3, y=y_O1_N3, path = save_path, filename = 'feat42_O1_N3')
Object_O2_N3 = ML_Depression(filename=fname_O_N3, channel = ch_O2, fs = 200, T = 30)
X_O2_N3,y_O2_N3 = Object_O2_N3.FeatureExtraction()
Object_O2_N3.SaveFeatureSet(X = X_O2_N3, y=y_O2_N3, path = save_path, filename = 'feat42_O2_N3')
########### Fp electrodes #############
main_path = "D:/1D_TimeSeries/raw_EEG/without artefact/train_test/"
fname_fp_N3 = (main_path+"tr90_N3_fp1-M2_fp2-M1.h5")
fname_fp_REM = (main_path+"tr90_REM_fp1-M2_fp2-M1.h5")
ch_fp2 = 'fp2-M1'
ch_fp1 = 'fp1-M2'
Object_fp1_REM = ML_Depression(filename=fname_fp_REM, channel = ch_fp1, fs = 200, T = 30)
X_fp1_REM,y_fp1_REM = Object_fp1_REM.FeatureExtraction()
Object_fp1_REM.SaveFeatureSet(X = X_fp1_REM, y=y_fp1_REM, path = save_path, filename = 'feat42_fp1_REM')
Object_fp2_REM = ML_Depression(filename=fname_fp_REM, channel = ch_fp2, fs = 200, T = 30)
X_fp2_REM,y_fp2_REM = Object_fp2_REM.FeatureExtraction()
Object_fp2_REM.SaveFeatureSet(X = X_fp2_REM, y=y_fp2_REM, path = save_path, filename = 'feat42_fp2_REM')
Object_fp1_N3 = ML_Depression(filename=fname_fp_N3, channel = ch_fp1, fs = 200, T = 30)
X_fp1_N3,y_fp1_N3 = Object_fp1_N3.FeatureExtraction()
Object_fp1_N3.SaveFeatureSet(X = X_fp1_N3, y=y_fp1_N3, path = save_path, filename = 'feat42_fp1_N3')
Object_fp2_N3 = ML_Depression(filename=fname_fp_N3, channel = ch_fp2, fs = 200, T = 30)
X_fp2_N3,y_fp2_N3 = Object_fp2_N3.FeatureExtraction()
Object_fp2_N3.SaveFeatureSet(X = X_fp2_N3, y=y_fp2_N3, path = save_path, filename = 'feat42_fp2_N3')
toc = time.time()
print(f'time taken: {toc - tic}')
########## Concatenate all features #########
# RIGHT hemisphere - REM
X_rh_REM = np.column_stack((X_fp2_REM,X_C4_REM))
X_rh_REM = np.column_stack((X_rh_REM,X_O2_REM))
# RIGHT hemisphere - N3
X_rh_N3 = np.column_stack((X_fp2_N3,X_C4_N3))
X_rh_N3 = np.column_stack((X_rh_N3,X_O2_N3))
# LEFT hemisphere - REM
X_lh_REM = np.column_stack((X_fp1_REM,X_C3_REM))
X_lh_REM = np.column_stack((X_lh_REM,X_O1_REM))
# LEFT hemisphere - N3
X_lh_N3 = np.column_stack((X_fp1_N3,X_C3_N3))
X_lh_N3 = np.column_stack((X_lh_N3,X_O1_N3))
# Both sides - REM
X_REM = np.column_stack((X_rh_REM, X_lh_REM))
# Both sides - N3
X_N3 = np.column_stack((X_rh_N3, X_lh_N3))
# Combine SWS and REM
X_SWS_REM = np.row_stack((X_N3, X_REM))
y_SWS_REM = np.concatenate((y_fp2_N3, y_fp2_REM))
# SAVE ALL COMBINATIONS
Object = ML_Depression(filename='', channel='', fs = 200, T = 30)
# one hemisphere
Object.SaveFeatureSet(X = X_rh_REM, y=y_fp2_REM, path = save_path, filename = 'feat42_rh_REM')
Object.SaveFeatureSet(X = X_lh_REM, y=y_fp2_REM, path = save_path, filename = 'feat42_lh_REM')
Object.SaveFeatureSet(X = X_rh_N3 , y=y_fp2_N3 , path = save_path, filename = 'feat42_rh_N3')
Object.SaveFeatureSet(X = X_lh_N3 , y=y_fp2_N3 , path = save_path, filename = 'feat42_lh_N3')
# Both hemisphere
Object.SaveFeatureSet(X = X_N3 , y=y_fp2_N3 , path = save_path, filename = 'feat42_l&rh_N3')
Object.SaveFeatureSet(X = X_REM , y=y_fp2_N3 , path = save_path, filename = 'feat42_l&rh_REM')
# Both hemispheres- SWS &REM combination
Object.SaveFeatureSet(X = X_SWS_REM , y=y_SWS_REM , path = save_path, filename = 'feat42_l&rh_N3&REM')
#%% Load features from different brain regions, sleep stage and combine them
Object = ML_Depression(filename='', channel='', fs = 200, T = 30)
path = 'P:/3013080.02/ml_project/scripts/1D_TimeSeries/features/'
save_path = 'P:/3013080.02/ml_project/scripts/1D_TimeSeries/features/'
feats = 'featureset'
labels = 'labels'
# Pick right hemisphere N3
fname_rh_N3 = 'feat42_rh_N3'
X_rh_N3, y_rh_N3 = Object.LoadFeatureSet(path, fname_rh_N3, feats, labels)
# Pick left hemisphere N3
fname_lh_N3 = 'feat42_lh_N3'
X_lh_N3, y_lh_N3 = Object.LoadFeatureSet(path, fname_lh_N3, feats, labels)
# Pick right hemisphere REM
fname_rh_REM = 'feat42_rh_REM'
X_rh_REM, y_rh_REM = Object.LoadFeatureSet(path, fname_rh_REM, feats, labels)
# Pick LEFT hemisphere REM
fname_lh_REM = 'feat42_lh_REM'
X_lh_REM, y_lh_REM = Object.LoadFeatureSet(path, fname_lh_REM, feats, labels)
# Combine them
X_N3 = np.column_stack((X_rh_N3, X_lh_N3))
X_REM = np.column_stack((X_rh_REM, X_lh_REM))
# Save combination
Object.SaveFeatureSet(X = X_N3 , y=y_lh_N3 , path = save_path, filename = 'feat42_l&rh_N3')
Object.SaveFeatureSet(X = X_REM , y=y_lh_REM , path = save_path, filename = 'feat42_l&rh_REM')
| 53.209302 | 127 | 0.682984 |
b99d08420cae81be117acdda96af821aba38eea2
| 6,891 |
py
|
Python
|
igibson/examples/behavior/behavior_demo_collection.py
|
suresh-guttikonda/iGibson
|
a69e623058180146466cd52d4bb3c00d1facdacf
|
[
"MIT"
] | null | null | null |
igibson/examples/behavior/behavior_demo_collection.py
|
suresh-guttikonda/iGibson
|
a69e623058180146466cd52d4bb3c00d1facdacf
|
[
"MIT"
] | null | null | null |
igibson/examples/behavior/behavior_demo_collection.py
|
suresh-guttikonda/iGibson
|
a69e623058180146466cd52d4bb3c00d1facdacf
|
[
"MIT"
] | null | null | null |
"""
Main BEHAVIOR demo collection entrypoint
"""
import argparse
import copy
import datetime
import os
import bddl
import numpy as np
import igibson
from igibson.activity.activity_base import iGBEHAVIORActivityInstance
from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
from igibson.render.mesh_renderer.mesh_renderer_vr import VrConditionSwitcher, VrSettings
from igibson.simulator import Simulator
from igibson.utils.ig_logging import IGLogWriter
POST_TASK_STEPS = 200
PHYSICS_WARMING_STEPS = 200
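# The original main() is not included in this excerpt; the stub below is only
# a hedged sketch so the entry point resolves. The flag name is an assumption,
# and the actual VR collection loop (simulator setup, log writing, post-task
# steps) is omitted here.
def main():
    parser = argparse.ArgumentParser(description="Collect a BEHAVIOR demo in VR")
    parser.add_argument("--bddl_activity", type=str, default="cleaning_cupboards",
                        help="hypothetical flag: name of the BDDL activity to run")
    args = parser.parse_args()
    print("Demo collection for %s is omitted from this excerpt" % args.bddl_activity)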
if __name__ == "__main__":
main()
| 31.465753 | 120 | 0.652881 |
b99e3b0ee335439a781ae231769595415a1dc6ec
| 546 |
py
|
Python
|
wagtail/wagtailadmin/menu.py
|
digitalmarmalade/wagtail
|
ac4d23172ff3f42746625630583b17d243fb9822
|
[
"BSD-3-Clause"
] | 1 |
2015-11-05T18:02:04.000Z
|
2015-11-05T18:02:04.000Z
|
wagtail/wagtailadmin/menu.py
|
digitalmarmalade/wagtail
|
ac4d23172ff3f42746625630583b17d243fb9822
|
[
"BSD-3-Clause"
] | null | null | null |
wagtail/wagtailadmin/menu.py
|
digitalmarmalade/wagtail
|
ac4d23172ff3f42746625630583b17d243fb9822
|
[
"BSD-3-Clause"
] | null | null | null |
from django.utils.text import slugify
from django.utils.html import format_html
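# The MenuItem class body is not part of this excerpt; below is a hedged
# sketch of the classic Wagtail admin MenuItem that these two imports suggest.
# Treat the constructor arguments and markup as assumptions, not the exact
# historical API.
class MenuItem(object):
    def __init__(self, label, url, classnames='', order=1000):
        self.label = label
        self.url = url
        self.classnames = classnames
        self.order = order

    def render_html(self):
        return format_html(
            '<li class="menu-{0}"><a href="{1}" class="{2}">{3}</a></li>',
            slugify(self.label.lower()), self.url, self.classnames, self.label)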
| 32.117647 | 79 | 0.611722 |
b99ee5dfe9849188796ff8d2b024b524adedb8d2
| 1,950 |
py
|
Python
|
django_mfa/migrations/0001_initial.py
|
timgates42/django-mfa
|
89eeb83f7da3ea24f205b40b13c7f9d33ea15b99
|
[
"MIT"
] | null | null | null |
django_mfa/migrations/0001_initial.py
|
timgates42/django-mfa
|
89eeb83f7da3ea24f205b40b13c7f9d33ea15b99
|
[
"MIT"
] | null | null | null |
django_mfa/migrations/0001_initial.py
|
timgates42/django-mfa
|
89eeb83f7da3ea24f205b40b13c7f9d33ea15b99
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.5 on 2019-03-26 11:35
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
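# The Migration class body is not part of this excerpt. The skeleton below is
# a hedged sketch of what an initial django-mfa migration plausibly declares;
# the model and field names are assumptions, not the app's verified schema.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserOTP',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('otp_type', models.CharField(choices=[('HOTP', 'hotp'), ('TOTP', 'totp')], max_length=20)),
                ('secret_key', models.CharField(blank=True, max_length=100)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]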
| 41.489362 | 143 | 0.598974 |
b99f21827c3ba7ccbcab4806c878cdacfa139e20
| 317 |
py
|
Python
|
app/logger_example/main.py
|
khanh-nguyen-code/my-collection
|
31581ef0b1dae67aafb1f4e64b9973a38cc01edf
|
[
"MIT"
] | null | null | null |
app/logger_example/main.py
|
khanh-nguyen-code/my-collection
|
31581ef0b1dae67aafb1f4e64b9973a38cc01edf
|
[
"MIT"
] | null | null | null |
app/logger_example/main.py
|
khanh-nguyen-code/my-collection
|
31581ef0b1dae67aafb1f4e64b9973a38cc01edf
|
[
"MIT"
] | null | null | null |
from my_collection import logger
if __name__ == "__main__":
logger.now().debug("debug1")
logger.now().debug("debug2")
logger.now().info("hello1")
logger.now().info("hello2")
logger.now().with_field("key", "val").error("with field1")
logger.now().with_field("key", "val").error("with field2")
| 31.7 | 62 | 0.646688 |
b9a14f8cda479b51cbe9296c63d8ae7397078bc7
| 760 |
py
|
Python
|
robotframework_iperf3/__main__.py
|
scathaig/robotframework-iperf3
|
cfeeb3e265777403d7eb06fcfa6d69650f2a5e67
|
[
"Apache-2.0"
] | null | null | null |
robotframework_iperf3/__main__.py
|
scathaig/robotframework-iperf3
|
cfeeb3e265777403d7eb06fcfa6d69650f2a5e67
|
[
"Apache-2.0"
] | null | null | null |
robotframework_iperf3/__main__.py
|
scathaig/robotframework-iperf3
|
cfeeb3e265777403d7eb06fcfa6d69650f2a5e67
|
[
"Apache-2.0"
] | null | null | null |
import argparse
from robotremoteserver import RobotRemoteServer
from .iperf3 import Iperf3
if __name__ == '__main__':
# create commandline parser
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.prog = 'python3 -m robotframework_iperf3'
# add parser options
parser.add_argument(
"-a",
"--address",
type=str,
help="server listen address",
default='0.0.0.0')
parser.add_argument(
"-p",
"--port",
type=int,
help="server listen port",
default=8270)
args = parser.parse_args()
server = RobotRemoteServer(
Iperf3(),
host=args.address,
port=args.port
)
server.serve()
| 21.111111 | 92 | 0.619737 |
b9a1ae11b40a499e6f6854e1a273c2ff226ef650
| 692 |
py
|
Python
|
h2o-docs/src/booklets/v2_2015/source/Python_Vignette_code_examples/python_combine_frames_append_one_as_columns.py
|
ahmedengu/h2o-3
|
ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11
|
[
"Apache-2.0"
] | 6,098 |
2015-05-22T02:46:12.000Z
|
2022-03-31T16:54:51.000Z
|
h2o-docs/src/booklets/v2_2015/source/Python_Vignette_code_examples/python_combine_frames_append_one_as_columns.py
|
ahmedengu/h2o-3
|
ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11
|
[
"Apache-2.0"
] | 2,517 |
2015-05-23T02:10:54.000Z
|
2022-03-30T17:03:39.000Z
|
h2o-docs/src/booklets/v2_2015/source/Python_Vignette_code_examples/python_combine_frames_append_one_as_columns.py
|
ahmedengu/h2o-3
|
ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11
|
[
"Apache-2.0"
] | 2,199 |
2015-05-22T04:09:55.000Z
|
2022-03-28T22:20:45.000Z
|
df8.cbind(df9)
# A B C D A0 B0 C0 D0
# ----- ------ ------ ------ ------ ----- ----- -----
# -0.09 0.944 0.160 0.271 -0.351 1.66 -2.32 -0.86
# -0.95 0.669 0.664 1.535 -0.633 -1.78 0.32 1.27
# 0.17 0.657 0.970 -0.419 -1.413 -0.51 0.64 -1.25
# 0.58 -0.516 -1.598 -1.346 0.711 1.09 0.05 0.63
# 1.04 -0.281 -0.411 0.959 -0.009 -0.47 0.41 -0.52
# 0.49 0.170 0.124 -0.170 -0.722 -0.79 -0.91 -2.09
# 1.42 -0.409 -0.525 2.155 -0.841 -0.19 0.13 0.63
# 0.94 1.192 -1.075 0.017 0.167 0.54 0.52 1.42
# -0.53 0.777 -1.090 -2.237 -0.693 0.24 -0.56 1.45
# 0.34 -0.456 -1.220 -0.456 -0.315 1.10 1.38 -0.05
#
# [100 rows x 8 columns]
| 43.25 | 54 | 0.460983 |
b9a1dbb5125acea57356714e95e66c8e3a612e30
| 1,101 |
py
|
Python
|
FluentPython/dynamic_attr_and_prop/frozen_json.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
FluentPython/dynamic_attr_and_prop/frozen_json.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
FluentPython/dynamic_attr_and_prop/frozen_json.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
from collections import abc
from keyword import iskeyword
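# The FrozenJSON class body is not included in this excerpt; the sketch below
# follows the well-known Fluent Python version, which these imports match.
# Treat it as a reconstruction, not the file's verbatim contents.
class FrozenJSON:
    """A read-only facade for navigating a JSON-like object
    using attribute notation."""

    def __init__(self, mapping):
        self.__data = {}
        for key, value in mapping.items():
            if iskeyword(key):
                key += '_'  # avoid shadowing Python keywords, e.g. 'class'
            self.__data[key] = value

    def __getattr__(self, name):
        if hasattr(self.__data, name):
            return getattr(self.__data, name)
        else:
            return FrozenJSON.build(self.__data[name])

    @classmethod
    def build(cls, obj):
        if isinstance(obj, abc.Mapping):
            return cls(obj)
        elif isinstance(obj, abc.MutableSequence):
            return [cls.build(item) for item in obj]
        else:
            return obj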
| 27.525 | 56 | 0.561308 |
b9a20089dfb3f5c8a3472d1f3be189af236d4d44
| 4,062 |
py
|
Python
|
pomdp_problems/tag/models/transition_model.py
|
Semanti1/pomdp_findit
|
b96c1c06aab4b485fa005654cf6438ff63718083
|
[
"MIT"
] | null | null | null |
pomdp_problems/tag/models/transition_model.py
|
Semanti1/pomdp_findit
|
b96c1c06aab4b485fa005654cf6438ff63718083
|
[
"MIT"
] | null | null | null |
pomdp_problems/tag/models/transition_model.py
|
Semanti1/pomdp_findit
|
b96c1c06aab4b485fa005654cf6438ff63718083
|
[
"MIT"
] | null | null | null |
"""The Tag problem. Implemented according to the paper `Anytime Point-Based
Approximations for Large POMDPs <https://arxiv.org/pdf/1110.0027.pdf>`_.
Transition model: the robot moves deterministically. The target's movement
depends on the robot; With Pr=0.8 the target moves away from the robot,
and with Pr=0.2, the target stays at the same place. The target never
moves closer to the robot.
"""
import copy
import pomdp_py
import pomdp_problems.util as util
import pomdp_problems.tag.constants as constants
from pomdp_problems.tag.domain.action import *
| 45.640449 | 103 | 0.537912 |
b9a21ff5a8c4fcb07930580d031f6847ecfaed43
| 4,731 |
py
|
Python
|
packit/fedpkg.py
|
bocekm/packit
|
b5da23c0fa3f205537551b9ed212d8f77d00d705
|
[
"MIT"
] | null | null | null |
packit/fedpkg.py
|
bocekm/packit
|
b5da23c0fa3f205537551b9ed212d8f77d00d705
|
[
"MIT"
] | null | null | null |
packit/fedpkg.py
|
bocekm/packit
|
b5da23c0fa3f205537551b9ed212d8f77d00d705
|
[
"MIT"
] | null | null | null |
# MIT License
#
# Copyright (c) 2019 Red Hat, Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pathlib import Path
from typing import Optional
from packit.exceptions import PackitCommandFailedError
from packit.utils import commands # so we can mock utils
from packit.utils.logging import logger
| 35.044444 | 82 | 0.609808 |
b9a3c97262cf3c50a695832e8941374463a78067
| 901 |
py
|
Python
|
tests/test_MaskedArrayCollection.py
|
ahaldane/NDducktype_tests
|
4876416e5fbff7ba0d85445c0eeae432d6e80014
|
[
"BSD-3-Clause"
] | 3 |
2020-06-18T14:18:39.000Z
|
2021-07-22T18:05:52.000Z
|
tests/test_MaskedArrayCollection.py
|
ahaldane/NDducktype_tests
|
4876416e5fbff7ba0d85445c0eeae432d6e80014
|
[
"BSD-3-Clause"
] | 2 |
2020-07-19T15:44:09.000Z
|
2020-07-28T23:22:21.000Z
|
tests/test_MaskedArrayCollection.py
|
ahaldane/NDducktype_tests
|
4876416e5fbff7ba0d85445c0eeae432d6e80014
|
[
"BSD-3-Clause"
] | 2 |
2019-06-20T00:20:13.000Z
|
2020-09-20T21:42:52.000Z
|
#!/usr/bin/env python
from ndarray_ducktypes.ArrayCollection import ArrayCollection
from ndarray_ducktypes.MaskedArray import MaskedArray
from ndarray_ducktypes.MaskedArrayCollection import MaskedArrayCollection
import numpy as np
# Tests for Masked ArrayCollections.
#
# First try: Simply make an arraycollection of MaskedArrays. Downside: this
# strategy does not give a "filled" method. Probably to get a masked
# ArrayCollection we should really subclass ArrayCollection to have a
# fill_value and a filled() method
#a = MaskedArray(np.arange(10), np.arange(10)%3)
#b = MaskedArray(np.arange(10.) + 13, np.arange(10)%2)
#c = ArrayCollection([('age', a), ('weight', b)])
#print(repr(c))
#c['age'] += 100
#print(repr(c))
## second try: Subclass of ArrayCollection
#c = MaskedArrayCollection([('age', a), ('weight', b)])
#print(repr(c))
#c['age'] += 100
#print(repr(c))
#print(repr(c.filled()))
| 31.068966 | 75 | 0.738069 |
b9a4cbf5401cd86949e3f94c13bc464c4725fcee
| 192,704 |
py
|
Python
|
rpc/gen/core_pb2.py
|
jasonjoo2010/core
|
7c05ddbdac2e05a3d96db28f8bdfacf661907b82
|
[
"MIT"
] | null | null | null |
rpc/gen/core_pb2.py
|
jasonjoo2010/core
|
7c05ddbdac2e05a3d96db28f8bdfacf661907b82
|
[
"MIT"
] | null | null | null |
rpc/gen/core_pb2.py
|
jasonjoo2010/core
|
7c05ddbdac2e05a3d96db28f8bdfacf661907b82
|
[
"MIT"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: core.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='core.proto',
package='pb',
syntax='proto3',
serialized_pb=_b('\n\ncore.proto\x12\x02pb\"\x07\n\x05\x45mpty\"\xb4\x01\n\x15ListContainersOptions\x12\x0f\n\x07\x61ppname\x18\x01 \x01(\t\x12\x12\n\nentrypoint\x18\x02 \x01(\t\x12\x10\n\x08nodename\x18\x03 \x01(\t\x12\x35\n\x06labels\x18\x04 \x03(\x0b\x32%.pb.ListContainersOptions.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"L\n\x13\x44\x65ployStatusOptions\x12\x0f\n\x07\x61ppname\x18\x01 \x01(\t\x12\x12\n\nentrypoint\x18\x02 \x01(\t\x12\x10\n\x08nodename\x18\x03 \x01(\t\"v\n\x13\x44\x65ployStatusMessage\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\x0f\n\x07\x61ppname\x18\x02 \x01(\t\x12\x12\n\nentrypoint\x18\x03 \x01(\t\x12\x10\n\x08nodename\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\"0\n\x03Pod\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x65sc\x18\x02 \x01(\t\x12\r\n\x05\x66\x61vor\x18\x03 \x01(\t\"\x1d\n\x04Pods\x12\x15\n\x04pods\x18\x01 \x03(\x0b\x32\x07.pb.Pod\"\xfc\x02\n\x0bPodResource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12%\n\x03\x63pu\x18\x02 \x03(\x0b\x32\x18.pb.PodResource.CpuEntry\x12+\n\x06memory\x18\x03 \x03(\x0b\x32\x1b.pb.PodResource.MemoryEntry\x12\'\n\x04\x64iff\x18\x04 \x03(\x0b\x32\x19.pb.PodResource.DiffEntry\x12+\n\x06\x64\x65tail\x18\x05 \x03(\x0b\x32\x1b.pb.PodResource.DetailEntry\x1a*\n\x08\x43puEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a-\n\x0bMemoryEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a+\n\tDiffEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a-\n\x0b\x44\x65tailEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"5\n\x12ListNetworkOptions\x12\x0f\n\x07podname\x18\x01 \x01(\t\x12\x0e\n\x06\x64river\x18\x02 \x01(\t\"(\n\x07Network\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07subnets\x18\x02 \x03(\t\")\n\x08Networks\x12\x1d\n\x08networks\x18\x01 \x03(\x0b\x32\x0b.pb.Network\"\x9e\x03\n\x04Node\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08\x65ndpoint\x18\x02 \x01(\t\x12\x0f\n\x07podname\x18\x03 \x01(\t\x12\x1e\n\x03\x63pu\x18\x04 \x03(\x0b\x32\x11.pb.Node.CpuEntry\x12\x10\n\x08\x63pu_used\x18\x05 \x01(\x01\x12\x0e\n\x06memory\x18\x06 \x01(\x03\x12\x13\n\x0bmemory_used\x18\x07 \x01(\x03\x12\x11\n\tavailable\x18\x08 \x01(\x08\x12$\n\x06labels\x18\t \x03(\x0b\x32\x14.pb.Node.LabelsEntry\x12\x13\n\x0binit_memory\x18\n \x01(\x03\x12\'\n\x08init_cpu\x18\x0b \x03(\x0b\x32\x15.pb.Node.InitCpuEntry\x12\x0c\n\x04info\x18\x0c \x01(\t\x1a*\n\x08\x43puEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a.\n\x0cInitCpuEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\" \n\x05Nodes\x12\x17\n\x05nodes\x18\x01 \x03(\x0b\x32\x08.pb.Node\"E\n\rNodeAvailable\x12\x10\n\x08nodename\x18\x01 \x01(\t\x12\x0f\n\x07podname\x18\x02 \x01(\t\x12\x11\n\tavailable\x18\x03 \x01(\x08\"\xb8\x03\n\tContainer\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07podname\x18\x02 \x01(\t\x12\x10\n\x08nodename\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12#\n\x03\x63pu\x18\x05 \x03(\x0b\x32\x16.pb.Container.CpuEntry\x12\r\n\x05quota\x18\x06 \x01(\x01\x12\x0e\n\x06memory\x18\x07 \x01(\x03\x12\x12\n\nprivileged\x18\x08 \x01(\x08\x12)\n\x06labels\x18\t \x03(\x0b\x32\x19.pb.Container.LabelsEntry\x12+\n\x07publish\x18\n 
\x03(\x0b\x32\x1a.pb.Container.PublishEntry\x12\r\n\x05image\x18\x0b \x01(\t\x12\x0f\n\x07inspect\x18\x0c \x01(\x0c\x12\x13\n\x0bstatus_data\x18\r \x01(\x0c\x1a*\n\x08\x43puEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a.\n\x0cPublishEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"k\n\x18\x43ontainerDeployedOptions\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07\x61ppname\x18\x02 \x01(\t\x12\x12\n\nentrypoint\x18\x03 \x01(\t\x12\x10\n\x08nodename\x18\x04 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x05 \x01(\x0c\"/\n\nContainers\x12!\n\ncontainers\x18\x01 \x03(\x0b\x32\r.pb.Container\"\x19\n\x0b\x43ontainerID\x12\n\n\x02id\x18\x01 \x01(\t\"\x1b\n\x0c\x43ontainerIDs\x12\x0b\n\x03ids\x18\x01 \x03(\t\"4\n\x16RemoveContainerOptions\x12\x0b\n\x03ids\x18\x01 \x03(\t\x12\r\n\x05\x66orce\x18\x02 \x01(\x08\"7\n\x0eReallocOptions\x12\x0b\n\x03ids\x18\x01 \x03(\t\x12\x0b\n\x03\x63pu\x18\x02 \x01(\x01\x12\x0b\n\x03mem\x18\x03 \x01(\x03\":\n\rAddPodOptions\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x66\x61vor\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x65sc\x18\x03 \x01(\t\" \n\x10RemovePodOptions\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\rGetPodOptions\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xf7\x01\n\x0e\x41\x64\x64NodeOptions\x12\x10\n\x08nodename\x18\x01 \x01(\t\x12\x10\n\x08\x65ndpoint\x18\x02 \x01(\t\x12\x0f\n\x07podname\x18\x03 \x01(\t\x12\n\n\x02\x63\x61\x18\x04 \x01(\t\x12\x0c\n\x04\x63\x65rt\x18\x05 \x01(\t\x12\x0b\n\x03key\x18\x06 \x01(\t\x12\x0b\n\x03\x63pu\x18\x07 \x01(\x05\x12\r\n\x05share\x18\x08 \x01(\x05\x12\x0e\n\x06memory\x18\t \x01(\x03\x12.\n\x06labels\x18\n \x03(\x0b\x32\x1e.pb.AddNodeOptions.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"6\n\x11RemoveNodeOptions\x12\x10\n\x08nodename\x18\x01 \x01(\t\x12\x0f\n\x07podname\x18\x02 \x01(\t\"3\n\x0eGetNodeOptions\x12\x0f\n\x07podname\x18\x01 \x01(\t\x12\x10\n\x08nodename\x18\x02 \x01(\t\"0\n\x10ListNodesOptions\x12\x0f\n\x07podname\x18\x01 \x01(\t\x12\x0b\n\x03\x61ll\x18\x02 \x01(\x08\"\x8e\x04\n\x05\x42uild\x12\x0c\n\x04\x62\x61se\x18\x01 \x01(\t\x12\x0c\n\x04repo\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x0b\n\x03\x64ir\x18\x04 \x01(\t\x12\x11\n\tsubmodule\x18\x05 \x01(\x08\x12\x10\n\x08\x63ommands\x18\x06 \x03(\t\x12!\n\x04\x65nvs\x18\x07 \x03(\x0b\x32\x13.pb.Build.EnvsEntry\x12!\n\x04\x61rgs\x18\x08 \x03(\x0b\x32\x13.pb.Build.ArgsEntry\x12%\n\x06labels\x18\t \x03(\x0b\x32\x15.pb.Build.LabelsEntry\x12+\n\tartifacts\x18\n \x03(\x0b\x32\x18.pb.Build.ArtifactsEntry\x12#\n\x05\x63\x61\x63he\x18\x0b \x03(\x0b\x32\x14.pb.Build.CacheEntry\x1a+\n\tEnvsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a+\n\tArgsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x30\n\x0e\x41rtifactsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a,\n\nCacheEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"z\n\x06\x42uilds\x12\x0e\n\x06stages\x18\x01 \x03(\t\x12&\n\x06\x62uilds\x18\x02 \x03(\x0b\x32\x16.pb.Builds.BuildsEntry\x1a\x38\n\x0b\x42uildsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x18\n\x05value\x18\x02 
\x01(\x0b\x32\t.pb.Build:\x02\x38\x01\"s\n\x11\x42uildImageOptions\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04user\x18\x02 \x01(\t\x12\x0b\n\x03uid\x18\x03 \x01(\x05\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12\x1a\n\x06\x62uilds\x18\x05 \x01(\x0b\x32\n.pb.Builds\x12\x0b\n\x03tar\x18\x06 \x01(\x0c\"F\n\x0bHookOptions\x12\x13\n\x0b\x61\x66ter_start\x18\x01 \x03(\t\x12\x13\n\x0b\x62\x65\x66ore_stop\x18\x02 \x03(\t\x12\r\n\x05\x66orce\x18\x03 \x01(\x08\"U\n\x12HealthCheckOptions\x12\x11\n\ttcp_ports\x18\x01 \x03(\t\x12\x11\n\thttp_port\x18\x02 \x01(\t\x12\x0b\n\x03url\x18\x03 \x01(\t\x12\x0c\n\x04\x63ode\x18\x04 \x01(\x05\"u\n\nLogOptions\x12\x0c\n\x04type\x18\x01 \x01(\t\x12*\n\x06\x63onfig\x18\x02 \x03(\x0b\x32\x1a.pb.LogOptions.ConfigEntry\x1a-\n\x0b\x43onfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xca\x02\n\x11\x45ntrypointOptions\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ommand\x18\x02 \x01(\t\x12\x12\n\nprivileged\x18\x03 \x01(\x08\x12\x0b\n\x03\x64ir\x18\x04 \x01(\t\x12\x1b\n\x03log\x18\x05 \x01(\x0b\x32\x0e.pb.LogOptions\x12\x0f\n\x07publish\x18\x06 \x03(\t\x12+\n\x0bhealthcheck\x18\x07 \x01(\x0b\x32\x16.pb.HealthCheckOptions\x12\x1d\n\x04hook\x18\x08 \x01(\x0b\x32\x0f.pb.HookOptions\x12\x16\n\x0erestart_policy\x18\t \x01(\t\x12\x33\n\x07sysctls\x18\n \x03(\x0b\x32\".pb.EntrypointOptions.SysctlsEntry\x1a.\n\x0cSysctlsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x88\x06\n\rDeployOptions\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\nentrypoint\x18\x02 \x01(\x0b\x32\x15.pb.EntrypointOptions\x12\x0f\n\x07podname\x18\x03 \x01(\t\x12\x10\n\x08nodename\x18\x04 \x01(\t\x12\r\n\x05image\x18\x05 \x01(\t\x12\x12\n\nextra_args\x18\x06 \x01(\t\x12\x11\n\tcpu_quota\x18\x07 \x01(\x01\x12\x0e\n\x06memory\x18\x08 \x01(\x03\x12\r\n\x05\x63ount\x18\t \x01(\x05\x12\x0b\n\x03\x65nv\x18\n \x03(\t\x12\x0b\n\x03\x64ns\x18\x0b \x03(\t\x12\x13\n\x0b\x65xtra_hosts\x18\x0c \x03(\t\x12\x0f\n\x07volumes\x18\r \x03(\t\x12\x31\n\x08networks\x18\x0e \x03(\x0b\x32\x1f.pb.DeployOptions.NetworksEntry\x12\x13\n\x0bnetworkmode\x18\x0f \x01(\t\x12\x0c\n\x04user\x18\x10 \x01(\t\x12\r\n\x05\x64\x65\x62ug\x18\x11 \x01(\x08\x12\x11\n\topenStdin\x18\x12 \x01(\x08\x12-\n\x06labels\x18\x13 \x03(\x0b\x32\x1d.pb.DeployOptions.LabelsEntry\x12\x35\n\nnodelabels\x18\x14 \x03(\x0b\x32!.pb.DeployOptions.NodelabelsEntry\x12\x15\n\rdeploy_method\x18\x15 \x01(\t\x12)\n\x04\x64\x61ta\x18\x16 \x03(\x0b\x32\x1b.pb.DeployOptions.DataEntry\x12\x11\n\tsoftlimit\x18\x17 \x01(\x08\x12\x13\n\x0bnodes_limit\x18\x18 \x01(\x05\x1a/\n\rNetworksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fNodelabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a+\n\tDataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\xb5\x02\n\x0eReplaceOptions\x12$\n\tdeployOpt\x18\x01 \x01(\x0b\x32\x11.pb.DeployOptions\x12\r\n\x05\x66orce\x18\x02 \x01(\x08\x12;\n\rfilter_labels\x18\x03 \x03(\x0b\x32$.pb.ReplaceOptions.FilterLabelsEntry\x12*\n\x04\x63opy\x18\x04 \x03(\x0b\x32\x1c.pb.ReplaceOptions.CopyEntry\x12\x0b\n\x03ids\x18\x05 \x03(\t\x12\x16\n\x0enetworkinherit\x18\x06 \x01(\x08\x1a\x33\n\x11\x46ilterLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a+\n\tCopyEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"T\n\x11\x43\x61\x63heImageOptions\x12\x0f\n\x07podname\x18\x01 \x01(\t\x12\x10\n\x08nodename\x18\x02 \x01(\t\x12\x0e\n\x06images\x18\x03 \x03(\t\x12\x0c\n\x04step\x18\x04 \x01(\x05\"d\n\x12RemoveImageOptions\x12\x0f\n\x07podname\x18\x01 \x01(\t\x12\x10\n\x08nodename\x18\x02 \x01(\t\x12\x0e\n\x06images\x18\x03 \x03(\t\x12\x0c\n\x04step\x18\x04 \x01(\x05\x12\r\n\x05prune\x18\x05 \x01(\x08\"\x1a\n\tCopyPaths\x12\r\n\x05paths\x18\x01 \x03(\t\"{\n\x0b\x43opyOptions\x12-\n\x07targets\x18\x01 \x03(\x0b\x32\x1c.pb.CopyOptions.TargetsEntry\x1a=\n\x0cTargetsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.pb.CopyPaths:\x02\x38\x01\",\n\x0b\x45rrorDetail\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x03\x12\x0f\n\x07message\x18\x02 \x01(\t\"\x87\x01\n\x11\x42uildImageMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x10\n\x08progress\x18\x03 \x01(\t\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x0e\n\x06stream\x18\x05 \x01(\t\x12%\n\x0c\x65rror_detail\x18\x06 \x01(\x0b\x32\x0f.pb.ErrorDetail\"\xea\x02\n\x16\x43reateContainerMessage\x12\x0f\n\x07podname\x18\x01 \x01(\t\x12\x10\n\x08nodename\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\r\n\x05\x65rror\x18\x05 \x01(\t\x12\x0f\n\x07success\x18\x06 \x01(\x08\x12\x30\n\x03\x63pu\x18\x07 \x03(\x0b\x32#.pb.CreateContainerMessage.CpuEntry\x12\r\n\x05quota\x18\x08 \x01(\x01\x12\x0e\n\x06memory\x18\t \x01(\x03\x12\x38\n\x07publish\x18\n \x03(\x0b\x32\'.pb.CreateContainerMessage.PublishEntry\x12\x0c\n\x04hook\x18\x0b \x01(\x0c\x1a*\n\x08\x43puEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a.\n\x0cPublishEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x80\x01\n\x17ReplaceContainerMessage\x12*\n\x06\x63reate\x18\x01 \x01(\x0b\x32\x1a.pb.CreateContainerMessage\x12*\n\x06remove\x18\x02 \x01(\x0b\x32\x1a.pb.RemoveContainerMessage\x12\r\n\x05\x65rror\x18\x03 \x01(\t\"7\n\x11RunAndWaitMessage\x12\x14\n\x0c\x63ontainer_id\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"V\n\x11\x43\x61\x63heImageMessage\x12\r\n\x05image\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\x12\x10\n\x08nodename\x18\x03 \x01(\t\x12\x0f\n\x07message\x18\x04 \x01(\t\"F\n\x12RemoveImageMessage\x12\r\n\x05image\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\x12\x10\n\x08messages\x18\x03 \x03(\t\"C\n\x16RemoveContainerMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\x12\x0c\n\x04hook\x18\x03 \x01(\t\"5\n\x16ReallocResourceMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\"b\n\x0b\x43opyMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0c\n\x04path\x18\x04 \x01(\t\x12\r\n\x05\x65rror\x18\x05 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x06 \x01(\x0c\"J\n\x11RunAndWaitOptions\x12(\n\rDeployOptions\x18\x01 \x01(\x0b\x32\x11.pb.DeployOptions\x12\x0b\n\x03\x43md\x18\x02 \x01(\x0c\"4\n\x17\x43ontrolContainerOptions\x12\x0b\n\x03ids\x18\x01 \x03(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\"B\n\x17\x43ontrolContainerMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12\x0c\n\x04hook\x18\x03 
\x01(\x0c\x32\xcb\x0c\n\x07\x43oreRPC\x12!\n\x08ListPods\x12\t.pb.Empty\x1a\x08.pb.Pods\"\x00\x12&\n\x06\x41\x64\x64Pod\x12\x11.pb.AddPodOptions\x1a\x07.pb.Pod\"\x00\x12.\n\tRemovePod\x12\x14.pb.RemovePodOptions\x1a\t.pb.Empty\"\x00\x12&\n\x06GetPod\x12\x11.pb.GetPodOptions\x1a\x07.pb.Pod\"\x00\x12\x36\n\x0eGetPodResource\x12\x11.pb.GetPodOptions\x1a\x0f.pb.PodResource\"\x00\x12)\n\x07\x41\x64\x64Node\x12\x12.pb.AddNodeOptions\x1a\x08.pb.Node\"\x00\x12.\n\nRemoveNode\x12\x15.pb.RemoveNodeOptions\x1a\x07.pb.Pod\"\x00\x12\x31\n\x10SetNodeAvailable\x12\x11.pb.NodeAvailable\x1a\x08.pb.Node\"\x00\x12)\n\x07GetNode\x12\x12.pb.GetNodeOptions\x1a\x08.pb.Node\"\x00\x12\x30\n\x0cGetContainer\x12\x0f.pb.ContainerID\x1a\r.pb.Container\"\x00\x12\x33\n\rGetContainers\x12\x10.pb.ContainerIDs\x1a\x0e.pb.Containers\"\x00\x12/\n\rGetNodeByName\x12\x12.pb.GetNodeOptions\x1a\x08.pb.Node\"\x00\x12\x31\n\x0cListPodNodes\x12\x14.pb.ListNodesOptions\x1a\t.pb.Nodes\"\x00\x12\x36\n\x0cListNetworks\x12\x16.pb.ListNetworkOptions\x1a\x0c.pb.Networks\"\x00\x12=\n\x0eListContainers\x12\x19.pb.ListContainersOptions\x1a\x0e.pb.Containers\"\x00\x12:\n\x12ListNodeContainers\x12\x12.pb.GetNodeOptions\x1a\x0e.pb.Containers\"\x00\x12>\n\x11\x43ontainerDeployed\x12\x1c.pb.ContainerDeployedOptions\x1a\t.pb.Empty\"\x00\x12,\n\x04\x43opy\x12\x0f.pb.CopyOptions\x1a\x0f.pb.CopyMessage\"\x00\x30\x01\x12>\n\nBuildImage\x12\x15.pb.BuildImageOptions\x1a\x15.pb.BuildImageMessage\"\x00\x30\x01\x12>\n\nCacheImage\x12\x15.pb.CacheImageOptions\x1a\x15.pb.CacheImageMessage\"\x00\x30\x01\x12\x41\n\x0bRemoveImage\x12\x16.pb.RemoveImageOptions\x1a\x16.pb.RemoveImageMessage\"\x00\x30\x01\x12\x44\n\x0c\x44\x65ployStatus\x12\x17.pb.DeployStatusOptions\x1a\x17.pb.DeployStatusMessage\"\x00\x30\x01\x12@\n\nRunAndWait\x12\x15.pb.RunAndWaitOptions\x1a\x15.pb.RunAndWaitMessage\"\x00(\x01\x30\x01\x12\x44\n\x0f\x43reateContainer\x12\x11.pb.DeployOptions\x1a\x1a.pb.CreateContainerMessage\"\x00\x30\x01\x12G\n\x10ReplaceContainer\x12\x12.pb.ReplaceOptions\x1a\x1b.pb.ReplaceContainerMessage\"\x00\x30\x01\x12M\n\x0fRemoveContainer\x12\x1a.pb.RemoveContainerOptions\x1a\x1a.pb.RemoveContainerMessage\"\x00\x30\x01\x12P\n\x10\x43ontrolContainer\x12\x1b.pb.ControlContainerOptions\x1a\x1b.pb.ControlContainerMessage\"\x00\x30\x01\x12\x45\n\x0fReallocResource\x12\x12.pb.ReallocOptions\x1a\x1a.pb.ReallocResourceMessage\"\x00\x30\x01\x62\x06proto3')
)
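# The Descriptor objects below are the usual protoc-generated boilerplate: one
# per message in the source .proto, with serialized_start/serialized_end
# indexing into the serialized_pb blob above.  Nested *Entry descriptors carry
# the '8\001' option (MessageOptions.map_entry), i.e. they back proto3 maps.
#
# Minimal usage sketch -- assumes the message classes that protoc emits later
# in this module and a CoreRPC stub from the companion gRPC module; all names
# and values here are illustrative only:
#
#   opts = ListContainersOptions(appname='myapp', labels={'env': 'prod'})
#   for containers in stub.ListContainers(opts):  # server-streaming RPC
#       ...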
_EMPTY = _descriptor.Descriptor(
name='Empty',
full_name='pb.Empty',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18,
serialized_end=25,
)
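# pb.ListContainersOptions: appname / entrypoint / nodename strings plus a
# labels map (string -> string).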
_LISTCONTAINERSOPTIONS_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='pb.ListContainersOptions.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.ListContainersOptions.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.ListContainersOptions.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=208,
)
_LISTCONTAINERSOPTIONS = _descriptor.Descriptor(
name='ListContainersOptions',
full_name='pb.ListContainersOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='appname', full_name='pb.ListContainersOptions.appname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='entrypoint', full_name='pb.ListContainersOptions.entrypoint', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.ListContainersOptions.nodename', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='pb.ListContainersOptions.labels', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_LISTCONTAINERSOPTIONS_LABELSENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=28,
serialized_end=208,
)
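# pb.DeployStatusOptions: appname / entrypoint / nodename selector for the
# server-streaming DeployStatus RPC.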
_DEPLOYSTATUSOPTIONS = _descriptor.Descriptor(
name='DeployStatusOptions',
full_name='pb.DeployStatusOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='appname', full_name='pb.DeployStatusOptions.appname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='entrypoint', full_name='pb.DeployStatusOptions.entrypoint', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.DeployStatusOptions.nodename', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=210,
serialized_end=286,
)
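# pb.DeployStatusMessage: one streamed status event -- action, appname,
# entrypoint, nodename, id and raw data bytes.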
_DEPLOYSTATUSMESSAGE = _descriptor.Descriptor(
name='DeployStatusMessage',
full_name='pb.DeployStatusMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='action', full_name='pb.DeployStatusMessage.action', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='appname', full_name='pb.DeployStatusMessage.appname', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='entrypoint', full_name='pb.DeployStatusMessage.entrypoint', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.DeployStatusMessage.nodename', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='pb.DeployStatusMessage.id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='pb.DeployStatusMessage.data', index=5,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=288,
serialized_end=406,
)
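# pb.Pod: name, desc and favor strings.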
_POD = _descriptor.Descriptor(
name='Pod',
full_name='pb.Pod',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.Pod.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='desc', full_name='pb.Pod.desc', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='favor', full_name='pb.Pod.favor', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=408,
serialized_end=456,
)
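# pb.Pods: repeated Pod wrapper (the ListPods reply).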
_PODS = _descriptor.Descriptor(
name='Pods',
full_name='pb.Pods',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pods', full_name='pb.Pods.pods', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=458,
serialized_end=487,
)
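# pb.PodResource: pod name plus four maps -- cpu and memory (string -> double),
# diff (string -> bool), detail (string -> string).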
_PODRESOURCE_CPUENTRY = _descriptor.Descriptor(
name='CpuEntry',
full_name='pb.PodResource.CpuEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.PodResource.CpuEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.PodResource.CpuEntry.value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=689,
serialized_end=731,
)
_PODRESOURCE_MEMORYENTRY = _descriptor.Descriptor(
name='MemoryEntry',
full_name='pb.PodResource.MemoryEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.PodResource.MemoryEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.PodResource.MemoryEntry.value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=733,
serialized_end=778,
)
_PODRESOURCE_DIFFENTRY = _descriptor.Descriptor(
name='DiffEntry',
full_name='pb.PodResource.DiffEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.PodResource.DiffEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.PodResource.DiffEntry.value', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=780,
serialized_end=823,
)
_PODRESOURCE_DETAILENTRY = _descriptor.Descriptor(
name='DetailEntry',
full_name='pb.PodResource.DetailEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.PodResource.DetailEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.PodResource.DetailEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=825,
serialized_end=870,
)
_PODRESOURCE = _descriptor.Descriptor(
name='PodResource',
full_name='pb.PodResource',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.PodResource.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu', full_name='pb.PodResource.cpu', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory', full_name='pb.PodResource.memory', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='diff', full_name='pb.PodResource.diff', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='detail', full_name='pb.PodResource.detail', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_PODRESOURCE_CPUENTRY, _PODRESOURCE_MEMORYENTRY, _PODRESOURCE_DIFFENTRY, _PODRESOURCE_DETAILENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=490,
serialized_end=870,
)
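# pb.ListNetworkOptions: podname and driver strings.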
_LISTNETWORKOPTIONS = _descriptor.Descriptor(
name='ListNetworkOptions',
full_name='pb.ListNetworkOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='podname', full_name='pb.ListNetworkOptions.podname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='driver', full_name='pb.ListNetworkOptions.driver', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=872,
serialized_end=925,
)
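# pb.Network: network name plus repeated subnet strings.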
_NETWORK = _descriptor.Descriptor(
name='Network',
full_name='pb.Network',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.Network.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subnets', full_name='pb.Network.subnets', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=927,
serialized_end=967,
)
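# pb.Networks: repeated Network wrapper.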
_NETWORKS = _descriptor.Descriptor(
name='Networks',
full_name='pb.Networks',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='networks', full_name='pb.Networks.networks', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=969,
serialized_end=1010,
)
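# pb.Node: node identity (name, endpoint, podname), cpu / init_cpu maps
# (string -> int32), memory accounting, availability flag, labels map and an
# info string.  Note that LabelsEntry below reuses serialized offsets 163-208
# from the first identical LabelsEntry above -- apparently an artifact of
# identical map-entry definitions serializing to identical bytes, not a
# corruption.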
_NODE_CPUENTRY = _descriptor.Descriptor(
name='CpuEntry',
full_name='pb.Node.CpuEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Node.CpuEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Node.CpuEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1290,
serialized_end=1332,
)
_NODE_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='pb.Node.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Node.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Node.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=208,
)
_NODE_INITCPUENTRY = _descriptor.Descriptor(
name='InitCpuEntry',
full_name='pb.Node.InitCpuEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Node.InitCpuEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Node.InitCpuEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1381,
serialized_end=1427,
)
_NODE = _descriptor.Descriptor(
name='Node',
full_name='pb.Node',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.Node.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='endpoint', full_name='pb.Node.endpoint', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='podname', full_name='pb.Node.podname', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu', full_name='pb.Node.cpu', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu_used', full_name='pb.Node.cpu_used', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory', full_name='pb.Node.memory', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory_used', full_name='pb.Node.memory_used', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available', full_name='pb.Node.available', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='pb.Node.labels', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='init_memory', full_name='pb.Node.init_memory', index=9,
number=10, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='init_cpu', full_name='pb.Node.init_cpu', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='info', full_name='pb.Node.info', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_NODE_CPUENTRY, _NODE_LABELSENTRY, _NODE_INITCPUENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1013,
serialized_end=1427,
)
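# pb.Nodes: repeated Node wrapper.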
_NODES = _descriptor.Descriptor(
name='Nodes',
full_name='pb.Nodes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nodes', full_name='pb.Nodes.nodes', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1429,
serialized_end=1461,
)
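# pb.NodeAvailable: nodename / podname plus the desired availability flag
# (request type of SetNodeAvailable).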
_NODEAVAILABLE = _descriptor.Descriptor(
name='NodeAvailable',
full_name='pb.NodeAvailable',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.NodeAvailable.nodename', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='podname', full_name='pb.NodeAvailable.podname', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='available', full_name='pb.NodeAvailable.available', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1463,
serialized_end=1532,
)
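# pb.Container: container identity and placement, cpu map (string -> int32),
# quota / memory, privileged flag, labels and publish maps, image, plus raw
# inspect and status_data bytes.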
_CONTAINER_CPUENTRY = _descriptor.Descriptor(
name='CpuEntry',
full_name='pb.Container.CpuEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Container.CpuEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Container.CpuEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1290,
serialized_end=1332,
)
_CONTAINER_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='pb.Container.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Container.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Container.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=208,
)
_CONTAINER_PUBLISHENTRY = _descriptor.Descriptor(
name='PublishEntry',
full_name='pb.Container.PublishEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Container.PublishEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Container.PublishEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1929,
serialized_end=1975,
)
_CONTAINER = _descriptor.Descriptor(
name='Container',
full_name='pb.Container',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.Container.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='podname', full_name='pb.Container.podname', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.Container.nodename', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='pb.Container.name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu', full_name='pb.Container.cpu', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quota', full_name='pb.Container.quota', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory', full_name='pb.Container.memory', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='privileged', full_name='pb.Container.privileged', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='pb.Container.labels', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='publish', full_name='pb.Container.publish', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='image', full_name='pb.Container.image', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inspect', full_name='pb.Container.inspect', index=11,
number=12, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status_data', full_name='pb.Container.status_data', index=12,
number=13, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_CONTAINER_CPUENTRY, _CONTAINER_LABELSENTRY, _CONTAINER_PUBLISHENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1535,
serialized_end=1975,
)
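# pb.ContainerDeployedOptions: id / appname / entrypoint / nodename plus raw
# data bytes.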
_CONTAINERDEPLOYEDOPTIONS = _descriptor.Descriptor(
name='ContainerDeployedOptions',
full_name='pb.ContainerDeployedOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.ContainerDeployedOptions.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='appname', full_name='pb.ContainerDeployedOptions.appname', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='entrypoint', full_name='pb.ContainerDeployedOptions.entrypoint', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.ContainerDeployedOptions.nodename', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='pb.ContainerDeployedOptions.data', index=4,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1977,
serialized_end=2084,
)
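# pb.Containers: repeated Container wrapper.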
_CONTAINERS = _descriptor.Descriptor(
name='Containers',
full_name='pb.Containers',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='containers', full_name='pb.Containers.containers', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2086,
serialized_end=2133,
)
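# pb.ContainerID: a single container id.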
_CONTAINERID = _descriptor.Descriptor(
name='ContainerID',
full_name='pb.ContainerID',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.ContainerID.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2135,
serialized_end=2160,
)
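# pb.ContainerIDs: repeated container ids.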
_CONTAINERIDS = _descriptor.Descriptor(
name='ContainerIDs',
full_name='pb.ContainerIDs',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ids', full_name='pb.ContainerIDs.ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2162,
serialized_end=2189,
)
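# pb.RemoveContainerOptions: target ids plus a force flag.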
_REMOVECONTAINEROPTIONS = _descriptor.Descriptor(
name='RemoveContainerOptions',
full_name='pb.RemoveContainerOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ids', full_name='pb.RemoveContainerOptions.ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='force', full_name='pb.RemoveContainerOptions.force', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2191,
serialized_end=2243,
)
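# pb.ReallocOptions: target ids plus cpu (double) and mem (int64) values.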
_REALLOCOPTIONS = _descriptor.Descriptor(
name='ReallocOptions',
full_name='pb.ReallocOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ids', full_name='pb.ReallocOptions.ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu', full_name='pb.ReallocOptions.cpu', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mem', full_name='pb.ReallocOptions.mem', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2245,
serialized_end=2300,
)
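# pb.AddPodOptions: name / favor / desc strings.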
_ADDPODOPTIONS = _descriptor.Descriptor(
name='AddPodOptions',
full_name='pb.AddPodOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.AddPodOptions.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='favor', full_name='pb.AddPodOptions.favor', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='desc', full_name='pb.AddPodOptions.desc', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2302,
serialized_end=2360,
)
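# pb.RemovePodOptions: pod name only.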
_REMOVEPODOPTIONS = _descriptor.Descriptor(
name='RemovePodOptions',
full_name='pb.RemovePodOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.RemovePodOptions.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2362,
serialized_end=2394,
)
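# pb.GetPodOptions: pod name only.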
_GETPODOPTIONS = _descriptor.Descriptor(
name='GetPodOptions',
full_name='pb.GetPodOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.GetPodOptions.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2396,
serialized_end=2425,
)
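# pb.AddNodeOptions: nodename / endpoint / podname, ca / cert / key strings
# (presumably TLS credentials for the node endpoint), cpu / share / memory
# capacity and a labels map.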
_ADDNODEOPTIONS_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='pb.AddNodeOptions.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.AddNodeOptions.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.AddNodeOptions.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=208,
)
_ADDNODEOPTIONS = _descriptor.Descriptor(
name='AddNodeOptions',
full_name='pb.AddNodeOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.AddNodeOptions.nodename', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='endpoint', full_name='pb.AddNodeOptions.endpoint', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='podname', full_name='pb.AddNodeOptions.podname', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ca', full_name='pb.AddNodeOptions.ca', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cert', full_name='pb.AddNodeOptions.cert', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='key', full_name='pb.AddNodeOptions.key', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu', full_name='pb.AddNodeOptions.cpu', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='share', full_name='pb.AddNodeOptions.share', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory', full_name='pb.AddNodeOptions.memory', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='pb.AddNodeOptions.labels', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ADDNODEOPTIONS_LABELSENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2428,
serialized_end=2675,
)
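# pb.RemoveNodeOptions: nodename / podname.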
_REMOVENODEOPTIONS = _descriptor.Descriptor(
name='RemoveNodeOptions',
full_name='pb.RemoveNodeOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.RemoveNodeOptions.nodename', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='podname', full_name='pb.RemoveNodeOptions.podname', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2677,
serialized_end=2731,
)
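# pb.GetNodeOptions: podname / nodename.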
_GETNODEOPTIONS = _descriptor.Descriptor(
name='GetNodeOptions',
full_name='pb.GetNodeOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='podname', full_name='pb.GetNodeOptions.podname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.GetNodeOptions.nodename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2733,
serialized_end=2784,
)
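# pb.ListNodesOptions: podname plus an 'all' flag.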
_LISTNODESOPTIONS = _descriptor.Descriptor(
name='ListNodesOptions',
full_name='pb.ListNodesOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='podname', full_name='pb.ListNodesOptions.podname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='all', full_name='pb.ListNodesOptions.all', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2786,
serialized_end=2834,
)
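# pb.Build: base image, repo, version, dir, submodule flag, repeated commands,
# and string -> string maps (envs, args, labels, artifacts, cache).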
_BUILD_ENVSENTRY = _descriptor.Descriptor(
name='EnvsEntry',
full_name='pb.Build.EnvsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Build.EnvsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Build.EnvsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3132,
serialized_end=3175,
)
_BUILD_ARGSENTRY = _descriptor.Descriptor(
name='ArgsEntry',
full_name='pb.Build.ArgsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Build.ArgsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Build.ArgsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3177,
serialized_end=3220,
)
_BUILD_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='pb.Build.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Build.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Build.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=208,
)
_BUILD_ARTIFACTSENTRY = _descriptor.Descriptor(
name='ArtifactsEntry',
full_name='pb.Build.ArtifactsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Build.ArtifactsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Build.ArtifactsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3269,
serialized_end=3317,
)
_BUILD_CACHEENTRY = _descriptor.Descriptor(
name='CacheEntry',
full_name='pb.Build.CacheEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Build.CacheEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Build.CacheEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3319,
serialized_end=3363,
)
_BUILD = _descriptor.Descriptor(
name='Build',
full_name='pb.Build',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='base', full_name='pb.Build.base', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='repo', full_name='pb.Build.repo', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='pb.Build.version', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dir', full_name='pb.Build.dir', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='submodule', full_name='pb.Build.submodule', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commands', full_name='pb.Build.commands', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='envs', full_name='pb.Build.envs', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='args', full_name='pb.Build.args', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='pb.Build.labels', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='artifacts', full_name='pb.Build.artifacts', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cache', full_name='pb.Build.cache', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_BUILD_ENVSENTRY, _BUILD_ARGSENTRY, _BUILD_LABELSENTRY, _BUILD_ARTIFACTSENTRY, _BUILD_CACHEENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2837,
serialized_end=3363,
)
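# The Build message above pairs plain proto3 scalars (base, repo, version,
# dir, submodule, commands) with five map<string, string> fields; protoc
# lowers each map into the nested *Entry descriptors declared right before it
# (EnvsEntry, ArgsEntry, LabelsEntry, ArtifactsEntry, CacheEntry).
# A minimal usage sketch follows, kept commented out so this generated module
# still imports cleanly before the concrete classes are built further down.
# The module alias `core_pb2` is an assumption for illustration only:
#
#   import core_pb2
#   build = core_pb2.Build(base='alpine:3.8', repo='git@example.com:app.git')
#   build.commands.extend(['make', 'make install'])
#   build.envs['GOPATH'] = '/go'          # map fields behave like dicts
#   payload = build.SerializeToString()   # proto3 wire-format bytes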
_BUILDS_BUILDSENTRY = _descriptor.Descriptor(
name='BuildsEntry',
full_name='pb.Builds.BuildsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.Builds.BuildsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.Builds.BuildsEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3431,
serialized_end=3487,
)
_BUILDS = _descriptor.Descriptor(
name='Builds',
full_name='pb.Builds',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='stages', full_name='pb.Builds.stages', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='builds', full_name='pb.Builds.builds', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_BUILDS_BUILDSENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3365,
serialized_end=3487,
)
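# Builds wraps an ordered `stages` list around a `builds` map keyed by stage
# name, so a multi-stage build is the stages list resolved through the map.
# Commented sketch, under the same hypothetical `core_pb2` alias as above;
# note that message-valued map entries are created on first access:
#
#   builds = core_pb2.Builds(stages=['compile', 'package'])
#   builds.builds['compile'].commands.append('go build ./...')
#   builds.builds['compile'].envs['CGO_ENABLED'] = '0'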
_BUILDIMAGEOPTIONS = _descriptor.Descriptor(
name='BuildImageOptions',
full_name='pb.BuildImageOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.BuildImageOptions.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user', full_name='pb.BuildImageOptions.user', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='uid', full_name='pb.BuildImageOptions.uid', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tags', full_name='pb.BuildImageOptions.tags', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='builds', full_name='pb.BuildImageOptions.builds', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tar', full_name='pb.BuildImageOptions.tar', index=5,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3489,
serialized_end=3604,
)
_HOOKOPTIONS = _descriptor.Descriptor(
name='HookOptions',
full_name='pb.HookOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='after_start', full_name='pb.HookOptions.after_start', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='before_stop', full_name='pb.HookOptions.before_stop', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='force', full_name='pb.HookOptions.force', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3606,
serialized_end=3676,
)
_HEALTHCHECKOPTIONS = _descriptor.Descriptor(
name='HealthCheckOptions',
full_name='pb.HealthCheckOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tcp_ports', full_name='pb.HealthCheckOptions.tcp_ports', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='http_port', full_name='pb.HealthCheckOptions.http_port', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url', full_name='pb.HealthCheckOptions.url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='code', full_name='pb.HealthCheckOptions.code', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3678,
serialized_end=3763,
)
_LOGOPTIONS_CONFIGENTRY = _descriptor.Descriptor(
name='ConfigEntry',
full_name='pb.LogOptions.ConfigEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.LogOptions.ConfigEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.LogOptions.ConfigEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3837,
serialized_end=3882,
)
_LOGOPTIONS = _descriptor.Descriptor(
name='LogOptions',
full_name='pb.LogOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='pb.LogOptions.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='config', full_name='pb.LogOptions.config', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_LOGOPTIONS_CONFIGENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3765,
serialized_end=3882,
)
_ENTRYPOINTOPTIONS_SYSCTLSENTRY = _descriptor.Descriptor(
name='SysctlsEntry',
full_name='pb.EntrypointOptions.SysctlsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.EntrypointOptions.SysctlsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.EntrypointOptions.SysctlsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4169,
serialized_end=4215,
)
_ENTRYPOINTOPTIONS = _descriptor.Descriptor(
name='EntrypointOptions',
full_name='pb.EntrypointOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.EntrypointOptions.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='command', full_name='pb.EntrypointOptions.command', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='privileged', full_name='pb.EntrypointOptions.privileged', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dir', full_name='pb.EntrypointOptions.dir', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='log', full_name='pb.EntrypointOptions.log', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='publish', full_name='pb.EntrypointOptions.publish', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='healthcheck', full_name='pb.EntrypointOptions.healthcheck', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hook', full_name='pb.EntrypointOptions.hook', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='restart_policy', full_name='pb.EntrypointOptions.restart_policy', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sysctls', full_name='pb.EntrypointOptions.sysctls', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ENTRYPOINTOPTIONS_SYSCTLSENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3885,
serialized_end=4215,
)
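# EntrypointOptions mixes scalars with singular sub-messages (log,
# healthcheck, hook) and a sysctls map. In generated Python, singular
# sub-messages materialize on first attribute access, so they can be filled
# in without an explicit constructor. Commented sketch (same assumed alias):
#
#   ep = core_pb2.EntrypointOptions(name='web', command='./server')
#   ep.healthcheck.http_port = '8080'   # http_port is declared as a string
#   ep.healthcheck.url = '/healthz'
#   ep.sysctls['net.core.somaxconn'] = '1024'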
_DEPLOYOPTIONS_NETWORKSENTRY = _descriptor.Descriptor(
name='NetworksEntry',
full_name='pb.DeployOptions.NetworksEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.DeployOptions.NetworksEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.DeployOptions.NetworksEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4804,
serialized_end=4851,
)
_DEPLOYOPTIONS_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='pb.DeployOptions.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.DeployOptions.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.DeployOptions.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=163,
serialized_end=208,
)
_DEPLOYOPTIONS_NODELABELSENTRY = _descriptor.Descriptor(
name='NodelabelsEntry',
full_name='pb.DeployOptions.NodelabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.DeployOptions.NodelabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.DeployOptions.NodelabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4900,
serialized_end=4949,
)
_DEPLOYOPTIONS_DATAENTRY = _descriptor.Descriptor(
name='DataEntry',
full_name='pb.DeployOptions.DataEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.DeployOptions.DataEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.DeployOptions.DataEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4951,
serialized_end=4994,
)
_DEPLOYOPTIONS = _descriptor.Descriptor(
name='DeployOptions',
full_name='pb.DeployOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='pb.DeployOptions.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='entrypoint', full_name='pb.DeployOptions.entrypoint', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='podname', full_name='pb.DeployOptions.podname', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.DeployOptions.nodename', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='image', full_name='pb.DeployOptions.image', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra_args', full_name='pb.DeployOptions.extra_args', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu_quota', full_name='pb.DeployOptions.cpu_quota', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory', full_name='pb.DeployOptions.memory', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='count', full_name='pb.DeployOptions.count', index=8,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='env', full_name='pb.DeployOptions.env', index=9,
number=10, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dns', full_name='pb.DeployOptions.dns', index=10,
number=11, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra_hosts', full_name='pb.DeployOptions.extra_hosts', index=11,
number=12, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='volumes', full_name='pb.DeployOptions.volumes', index=12,
number=13, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='networks', full_name='pb.DeployOptions.networks', index=13,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='networkmode', full_name='pb.DeployOptions.networkmode', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user', full_name='pb.DeployOptions.user', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='debug', full_name='pb.DeployOptions.debug', index=16,
number=17, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='openStdin', full_name='pb.DeployOptions.openStdin', index=17,
number=18, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='pb.DeployOptions.labels', index=18,
number=19, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodelabels', full_name='pb.DeployOptions.nodelabels', index=19,
number=20, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deploy_method', full_name='pb.DeployOptions.deploy_method', index=20,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='pb.DeployOptions.data', index=21,
number=22, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='softlimit', full_name='pb.DeployOptions.softlimit', index=22,
number=23, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodes_limit', full_name='pb.DeployOptions.nodes_limit', index=23,
number=24, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_DEPLOYOPTIONS_NETWORKSENTRY, _DEPLOYOPTIONS_LABELSENTRY, _DEPLOYOPTIONS_NODELABELSENTRY, _DEPLOYOPTIONS_DATAENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4218,
serialized_end=4994,
)
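# DeployOptions is the large request message: identity (name, podname,
# nodename, image), resources (cpu_quota as a double, memory as int64 bytes,
# count as int32), repeated strings (env, dns, extra_hosts, volumes), and
# four maps. Commented sketch reusing the `ep` example above:
#
#   opts = core_pb2.DeployOptions(name='app', podname='dev', image='app:1.0',
#                                 cpu_quota=0.5, memory=512 * 1024 * 1024,
#                                 count=2)
#   opts.entrypoint.CopyFrom(ep)        # embed the entrypoint sub-message
#   opts.networks['calico'] = ''        # map<string, string>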
_REPLACEOPTIONS_FILTERLABELSENTRY = _descriptor.Descriptor(
name='FilterLabelsEntry',
full_name='pb.ReplaceOptions.FilterLabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.ReplaceOptions.FilterLabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.ReplaceOptions.FilterLabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5210,
serialized_end=5261,
)
_REPLACEOPTIONS_COPYENTRY = _descriptor.Descriptor(
name='CopyEntry',
full_name='pb.ReplaceOptions.CopyEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.ReplaceOptions.CopyEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.ReplaceOptions.CopyEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5263,
serialized_end=5306,
)
_REPLACEOPTIONS = _descriptor.Descriptor(
name='ReplaceOptions',
full_name='pb.ReplaceOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='deployOpt', full_name='pb.ReplaceOptions.deployOpt', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='force', full_name='pb.ReplaceOptions.force', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='filter_labels', full_name='pb.ReplaceOptions.filter_labels', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='copy', full_name='pb.ReplaceOptions.copy', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ids', full_name='pb.ReplaceOptions.ids', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='networkinherit', full_name='pb.ReplaceOptions.networkinherit', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_REPLACEOPTIONS_FILTERLABELSENTRY, _REPLACEOPTIONS_COPYENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4997,
serialized_end=5306,
)
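# ReplaceOptions embeds a full DeployOptions under `deployOpt` and adds
# selectors for what to replace: `filter_labels` (a map matched against
# container labels), explicit container `ids`, a `force` flag, and
# `networkinherit`, which by its name presumably carries the old container's
# network over to the replacement. Commented sketch (same assumed alias):
#
#   ropts = core_pb2.ReplaceOptions(force=True, networkinherit=True)
#   ropts.deployOpt.CopyFrom(opts)
#   ropts.filter_labels['version'] = 'v1'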
_CACHEIMAGEOPTIONS = _descriptor.Descriptor(
name='CacheImageOptions',
full_name='pb.CacheImageOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='podname', full_name='pb.CacheImageOptions.podname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.CacheImageOptions.nodename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='images', full_name='pb.CacheImageOptions.images', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='step', full_name='pb.CacheImageOptions.step', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5308,
serialized_end=5392,
)
_REMOVEIMAGEOPTIONS = _descriptor.Descriptor(
name='RemoveImageOptions',
full_name='pb.RemoveImageOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='podname', full_name='pb.RemoveImageOptions.podname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.RemoveImageOptions.nodename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='images', full_name='pb.RemoveImageOptions.images', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='step', full_name='pb.RemoveImageOptions.step', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='prune', full_name='pb.RemoveImageOptions.prune', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5394,
serialized_end=5494,
)
_COPYPATHS = _descriptor.Descriptor(
name='CopyPaths',
full_name='pb.CopyPaths',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='paths', full_name='pb.CopyPaths.paths', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5496,
serialized_end=5522,
)
_COPYOPTIONS_TARGETSENTRY = _descriptor.Descriptor(
name='TargetsEntry',
full_name='pb.CopyOptions.TargetsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.CopyOptions.TargetsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.CopyOptions.TargetsEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5586,
serialized_end=5647,
)
_COPYOPTIONS = _descriptor.Descriptor(
name='CopyOptions',
full_name='pb.CopyOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='targets', full_name='pb.CopyOptions.targets', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_COPYOPTIONS_TARGETSENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5524,
serialized_end=5647,
)
_ERRORDETAIL = _descriptor.Descriptor(
name='ErrorDetail',
full_name='pb.ErrorDetail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='pb.ErrorDetail.code', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='pb.ErrorDetail.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5649,
serialized_end=5693,
)
_BUILDIMAGEMESSAGE = _descriptor.Descriptor(
name='BuildImageMessage',
full_name='pb.BuildImageMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.BuildImageMessage.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='pb.BuildImageMessage.status', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='progress', full_name='pb.BuildImageMessage.progress', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='pb.BuildImageMessage.error', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stream', full_name='pb.BuildImageMessage.stream', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error_detail', full_name='pb.BuildImageMessage.error_detail', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5696,
serialized_end=5831,
)
_CREATECONTAINERMESSAGE_CPUENTRY = _descriptor.Descriptor(
name='CpuEntry',
full_name='pb.CreateContainerMessage.CpuEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.CreateContainerMessage.CpuEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.CreateContainerMessage.CpuEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1290,
serialized_end=1332,
)
_CREATECONTAINERMESSAGE_PUBLISHENTRY = _descriptor.Descriptor(
name='PublishEntry',
full_name='pb.CreateContainerMessage.PublishEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pb.CreateContainerMessage.PublishEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pb.CreateContainerMessage.PublishEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1929,
serialized_end=1975,
)
_CREATECONTAINERMESSAGE = _descriptor.Descriptor(
name='CreateContainerMessage',
full_name='pb.CreateContainerMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='podname', full_name='pb.CreateContainerMessage.podname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.CreateContainerMessage.nodename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='pb.CreateContainerMessage.id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='pb.CreateContainerMessage.name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='pb.CreateContainerMessage.error', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success', full_name='pb.CreateContainerMessage.success', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cpu', full_name='pb.CreateContainerMessage.cpu', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='quota', full_name='pb.CreateContainerMessage.quota', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='memory', full_name='pb.CreateContainerMessage.memory', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='publish', full_name='pb.CreateContainerMessage.publish', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hook', full_name='pb.CreateContainerMessage.hook', index=10,
number=11, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_CREATECONTAINERMESSAGE_CPUENTRY, _CREATECONTAINERMESSAGE_PUBLISHENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5834,
serialized_end=6196,
)
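# CreateContainerMessage is shaped like a per-container progress/result
# record (id, name, error, success, a per-core `cpu` map, quota, memory,
# a `publish` address map, hook output), which suggests the server streams
# one message per created container. A commented consumption sketch; the
# gRPC stub lives in a separate generated *_grpc module, so `stub` here is
# an assumption:
#
#   for msg in stub.CreateContainer(opts):
#       if msg.success:
#           print(msg.id, dict(msg.publish))
#       else:
#           print('failed on', msg.nodename, msg.error)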
_REPLACECONTAINERMESSAGE = _descriptor.Descriptor(
name='ReplaceContainerMessage',
full_name='pb.ReplaceContainerMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='create', full_name='pb.ReplaceContainerMessage.create', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remove', full_name='pb.ReplaceContainerMessage.remove', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='pb.ReplaceContainerMessage.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6199,
serialized_end=6327,
)
_RUNANDWAITMESSAGE = _descriptor.Descriptor(
name='RunAndWaitMessage',
full_name='pb.RunAndWaitMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='container_id', full_name='pb.RunAndWaitMessage.container_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='pb.RunAndWaitMessage.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6329,
serialized_end=6384,
)
_CACHEIMAGEMESSAGE = _descriptor.Descriptor(
name='CacheImageMessage',
full_name='pb.CacheImageMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='image', full_name='pb.CacheImageMessage.image', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success', full_name='pb.CacheImageMessage.success', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nodename', full_name='pb.CacheImageMessage.nodename', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='pb.CacheImageMessage.message', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6386,
serialized_end=6472,
)
_REMOVEIMAGEMESSAGE = _descriptor.Descriptor(
name='RemoveImageMessage',
full_name='pb.RemoveImageMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='image', full_name='pb.RemoveImageMessage.image', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success', full_name='pb.RemoveImageMessage.success', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='messages', full_name='pb.RemoveImageMessage.messages', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6474,
serialized_end=6544,
)
_REMOVECONTAINERMESSAGE = _descriptor.Descriptor(
name='RemoveContainerMessage',
full_name='pb.RemoveContainerMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.RemoveContainerMessage.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success', full_name='pb.RemoveContainerMessage.success', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hook', full_name='pb.RemoveContainerMessage.hook', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6546,
serialized_end=6613,
)
_REALLOCRESOURCEMESSAGE = _descriptor.Descriptor(
name='ReallocResourceMessage',
full_name='pb.ReallocResourceMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.ReallocResourceMessage.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success', full_name='pb.ReallocResourceMessage.success', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6615,
serialized_end=6668,
)
_COPYMESSAGE = _descriptor.Descriptor(
name='CopyMessage',
full_name='pb.CopyMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.CopyMessage.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='pb.CopyMessage.status', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='pb.CopyMessage.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='path', full_name='pb.CopyMessage.path', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='pb.CopyMessage.error', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='pb.CopyMessage.data', index=5,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6670,
serialized_end=6768,
)
_RUNANDWAITOPTIONS = _descriptor.Descriptor(
name='RunAndWaitOptions',
full_name='pb.RunAndWaitOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='DeployOptions', full_name='pb.RunAndWaitOptions.DeployOptions', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='Cmd', full_name='pb.RunAndWaitOptions.Cmd', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6770,
serialized_end=6844,
)
_CONTROLCONTAINEROPTIONS = _descriptor.Descriptor(
name='ControlContainerOptions',
full_name='pb.ControlContainerOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ids', full_name='pb.ControlContainerOptions.ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='pb.ControlContainerOptions.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6846,
serialized_end=6898,
)
_CONTROLCONTAINERMESSAGE = _descriptor.Descriptor(
name='ControlContainerMessage',
full_name='pb.ControlContainerMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pb.ControlContainerMessage.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='pb.ControlContainerMessage.error', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hook', full_name='pb.ControlContainerMessage.hook', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6900,
serialized_end=6966,
)
_LISTCONTAINERSOPTIONS_LABELSENTRY.containing_type = _LISTCONTAINERSOPTIONS
_LISTCONTAINERSOPTIONS.fields_by_name['labels'].message_type = _LISTCONTAINERSOPTIONS_LABELSENTRY
_PODS.fields_by_name['pods'].message_type = _POD
_PODRESOURCE_CPUENTRY.containing_type = _PODRESOURCE
_PODRESOURCE_MEMORYENTRY.containing_type = _PODRESOURCE
_PODRESOURCE_DIFFENTRY.containing_type = _PODRESOURCE
_PODRESOURCE_DETAILENTRY.containing_type = _PODRESOURCE
_PODRESOURCE.fields_by_name['cpu'].message_type = _PODRESOURCE_CPUENTRY
_PODRESOURCE.fields_by_name['memory'].message_type = _PODRESOURCE_MEMORYENTRY
_PODRESOURCE.fields_by_name['diff'].message_type = _PODRESOURCE_DIFFENTRY
_PODRESOURCE.fields_by_name['detail'].message_type = _PODRESOURCE_DETAILENTRY
_NETWORKS.fields_by_name['networks'].message_type = _NETWORK
_NODE_CPUENTRY.containing_type = _NODE
_NODE_LABELSENTRY.containing_type = _NODE
_NODE_INITCPUENTRY.containing_type = _NODE
_NODE.fields_by_name['cpu'].message_type = _NODE_CPUENTRY
_NODE.fields_by_name['labels'].message_type = _NODE_LABELSENTRY
_NODE.fields_by_name['init_cpu'].message_type = _NODE_INITCPUENTRY
_NODES.fields_by_name['nodes'].message_type = _NODE
_CONTAINER_CPUENTRY.containing_type = _CONTAINER
_CONTAINER_LABELSENTRY.containing_type = _CONTAINER
_CONTAINER_PUBLISHENTRY.containing_type = _CONTAINER
_CONTAINER.fields_by_name['cpu'].message_type = _CONTAINER_CPUENTRY
_CONTAINER.fields_by_name['labels'].message_type = _CONTAINER_LABELSENTRY
_CONTAINER.fields_by_name['publish'].message_type = _CONTAINER_PUBLISHENTRY
_CONTAINERS.fields_by_name['containers'].message_type = _CONTAINER
_ADDNODEOPTIONS_LABELSENTRY.containing_type = _ADDNODEOPTIONS
_ADDNODEOPTIONS.fields_by_name['labels'].message_type = _ADDNODEOPTIONS_LABELSENTRY
_BUILD_ENVSENTRY.containing_type = _BUILD
_BUILD_ARGSENTRY.containing_type = _BUILD
_BUILD_LABELSENTRY.containing_type = _BUILD
_BUILD_ARTIFACTSENTRY.containing_type = _BUILD
_BUILD_CACHEENTRY.containing_type = _BUILD
_BUILD.fields_by_name['envs'].message_type = _BUILD_ENVSENTRY
_BUILD.fields_by_name['args'].message_type = _BUILD_ARGSENTRY
_BUILD.fields_by_name['labels'].message_type = _BUILD_LABELSENTRY
_BUILD.fields_by_name['artifacts'].message_type = _BUILD_ARTIFACTSENTRY
_BUILD.fields_by_name['cache'].message_type = _BUILD_CACHEENTRY
_BUILDS_BUILDSENTRY.fields_by_name['value'].message_type = _BUILD
_BUILDS_BUILDSENTRY.containing_type = _BUILDS
_BUILDS.fields_by_name['builds'].message_type = _BUILDS_BUILDSENTRY
_BUILDIMAGEOPTIONS.fields_by_name['builds'].message_type = _BUILDS
_LOGOPTIONS_CONFIGENTRY.containing_type = _LOGOPTIONS
_LOGOPTIONS.fields_by_name['config'].message_type = _LOGOPTIONS_CONFIGENTRY
_ENTRYPOINTOPTIONS_SYSCTLSENTRY.containing_type = _ENTRYPOINTOPTIONS
_ENTRYPOINTOPTIONS.fields_by_name['log'].message_type = _LOGOPTIONS
_ENTRYPOINTOPTIONS.fields_by_name['healthcheck'].message_type = _HEALTHCHECKOPTIONS
_ENTRYPOINTOPTIONS.fields_by_name['hook'].message_type = _HOOKOPTIONS
_ENTRYPOINTOPTIONS.fields_by_name['sysctls'].message_type = _ENTRYPOINTOPTIONS_SYSCTLSENTRY
_DEPLOYOPTIONS_NETWORKSENTRY.containing_type = _DEPLOYOPTIONS
_DEPLOYOPTIONS_LABELSENTRY.containing_type = _DEPLOYOPTIONS
_DEPLOYOPTIONS_NODELABELSENTRY.containing_type = _DEPLOYOPTIONS
_DEPLOYOPTIONS_DATAENTRY.containing_type = _DEPLOYOPTIONS
_DEPLOYOPTIONS.fields_by_name['entrypoint'].message_type = _ENTRYPOINTOPTIONS
_DEPLOYOPTIONS.fields_by_name['networks'].message_type = _DEPLOYOPTIONS_NETWORKSENTRY
_DEPLOYOPTIONS.fields_by_name['labels'].message_type = _DEPLOYOPTIONS_LABELSENTRY
_DEPLOYOPTIONS.fields_by_name['nodelabels'].message_type = _DEPLOYOPTIONS_NODELABELSENTRY
_DEPLOYOPTIONS.fields_by_name['data'].message_type = _DEPLOYOPTIONS_DATAENTRY
_REPLACEOPTIONS_FILTERLABELSENTRY.containing_type = _REPLACEOPTIONS
_REPLACEOPTIONS_COPYENTRY.containing_type = _REPLACEOPTIONS
_REPLACEOPTIONS.fields_by_name['deployOpt'].message_type = _DEPLOYOPTIONS
_REPLACEOPTIONS.fields_by_name['filter_labels'].message_type = _REPLACEOPTIONS_FILTERLABELSENTRY
_REPLACEOPTIONS.fields_by_name['copy'].message_type = _REPLACEOPTIONS_COPYENTRY
_COPYOPTIONS_TARGETSENTRY.fields_by_name['value'].message_type = _COPYPATHS
_COPYOPTIONS_TARGETSENTRY.containing_type = _COPYOPTIONS
_COPYOPTIONS.fields_by_name['targets'].message_type = _COPYOPTIONS_TARGETSENTRY
_BUILDIMAGEMESSAGE.fields_by_name['error_detail'].message_type = _ERRORDETAIL
_CREATECONTAINERMESSAGE_CPUENTRY.containing_type = _CREATECONTAINERMESSAGE
_CREATECONTAINERMESSAGE_PUBLISHENTRY.containing_type = _CREATECONTAINERMESSAGE
_CREATECONTAINERMESSAGE.fields_by_name['cpu'].message_type = _CREATECONTAINERMESSAGE_CPUENTRY
_CREATECONTAINERMESSAGE.fields_by_name['publish'].message_type = _CREATECONTAINERMESSAGE_PUBLISHENTRY
_REPLACECONTAINERMESSAGE.fields_by_name['create'].message_type = _CREATECONTAINERMESSAGE
_REPLACECONTAINERMESSAGE.fields_by_name['remove'].message_type = _REMOVECONTAINERMESSAGE
_RUNANDWAITOPTIONS.fields_by_name['DeployOptions'].message_type = _DEPLOYOPTIONS
DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
DESCRIPTOR.message_types_by_name['ListContainersOptions'] = _LISTCONTAINERSOPTIONS
DESCRIPTOR.message_types_by_name['DeployStatusOptions'] = _DEPLOYSTATUSOPTIONS
DESCRIPTOR.message_types_by_name['DeployStatusMessage'] = _DEPLOYSTATUSMESSAGE
DESCRIPTOR.message_types_by_name['Pod'] = _POD
DESCRIPTOR.message_types_by_name['Pods'] = _PODS
DESCRIPTOR.message_types_by_name['PodResource'] = _PODRESOURCE
DESCRIPTOR.message_types_by_name['ListNetworkOptions'] = _LISTNETWORKOPTIONS
DESCRIPTOR.message_types_by_name['Network'] = _NETWORK
DESCRIPTOR.message_types_by_name['Networks'] = _NETWORKS
DESCRIPTOR.message_types_by_name['Node'] = _NODE
DESCRIPTOR.message_types_by_name['Nodes'] = _NODES
DESCRIPTOR.message_types_by_name['NodeAvailable'] = _NODEAVAILABLE
DESCRIPTOR.message_types_by_name['Container'] = _CONTAINER
DESCRIPTOR.message_types_by_name['ContainerDeployedOptions'] = _CONTAINERDEPLOYEDOPTIONS
DESCRIPTOR.message_types_by_name['Containers'] = _CONTAINERS
DESCRIPTOR.message_types_by_name['ContainerID'] = _CONTAINERID
DESCRIPTOR.message_types_by_name['ContainerIDs'] = _CONTAINERIDS
DESCRIPTOR.message_types_by_name['RemoveContainerOptions'] = _REMOVECONTAINEROPTIONS
DESCRIPTOR.message_types_by_name['ReallocOptions'] = _REALLOCOPTIONS
DESCRIPTOR.message_types_by_name['AddPodOptions'] = _ADDPODOPTIONS
DESCRIPTOR.message_types_by_name['RemovePodOptions'] = _REMOVEPODOPTIONS
DESCRIPTOR.message_types_by_name['GetPodOptions'] = _GETPODOPTIONS
DESCRIPTOR.message_types_by_name['AddNodeOptions'] = _ADDNODEOPTIONS
DESCRIPTOR.message_types_by_name['RemoveNodeOptions'] = _REMOVENODEOPTIONS
DESCRIPTOR.message_types_by_name['GetNodeOptions'] = _GETNODEOPTIONS
DESCRIPTOR.message_types_by_name['ListNodesOptions'] = _LISTNODESOPTIONS
DESCRIPTOR.message_types_by_name['Build'] = _BUILD
DESCRIPTOR.message_types_by_name['Builds'] = _BUILDS
DESCRIPTOR.message_types_by_name['BuildImageOptions'] = _BUILDIMAGEOPTIONS
DESCRIPTOR.message_types_by_name['HookOptions'] = _HOOKOPTIONS
DESCRIPTOR.message_types_by_name['HealthCheckOptions'] = _HEALTHCHECKOPTIONS
DESCRIPTOR.message_types_by_name['LogOptions'] = _LOGOPTIONS
DESCRIPTOR.message_types_by_name['EntrypointOptions'] = _ENTRYPOINTOPTIONS
DESCRIPTOR.message_types_by_name['DeployOptions'] = _DEPLOYOPTIONS
DESCRIPTOR.message_types_by_name['ReplaceOptions'] = _REPLACEOPTIONS
DESCRIPTOR.message_types_by_name['CacheImageOptions'] = _CACHEIMAGEOPTIONS
DESCRIPTOR.message_types_by_name['RemoveImageOptions'] = _REMOVEIMAGEOPTIONS
DESCRIPTOR.message_types_by_name['CopyPaths'] = _COPYPATHS
DESCRIPTOR.message_types_by_name['CopyOptions'] = _COPYOPTIONS
DESCRIPTOR.message_types_by_name['ErrorDetail'] = _ERRORDETAIL
DESCRIPTOR.message_types_by_name['BuildImageMessage'] = _BUILDIMAGEMESSAGE
DESCRIPTOR.message_types_by_name['CreateContainerMessage'] = _CREATECONTAINERMESSAGE
DESCRIPTOR.message_types_by_name['ReplaceContainerMessage'] = _REPLACECONTAINERMESSAGE
DESCRIPTOR.message_types_by_name['RunAndWaitMessage'] = _RUNANDWAITMESSAGE
DESCRIPTOR.message_types_by_name['CacheImageMessage'] = _CACHEIMAGEMESSAGE
DESCRIPTOR.message_types_by_name['RemoveImageMessage'] = _REMOVEIMAGEMESSAGE
DESCRIPTOR.message_types_by_name['RemoveContainerMessage'] = _REMOVECONTAINERMESSAGE
DESCRIPTOR.message_types_by_name['ReallocResourceMessage'] = _REALLOCRESOURCEMESSAGE
DESCRIPTOR.message_types_by_name['CopyMessage'] = _COPYMESSAGE
DESCRIPTOR.message_types_by_name['RunAndWaitOptions'] = _RUNANDWAITOPTIONS
DESCRIPTOR.message_types_by_name['ControlContainerOptions'] = _CONTROLCONTAINEROPTIONS
DESCRIPTOR.message_types_by_name['ControlContainerMessage'] = _CONTROLCONTAINERMESSAGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), dict(
DESCRIPTOR = _EMPTY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Empty)
))
_sym_db.RegisterMessage(Empty)
ListContainersOptions = _reflection.GeneratedProtocolMessageType('ListContainersOptions', (_message.Message,), dict(
LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _LISTCONTAINERSOPTIONS_LABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ListContainersOptions.LabelsEntry)
))
,
DESCRIPTOR = _LISTCONTAINERSOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ListContainersOptions)
))
_sym_db.RegisterMessage(ListContainersOptions)
_sym_db.RegisterMessage(ListContainersOptions.LabelsEntry)
DeployStatusOptions = _reflection.GeneratedProtocolMessageType('DeployStatusOptions', (_message.Message,), dict(
DESCRIPTOR = _DEPLOYSTATUSOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.DeployStatusOptions)
))
_sym_db.RegisterMessage(DeployStatusOptions)
DeployStatusMessage = _reflection.GeneratedProtocolMessageType('DeployStatusMessage', (_message.Message,), dict(
DESCRIPTOR = _DEPLOYSTATUSMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.DeployStatusMessage)
))
_sym_db.RegisterMessage(DeployStatusMessage)
Pod = _reflection.GeneratedProtocolMessageType('Pod', (_message.Message,), dict(
DESCRIPTOR = _POD,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Pod)
))
_sym_db.RegisterMessage(Pod)
Pods = _reflection.GeneratedProtocolMessageType('Pods', (_message.Message,), dict(
DESCRIPTOR = _PODS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Pods)
))
_sym_db.RegisterMessage(Pods)
PodResource = _reflection.GeneratedProtocolMessageType('PodResource', (_message.Message,), dict(
CpuEntry = _reflection.GeneratedProtocolMessageType('CpuEntry', (_message.Message,), dict(
DESCRIPTOR = _PODRESOURCE_CPUENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.PodResource.CpuEntry)
))
,
MemoryEntry = _reflection.GeneratedProtocolMessageType('MemoryEntry', (_message.Message,), dict(
DESCRIPTOR = _PODRESOURCE_MEMORYENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.PodResource.MemoryEntry)
))
,
DiffEntry = _reflection.GeneratedProtocolMessageType('DiffEntry', (_message.Message,), dict(
DESCRIPTOR = _PODRESOURCE_DIFFENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.PodResource.DiffEntry)
))
,
DetailEntry = _reflection.GeneratedProtocolMessageType('DetailEntry', (_message.Message,), dict(
DESCRIPTOR = _PODRESOURCE_DETAILENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.PodResource.DetailEntry)
))
,
DESCRIPTOR = _PODRESOURCE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.PodResource)
))
_sym_db.RegisterMessage(PodResource)
_sym_db.RegisterMessage(PodResource.CpuEntry)
_sym_db.RegisterMessage(PodResource.MemoryEntry)
_sym_db.RegisterMessage(PodResource.DiffEntry)
_sym_db.RegisterMessage(PodResource.DetailEntry)
ListNetworkOptions = _reflection.GeneratedProtocolMessageType('ListNetworkOptions', (_message.Message,), dict(
DESCRIPTOR = _LISTNETWORKOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ListNetworkOptions)
))
_sym_db.RegisterMessage(ListNetworkOptions)
Network = _reflection.GeneratedProtocolMessageType('Network', (_message.Message,), dict(
DESCRIPTOR = _NETWORK,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Network)
))
_sym_db.RegisterMessage(Network)
Networks = _reflection.GeneratedProtocolMessageType('Networks', (_message.Message,), dict(
DESCRIPTOR = _NETWORKS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Networks)
))
_sym_db.RegisterMessage(Networks)
Node = _reflection.GeneratedProtocolMessageType('Node', (_message.Message,), dict(
CpuEntry = _reflection.GeneratedProtocolMessageType('CpuEntry', (_message.Message,), dict(
DESCRIPTOR = _NODE_CPUENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Node.CpuEntry)
))
,
LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _NODE_LABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Node.LabelsEntry)
))
,
InitCpuEntry = _reflection.GeneratedProtocolMessageType('InitCpuEntry', (_message.Message,), dict(
DESCRIPTOR = _NODE_INITCPUENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Node.InitCpuEntry)
))
,
DESCRIPTOR = _NODE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Node)
))
_sym_db.RegisterMessage(Node)
_sym_db.RegisterMessage(Node.CpuEntry)
_sym_db.RegisterMessage(Node.LabelsEntry)
_sym_db.RegisterMessage(Node.InitCpuEntry)
Nodes = _reflection.GeneratedProtocolMessageType('Nodes', (_message.Message,), dict(
DESCRIPTOR = _NODES,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Nodes)
))
_sym_db.RegisterMessage(Nodes)
NodeAvailable = _reflection.GeneratedProtocolMessageType('NodeAvailable', (_message.Message,), dict(
DESCRIPTOR = _NODEAVAILABLE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.NodeAvailable)
))
_sym_db.RegisterMessage(NodeAvailable)
Container = _reflection.GeneratedProtocolMessageType('Container', (_message.Message,), dict(
CpuEntry = _reflection.GeneratedProtocolMessageType('CpuEntry', (_message.Message,), dict(
DESCRIPTOR = _CONTAINER_CPUENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Container.CpuEntry)
))
,
LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _CONTAINER_LABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Container.LabelsEntry)
))
,
PublishEntry = _reflection.GeneratedProtocolMessageType('PublishEntry', (_message.Message,), dict(
DESCRIPTOR = _CONTAINER_PUBLISHENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Container.PublishEntry)
))
,
DESCRIPTOR = _CONTAINER,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Container)
))
_sym_db.RegisterMessage(Container)
_sym_db.RegisterMessage(Container.CpuEntry)
_sym_db.RegisterMessage(Container.LabelsEntry)
_sym_db.RegisterMessage(Container.PublishEntry)
ContainerDeployedOptions = _reflection.GeneratedProtocolMessageType('ContainerDeployedOptions', (_message.Message,), dict(
DESCRIPTOR = _CONTAINERDEPLOYEDOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ContainerDeployedOptions)
))
_sym_db.RegisterMessage(ContainerDeployedOptions)
Containers = _reflection.GeneratedProtocolMessageType('Containers', (_message.Message,), dict(
DESCRIPTOR = _CONTAINERS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Containers)
))
_sym_db.RegisterMessage(Containers)
ContainerID = _reflection.GeneratedProtocolMessageType('ContainerID', (_message.Message,), dict(
DESCRIPTOR = _CONTAINERID,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ContainerID)
))
_sym_db.RegisterMessage(ContainerID)
ContainerIDs = _reflection.GeneratedProtocolMessageType('ContainerIDs', (_message.Message,), dict(
DESCRIPTOR = _CONTAINERIDS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ContainerIDs)
))
_sym_db.RegisterMessage(ContainerIDs)
RemoveContainerOptions = _reflection.GeneratedProtocolMessageType('RemoveContainerOptions', (_message.Message,), dict(
DESCRIPTOR = _REMOVECONTAINEROPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RemoveContainerOptions)
))
_sym_db.RegisterMessage(RemoveContainerOptions)
ReallocOptions = _reflection.GeneratedProtocolMessageType('ReallocOptions', (_message.Message,), dict(
DESCRIPTOR = _REALLOCOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ReallocOptions)
))
_sym_db.RegisterMessage(ReallocOptions)
AddPodOptions = _reflection.GeneratedProtocolMessageType('AddPodOptions', (_message.Message,), dict(
DESCRIPTOR = _ADDPODOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.AddPodOptions)
))
_sym_db.RegisterMessage(AddPodOptions)
RemovePodOptions = _reflection.GeneratedProtocolMessageType('RemovePodOptions', (_message.Message,), dict(
DESCRIPTOR = _REMOVEPODOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RemovePodOptions)
))
_sym_db.RegisterMessage(RemovePodOptions)
GetPodOptions = _reflection.GeneratedProtocolMessageType('GetPodOptions', (_message.Message,), dict(
DESCRIPTOR = _GETPODOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.GetPodOptions)
))
_sym_db.RegisterMessage(GetPodOptions)
AddNodeOptions = _reflection.GeneratedProtocolMessageType('AddNodeOptions', (_message.Message,), dict(
LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _ADDNODEOPTIONS_LABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.AddNodeOptions.LabelsEntry)
))
,
DESCRIPTOR = _ADDNODEOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.AddNodeOptions)
))
_sym_db.RegisterMessage(AddNodeOptions)
_sym_db.RegisterMessage(AddNodeOptions.LabelsEntry)
RemoveNodeOptions = _reflection.GeneratedProtocolMessageType('RemoveNodeOptions', (_message.Message,), dict(
DESCRIPTOR = _REMOVENODEOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RemoveNodeOptions)
))
_sym_db.RegisterMessage(RemoveNodeOptions)
GetNodeOptions = _reflection.GeneratedProtocolMessageType('GetNodeOptions', (_message.Message,), dict(
DESCRIPTOR = _GETNODEOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.GetNodeOptions)
))
_sym_db.RegisterMessage(GetNodeOptions)
ListNodesOptions = _reflection.GeneratedProtocolMessageType('ListNodesOptions', (_message.Message,), dict(
DESCRIPTOR = _LISTNODESOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ListNodesOptions)
))
_sym_db.RegisterMessage(ListNodesOptions)
Build = _reflection.GeneratedProtocolMessageType('Build', (_message.Message,), dict(
EnvsEntry = _reflection.GeneratedProtocolMessageType('EnvsEntry', (_message.Message,), dict(
DESCRIPTOR = _BUILD_ENVSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Build.EnvsEntry)
))
,
ArgsEntry = _reflection.GeneratedProtocolMessageType('ArgsEntry', (_message.Message,), dict(
DESCRIPTOR = _BUILD_ARGSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Build.ArgsEntry)
))
,
LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _BUILD_LABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Build.LabelsEntry)
))
,
ArtifactsEntry = _reflection.GeneratedProtocolMessageType('ArtifactsEntry', (_message.Message,), dict(
DESCRIPTOR = _BUILD_ARTIFACTSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Build.ArtifactsEntry)
))
,
CacheEntry = _reflection.GeneratedProtocolMessageType('CacheEntry', (_message.Message,), dict(
DESCRIPTOR = _BUILD_CACHEENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Build.CacheEntry)
))
,
DESCRIPTOR = _BUILD,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Build)
))
_sym_db.RegisterMessage(Build)
_sym_db.RegisterMessage(Build.EnvsEntry)
_sym_db.RegisterMessage(Build.ArgsEntry)
_sym_db.RegisterMessage(Build.LabelsEntry)
_sym_db.RegisterMessage(Build.ArtifactsEntry)
_sym_db.RegisterMessage(Build.CacheEntry)
Builds = _reflection.GeneratedProtocolMessageType('Builds', (_message.Message,), dict(
BuildsEntry = _reflection.GeneratedProtocolMessageType('BuildsEntry', (_message.Message,), dict(
DESCRIPTOR = _BUILDS_BUILDSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Builds.BuildsEntry)
))
,
DESCRIPTOR = _BUILDS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.Builds)
))
_sym_db.RegisterMessage(Builds)
_sym_db.RegisterMessage(Builds.BuildsEntry)
BuildImageOptions = _reflection.GeneratedProtocolMessageType('BuildImageOptions', (_message.Message,), dict(
DESCRIPTOR = _BUILDIMAGEOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.BuildImageOptions)
))
_sym_db.RegisterMessage(BuildImageOptions)
HookOptions = _reflection.GeneratedProtocolMessageType('HookOptions', (_message.Message,), dict(
DESCRIPTOR = _HOOKOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.HookOptions)
))
_sym_db.RegisterMessage(HookOptions)
HealthCheckOptions = _reflection.GeneratedProtocolMessageType('HealthCheckOptions', (_message.Message,), dict(
DESCRIPTOR = _HEALTHCHECKOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.HealthCheckOptions)
))
_sym_db.RegisterMessage(HealthCheckOptions)
LogOptions = _reflection.GeneratedProtocolMessageType('LogOptions', (_message.Message,), dict(
ConfigEntry = _reflection.GeneratedProtocolMessageType('ConfigEntry', (_message.Message,), dict(
DESCRIPTOR = _LOGOPTIONS_CONFIGENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.LogOptions.ConfigEntry)
))
,
DESCRIPTOR = _LOGOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.LogOptions)
))
_sym_db.RegisterMessage(LogOptions)
_sym_db.RegisterMessage(LogOptions.ConfigEntry)
EntrypointOptions = _reflection.GeneratedProtocolMessageType('EntrypointOptions', (_message.Message,), dict(
SysctlsEntry = _reflection.GeneratedProtocolMessageType('SysctlsEntry', (_message.Message,), dict(
DESCRIPTOR = _ENTRYPOINTOPTIONS_SYSCTLSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.EntrypointOptions.SysctlsEntry)
))
,
DESCRIPTOR = _ENTRYPOINTOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.EntrypointOptions)
))
_sym_db.RegisterMessage(EntrypointOptions)
_sym_db.RegisterMessage(EntrypointOptions.SysctlsEntry)
DeployOptions = _reflection.GeneratedProtocolMessageType('DeployOptions', (_message.Message,), dict(
NetworksEntry = _reflection.GeneratedProtocolMessageType('NetworksEntry', (_message.Message,), dict(
DESCRIPTOR = _DEPLOYOPTIONS_NETWORKSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.DeployOptions.NetworksEntry)
))
,
LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _DEPLOYOPTIONS_LABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.DeployOptions.LabelsEntry)
))
,
NodelabelsEntry = _reflection.GeneratedProtocolMessageType('NodelabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _DEPLOYOPTIONS_NODELABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.DeployOptions.NodelabelsEntry)
))
,
DataEntry = _reflection.GeneratedProtocolMessageType('DataEntry', (_message.Message,), dict(
DESCRIPTOR = _DEPLOYOPTIONS_DATAENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.DeployOptions.DataEntry)
))
,
DESCRIPTOR = _DEPLOYOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.DeployOptions)
))
_sym_db.RegisterMessage(DeployOptions)
_sym_db.RegisterMessage(DeployOptions.NetworksEntry)
_sym_db.RegisterMessage(DeployOptions.LabelsEntry)
_sym_db.RegisterMessage(DeployOptions.NodelabelsEntry)
_sym_db.RegisterMessage(DeployOptions.DataEntry)
ReplaceOptions = _reflection.GeneratedProtocolMessageType('ReplaceOptions', (_message.Message,), dict(
FilterLabelsEntry = _reflection.GeneratedProtocolMessageType('FilterLabelsEntry', (_message.Message,), dict(
DESCRIPTOR = _REPLACEOPTIONS_FILTERLABELSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ReplaceOptions.FilterLabelsEntry)
))
,
CopyEntry = _reflection.GeneratedProtocolMessageType('CopyEntry', (_message.Message,), dict(
DESCRIPTOR = _REPLACEOPTIONS_COPYENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ReplaceOptions.CopyEntry)
))
,
DESCRIPTOR = _REPLACEOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ReplaceOptions)
))
_sym_db.RegisterMessage(ReplaceOptions)
_sym_db.RegisterMessage(ReplaceOptions.FilterLabelsEntry)
_sym_db.RegisterMessage(ReplaceOptions.CopyEntry)
CacheImageOptions = _reflection.GeneratedProtocolMessageType('CacheImageOptions', (_message.Message,), dict(
DESCRIPTOR = _CACHEIMAGEOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CacheImageOptions)
))
_sym_db.RegisterMessage(CacheImageOptions)
RemoveImageOptions = _reflection.GeneratedProtocolMessageType('RemoveImageOptions', (_message.Message,), dict(
DESCRIPTOR = _REMOVEIMAGEOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RemoveImageOptions)
))
_sym_db.RegisterMessage(RemoveImageOptions)
CopyPaths = _reflection.GeneratedProtocolMessageType('CopyPaths', (_message.Message,), dict(
DESCRIPTOR = _COPYPATHS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CopyPaths)
))
_sym_db.RegisterMessage(CopyPaths)
CopyOptions = _reflection.GeneratedProtocolMessageType('CopyOptions', (_message.Message,), dict(
TargetsEntry = _reflection.GeneratedProtocolMessageType('TargetsEntry', (_message.Message,), dict(
DESCRIPTOR = _COPYOPTIONS_TARGETSENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CopyOptions.TargetsEntry)
))
,
DESCRIPTOR = _COPYOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CopyOptions)
))
_sym_db.RegisterMessage(CopyOptions)
_sym_db.RegisterMessage(CopyOptions.TargetsEntry)
ErrorDetail = _reflection.GeneratedProtocolMessageType('ErrorDetail', (_message.Message,), dict(
DESCRIPTOR = _ERRORDETAIL,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ErrorDetail)
))
_sym_db.RegisterMessage(ErrorDetail)
BuildImageMessage = _reflection.GeneratedProtocolMessageType('BuildImageMessage', (_message.Message,), dict(
DESCRIPTOR = _BUILDIMAGEMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.BuildImageMessage)
))
_sym_db.RegisterMessage(BuildImageMessage)
CreateContainerMessage = _reflection.GeneratedProtocolMessageType('CreateContainerMessage', (_message.Message,), dict(
CpuEntry = _reflection.GeneratedProtocolMessageType('CpuEntry', (_message.Message,), dict(
DESCRIPTOR = _CREATECONTAINERMESSAGE_CPUENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CreateContainerMessage.CpuEntry)
))
,
PublishEntry = _reflection.GeneratedProtocolMessageType('PublishEntry', (_message.Message,), dict(
DESCRIPTOR = _CREATECONTAINERMESSAGE_PUBLISHENTRY,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CreateContainerMessage.PublishEntry)
))
,
DESCRIPTOR = _CREATECONTAINERMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CreateContainerMessage)
))
_sym_db.RegisterMessage(CreateContainerMessage)
_sym_db.RegisterMessage(CreateContainerMessage.CpuEntry)
_sym_db.RegisterMessage(CreateContainerMessage.PublishEntry)
ReplaceContainerMessage = _reflection.GeneratedProtocolMessageType('ReplaceContainerMessage', (_message.Message,), dict(
DESCRIPTOR = _REPLACECONTAINERMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ReplaceContainerMessage)
))
_sym_db.RegisterMessage(ReplaceContainerMessage)
RunAndWaitMessage = _reflection.GeneratedProtocolMessageType('RunAndWaitMessage', (_message.Message,), dict(
DESCRIPTOR = _RUNANDWAITMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RunAndWaitMessage)
))
_sym_db.RegisterMessage(RunAndWaitMessage)
CacheImageMessage = _reflection.GeneratedProtocolMessageType('CacheImageMessage', (_message.Message,), dict(
DESCRIPTOR = _CACHEIMAGEMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CacheImageMessage)
))
_sym_db.RegisterMessage(CacheImageMessage)
RemoveImageMessage = _reflection.GeneratedProtocolMessageType('RemoveImageMessage', (_message.Message,), dict(
DESCRIPTOR = _REMOVEIMAGEMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RemoveImageMessage)
))
_sym_db.RegisterMessage(RemoveImageMessage)
RemoveContainerMessage = _reflection.GeneratedProtocolMessageType('RemoveContainerMessage', (_message.Message,), dict(
DESCRIPTOR = _REMOVECONTAINERMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RemoveContainerMessage)
))
_sym_db.RegisterMessage(RemoveContainerMessage)
ReallocResourceMessage = _reflection.GeneratedProtocolMessageType('ReallocResourceMessage', (_message.Message,), dict(
DESCRIPTOR = _REALLOCRESOURCEMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ReallocResourceMessage)
))
_sym_db.RegisterMessage(ReallocResourceMessage)
CopyMessage = _reflection.GeneratedProtocolMessageType('CopyMessage', (_message.Message,), dict(
DESCRIPTOR = _COPYMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.CopyMessage)
))
_sym_db.RegisterMessage(CopyMessage)
RunAndWaitOptions = _reflection.GeneratedProtocolMessageType('RunAndWaitOptions', (_message.Message,), dict(
DESCRIPTOR = _RUNANDWAITOPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.RunAndWaitOptions)
))
_sym_db.RegisterMessage(RunAndWaitOptions)
ControlContainerOptions = _reflection.GeneratedProtocolMessageType('ControlContainerOptions', (_message.Message,), dict(
DESCRIPTOR = _CONTROLCONTAINEROPTIONS,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ControlContainerOptions)
))
_sym_db.RegisterMessage(ControlContainerOptions)
ControlContainerMessage = _reflection.GeneratedProtocolMessageType('ControlContainerMessage', (_message.Message,), dict(
DESCRIPTOR = _CONTROLCONTAINERMESSAGE,
__module__ = 'core_pb2'
# @@protoc_insertion_point(class_scope:pb.ControlContainerMessage)
))
_sym_db.RegisterMessage(ControlContainerMessage)
_LISTCONTAINERSOPTIONS_LABELSENTRY.has_options = True
_LISTCONTAINERSOPTIONS_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_PODRESOURCE_CPUENTRY.has_options = True
_PODRESOURCE_CPUENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_PODRESOURCE_MEMORYENTRY.has_options = True
_PODRESOURCE_MEMORYENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_PODRESOURCE_DIFFENTRY.has_options = True
_PODRESOURCE_DIFFENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_PODRESOURCE_DETAILENTRY.has_options = True
_PODRESOURCE_DETAILENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_NODE_CPUENTRY.has_options = True
_NODE_CPUENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_NODE_LABELSENTRY.has_options = True
_NODE_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_NODE_INITCPUENTRY.has_options = True
_NODE_INITCPUENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_CONTAINER_CPUENTRY.has_options = True
_CONTAINER_CPUENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_CONTAINER_LABELSENTRY.has_options = True
_CONTAINER_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_CONTAINER_PUBLISHENTRY.has_options = True
_CONTAINER_PUBLISHENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_ADDNODEOPTIONS_LABELSENTRY.has_options = True
_ADDNODEOPTIONS_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_BUILD_ENVSENTRY.has_options = True
_BUILD_ENVSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_BUILD_ARGSENTRY.has_options = True
_BUILD_ARGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_BUILD_LABELSENTRY.has_options = True
_BUILD_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_BUILD_ARTIFACTSENTRY.has_options = True
_BUILD_ARTIFACTSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_BUILD_CACHEENTRY.has_options = True
_BUILD_CACHEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_BUILDS_BUILDSENTRY.has_options = True
_BUILDS_BUILDSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_LOGOPTIONS_CONFIGENTRY.has_options = True
_LOGOPTIONS_CONFIGENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_ENTRYPOINTOPTIONS_SYSCTLSENTRY.has_options = True
_ENTRYPOINTOPTIONS_SYSCTLSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_DEPLOYOPTIONS_NETWORKSENTRY.has_options = True
_DEPLOYOPTIONS_NETWORKSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_DEPLOYOPTIONS_LABELSENTRY.has_options = True
_DEPLOYOPTIONS_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_DEPLOYOPTIONS_NODELABELSENTRY.has_options = True
_DEPLOYOPTIONS_NODELABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_DEPLOYOPTIONS_DATAENTRY.has_options = True
_DEPLOYOPTIONS_DATAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_REPLACEOPTIONS_FILTERLABELSENTRY.has_options = True
_REPLACEOPTIONS_FILTERLABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_REPLACEOPTIONS_COPYENTRY.has_options = True
_REPLACEOPTIONS_COPYENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_COPYOPTIONS_TARGETSENTRY.has_options = True
_COPYOPTIONS_TARGETSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_CREATECONTAINERMESSAGE_CPUENTRY.has_options = True
_CREATECONTAINERMESSAGE_CPUENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_CREATECONTAINERMESSAGE_PUBLISHENTRY.has_options = True
_CREATECONTAINERMESSAGE_PUBLISHENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_CORERPC = _descriptor.ServiceDescriptor(
name='CoreRPC',
full_name='pb.CoreRPC',
file=DESCRIPTOR,
index=0,
options=None,
serialized_start=6969,
serialized_end=8580,
methods=[
_descriptor.MethodDescriptor(
name='ListPods',
full_name='pb.CoreRPC.ListPods',
index=0,
containing_service=None,
input_type=_EMPTY,
output_type=_PODS,
options=None,
),
_descriptor.MethodDescriptor(
name='AddPod',
full_name='pb.CoreRPC.AddPod',
index=1,
containing_service=None,
input_type=_ADDPODOPTIONS,
output_type=_POD,
options=None,
),
_descriptor.MethodDescriptor(
name='RemovePod',
full_name='pb.CoreRPC.RemovePod',
index=2,
containing_service=None,
input_type=_REMOVEPODOPTIONS,
output_type=_EMPTY,
options=None,
),
_descriptor.MethodDescriptor(
name='GetPod',
full_name='pb.CoreRPC.GetPod',
index=3,
containing_service=None,
input_type=_GETPODOPTIONS,
output_type=_POD,
options=None,
),
_descriptor.MethodDescriptor(
name='GetPodResource',
full_name='pb.CoreRPC.GetPodResource',
index=4,
containing_service=None,
input_type=_GETPODOPTIONS,
output_type=_PODRESOURCE,
options=None,
),
_descriptor.MethodDescriptor(
name='AddNode',
full_name='pb.CoreRPC.AddNode',
index=5,
containing_service=None,
input_type=_ADDNODEOPTIONS,
output_type=_NODE,
options=None,
),
_descriptor.MethodDescriptor(
name='RemoveNode',
full_name='pb.CoreRPC.RemoveNode',
index=6,
containing_service=None,
input_type=_REMOVENODEOPTIONS,
output_type=_POD,
options=None,
),
_descriptor.MethodDescriptor(
name='SetNodeAvailable',
full_name='pb.CoreRPC.SetNodeAvailable',
index=7,
containing_service=None,
input_type=_NODEAVAILABLE,
output_type=_NODE,
options=None,
),
_descriptor.MethodDescriptor(
name='GetNode',
full_name='pb.CoreRPC.GetNode',
index=8,
containing_service=None,
input_type=_GETNODEOPTIONS,
output_type=_NODE,
options=None,
),
_descriptor.MethodDescriptor(
name='GetContainer',
full_name='pb.CoreRPC.GetContainer',
index=9,
containing_service=None,
input_type=_CONTAINERID,
output_type=_CONTAINER,
options=None,
),
_descriptor.MethodDescriptor(
name='GetContainers',
full_name='pb.CoreRPC.GetContainers',
index=10,
containing_service=None,
input_type=_CONTAINERIDS,
output_type=_CONTAINERS,
options=None,
),
_descriptor.MethodDescriptor(
name='GetNodeByName',
full_name='pb.CoreRPC.GetNodeByName',
index=11,
containing_service=None,
input_type=_GETNODEOPTIONS,
output_type=_NODE,
options=None,
),
_descriptor.MethodDescriptor(
name='ListPodNodes',
full_name='pb.CoreRPC.ListPodNodes',
index=12,
containing_service=None,
input_type=_LISTNODESOPTIONS,
output_type=_NODES,
options=None,
),
_descriptor.MethodDescriptor(
name='ListNetworks',
full_name='pb.CoreRPC.ListNetworks',
index=13,
containing_service=None,
input_type=_LISTNETWORKOPTIONS,
output_type=_NETWORKS,
options=None,
),
_descriptor.MethodDescriptor(
name='ListContainers',
full_name='pb.CoreRPC.ListContainers',
index=14,
containing_service=None,
input_type=_LISTCONTAINERSOPTIONS,
output_type=_CONTAINERS,
options=None,
),
_descriptor.MethodDescriptor(
name='ListNodeContainers',
full_name='pb.CoreRPC.ListNodeContainers',
index=15,
containing_service=None,
input_type=_GETNODEOPTIONS,
output_type=_CONTAINERS,
options=None,
),
_descriptor.MethodDescriptor(
name='ContainerDeployed',
full_name='pb.CoreRPC.ContainerDeployed',
index=16,
containing_service=None,
input_type=_CONTAINERDEPLOYEDOPTIONS,
output_type=_EMPTY,
options=None,
),
_descriptor.MethodDescriptor(
name='Copy',
full_name='pb.CoreRPC.Copy',
index=17,
containing_service=None,
input_type=_COPYOPTIONS,
output_type=_COPYMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='BuildImage',
full_name='pb.CoreRPC.BuildImage',
index=18,
containing_service=None,
input_type=_BUILDIMAGEOPTIONS,
output_type=_BUILDIMAGEMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='CacheImage',
full_name='pb.CoreRPC.CacheImage',
index=19,
containing_service=None,
input_type=_CACHEIMAGEOPTIONS,
output_type=_CACHEIMAGEMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='RemoveImage',
full_name='pb.CoreRPC.RemoveImage',
index=20,
containing_service=None,
input_type=_REMOVEIMAGEOPTIONS,
output_type=_REMOVEIMAGEMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='DeployStatus',
full_name='pb.CoreRPC.DeployStatus',
index=21,
containing_service=None,
input_type=_DEPLOYSTATUSOPTIONS,
output_type=_DEPLOYSTATUSMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='RunAndWait',
full_name='pb.CoreRPC.RunAndWait',
index=22,
containing_service=None,
input_type=_RUNANDWAITOPTIONS,
output_type=_RUNANDWAITMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='CreateContainer',
full_name='pb.CoreRPC.CreateContainer',
index=23,
containing_service=None,
input_type=_DEPLOYOPTIONS,
output_type=_CREATECONTAINERMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='ReplaceContainer',
full_name='pb.CoreRPC.ReplaceContainer',
index=24,
containing_service=None,
input_type=_REPLACEOPTIONS,
output_type=_REPLACECONTAINERMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='RemoveContainer',
full_name='pb.CoreRPC.RemoveContainer',
index=25,
containing_service=None,
input_type=_REMOVECONTAINEROPTIONS,
output_type=_REMOVECONTAINERMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='ControlContainer',
full_name='pb.CoreRPC.ControlContainer',
index=26,
containing_service=None,
input_type=_CONTROLCONTAINEROPTIONS,
output_type=_CONTROLCONTAINERMESSAGE,
options=None,
),
_descriptor.MethodDescriptor(
name='ReallocResource',
full_name='pb.CoreRPC.ReallocResource',
index=27,
containing_service=None,
input_type=_REALLOCOPTIONS,
output_type=_REALLOCRESOURCEMESSAGE,
options=None,
),
])
_sym_db.RegisterServiceDescriptor(_CORERPC)
DESCRIPTOR.services_by_name['CoreRPC'] = _CORERPC
# @@protoc_insertion_point(module_scope)
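# ---------------------------------------------------------------------------
# Illustrative usage (hypothetical; not emitted by protoc): the message
# classes registered above round-trip through the protobuf wire format.
# The field names match the descriptors above, but the values are
# placeholders.
#
#   opts = ControlContainerOptions(ids=['some-container-id'], type='stop')
#   wire = opts.SerializeToString()
#   assert ControlContainerOptions.FromString(wire).type == 'stop'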
| 38.875126 | 16,249 | 0.731339 |
b9a524c2d76717a70aa199aeb8c04e4579e1a276
| 2,217 |
py
|
Python
|
src/models/text_node.py
|
moevm/nosql1h19-text-graph
|
410f156ad4f232f8aa060d43692ab020610ddfd4
|
[
"MIT"
] | null | null | null |
src/models/text_node.py
|
moevm/nosql1h19-text-graph
|
410f156ad4f232f8aa060d43692ab020610ddfd4
|
[
"MIT"
] | null | null | null |
src/models/text_node.py
|
moevm/nosql1h19-text-graph
|
410f156ad4f232f8aa060d43692ab020610ddfd4
|
[
"MIT"
] | null | null | null |
from neomodel import StructuredNode, StringProperty, JSONProperty, \
Relationship, IntegerProperty
import numpy as np
import re
from models.text_relation import TextRelation
__all__ = ['TextNode']
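# The TextNode class body itself was truncated in this dump; the sketch below
# is a hypothetical minimal reconstruction so that ``__all__`` resolves. The
# property names are assumptions, not facts about the source repository.
class TextNode(StructuredNode):
    order_id = IntegerProperty(required=True)
    label = StringProperty(required=True)
    text = StringProperty()
    alg_results = JSONProperty()
    link = Relationship('TextNode', 'ALG', model=TextRelation)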
| 31.671429 | 73 | 0.488498 |
b9a529f9f36fb2cce0a38f16148b6bc2117ab033
| 2,655 |
py
|
Python
|
tests/test_bishop_generate.py
|
otaviocarvalho/chess-negamax
|
21f1066611e581dac3257d3f46c71ca2b09b5964
|
[
"MIT"
] | 6 |
2015-04-04T15:58:29.000Z
|
2019-04-07T11:45:02.000Z
|
tests/test_bishop_generate.py
|
otaviocarvalho/chess-negamax
|
21f1066611e581dac3257d3f46c71ca2b09b5964
|
[
"MIT"
] | 1 |
2015-04-27T19:02:06.000Z
|
2015-04-27T19:02:06.000Z
|
tests/test_bishop_generate.py
|
otaviocarvalho/chess-negamax
|
21f1066611e581dac3257d3f46c71ca2b09b5964
|
[
"MIT"
] | 3 |
2015-10-04T00:22:17.000Z
|
2019-04-07T11:44:56.000Z
|
import unittest
from .helpers import StubBoard, StubPiece, C, WHITE, BLACK
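# Hypothetical stand-in for the truncated test cases: it checks only the
# geometric invariant that bishop move generators are typically tested
# against (equal file and rank deltas) and does not assume the Stub* APIs.
class TestBishopGenerate(unittest.TestCase):
    def test_moves_share_a_diagonal_with_origin(self):
        origin = (2, 0)                        # c1 as zero-based (file, rank)
        candidates = [(0, 2), (4, 2), (7, 5)]  # a3, e3, h6
        for dest in candidates:
            self.assertEqual(abs(dest[0] - origin[0]),
                             abs(dest[1] - origin[1]))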
if __name__ == '__main__':
unittest.main()
| 30.872093 | 58 | 0.581921 |
b9a5362ea01805df4bb2ad83d0b9f037b0c75078
| 481 |
py
|
Python
|
lib/fmdplugins/list_records.py
|
GonzaloAlvarez/py-ga-sysadmin
|
fbbbbcad36df9f1b3e40328ff48c22bad13a56f4
|
[
"MIT"
] | 2 |
2018-01-05T15:32:06.000Z
|
2021-06-02T13:15:05.000Z
|
lib/fmdplugins/list_records.py
|
GonzaloAlvarez/devops-tools
|
fbbbbcad36df9f1b3e40328ff48c22bad13a56f4
|
[
"MIT"
] | 67 |
2017-01-09T19:39:19.000Z
|
2018-02-28T05:33:40.000Z
|
lib/fmdplugins/list_records.py
|
GonzaloAlvarez/devops-tools
|
fbbbbcad36df9f1b3e40328ff48c22bad13a56f4
|
[
"MIT"
] | null | null | null |
from lib.fmd.namedentity import NamedEntity
from lib.fmd.decorators import Action, ListStage, GetStage
from lib.exceptions.workflow import EntryException
| 30.0625 | 63 | 0.719335 |
b9a5aa9a635301ab37ae92c6395e50231bd81a4b
| 6,033 |
py
|
Python
|
pysoa/server/action/switched.py
|
zetahernandez/pysoa
|
006e55ba877196a42c64f2ff453583d366082d55
|
[
"Apache-2.0"
] | null | null | null |
pysoa/server/action/switched.py
|
zetahernandez/pysoa
|
006e55ba877196a42c64f2ff453583d366082d55
|
[
"Apache-2.0"
] | null | null | null |
pysoa/server/action/switched.py
|
zetahernandez/pysoa
|
006e55ba877196a42c64f2ff453583d366082d55
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import (
absolute_import,
unicode_literals,
)
import abc
import six
from pysoa.server.internal.types import is_switch
__all__ = (
'SwitchedAction',
)
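# Illustrative (hypothetical) subclass sketch: a SwitchedAction routes each
# request to one of several concrete action classes based on feature
# switches. The switch constant and the V1/V2 action classes below are
# placeholders, not part of this module.
#
#   class GetUserAction(SwitchedAction):
#       switch_to_action_map = (
#           (NEW_USER_FLOW_SWITCH, GetUserActionV2),
#           (SwitchedAction.DEFAULT_ACTION, GetUserActionV1),
#       )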
| 38.673077 | 117 | 0.673463 |
b9a6e1263697c6f30d94bde78d6313fed9c57e76
| 542 |
py
|
Python
|
Seeder/settings/tests.py
|
WebarchivCZ/Seeder
|
1958c5d3f6bdcbbdb2c81dcb6abc7f689125b6a8
|
[
"MIT"
] | 8 |
2017-08-16T19:18:57.000Z
|
2022-01-24T10:08:19.000Z
|
Seeder/settings/tests.py
|
WebarchivCZ/Seeder
|
1958c5d3f6bdcbbdb2c81dcb6abc7f689125b6a8
|
[
"MIT"
] | 242 |
2017-02-03T19:15:52.000Z
|
2022-03-25T08:02:52.000Z
|
Seeder/settings/tests.py
|
WebarchivCZ/Seeder
|
1958c5d3f6bdcbbdb2c81dcb6abc7f689125b6a8
|
[
"MIT"
] | 2 |
2019-03-06T12:36:29.000Z
|
2019-07-08T12:52:20.000Z
|
from .base import *
SECRET_KEY = 'test'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'sqlite3.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
},
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
},
}
| 19.357143 | 66 | 0.605166 |
b9a767c55418efb8b98d12205d59e512ca419081
| 1,860 |
py
|
Python
|
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Author: Emmanuel Odeke <[email protected]>
# This example steps you through using resty & restAssured to save pickled/serialized
# data as a blob and then later re-using it after deserialization.
# Sample usage might be in collaborative computing, i.e. publishing results from an
# expensive computation on one machine so that other machines can load them as live data.
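import pickle


def main():
    # Minimal hypothetical reconstruction of the truncated walkthrough: the
    # original pushed the pickled bytes to a blob store via resty &
    # restAssured; the serialize/deserialize round trip is shown here with
    # pickle alone so the script stays runnable without those dependencies.
    results = {'expensive_computation': [2 ** n for n in range(10)]}
    blob = pickle.dumps(results)    # the bytes you would upload as a blob
    revived = pickle.loads(blob)    # what a collaborating machine would load
    assert revived == results
    print('round-trip OK:', revived)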
if __name__ == '__main__':
main()
| 31 | 87 | 0.7 |
b9a7d3f5b98af28c51ffb55578408fad9a1d3f99
| 3,066 |
py
|
Python
|
venv/Lib/site-packages/dataframe/_dataframe_column_set.py
|
kavanAdeshara/Expense_Tracker
|
b3e4810e858a7786e05cda6b91ba674b73b87981
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/dataframe/_dataframe_column_set.py
|
kavanAdeshara/Expense_Tracker
|
b3e4810e858a7786e05cda6b91ba674b73b87981
|
[
"Apache-2.0"
] | null | null | null |
venv/Lib/site-packages/dataframe/_dataframe_column_set.py
|
kavanAdeshara/Expense_Tracker
|
b3e4810e858a7786e05cda6b91ba674b73b87981
|
[
"Apache-2.0"
] | null | null | null |
# dataframe: a data-frame implementation using method piping
#
# Copyright (C) 2016 Simon Dirmeier
#
# This file is part of dataframe.
#
# dataframe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dataframe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dataframe. If not, see <http://www.gnu.org/licenses/>.
#
#
# @author = 'Simon Dirmeier'
# @email = '[email protected]'
from itertools import chain
import tabulate
from ._dataframe_column import DataFrameColumn
from ._dataframe_row import DataFrameRow
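# The enclosing class body was truncated in this dump. A hypothetical stub is
# restored below so the surviving ``row`` method has a class to live in: the
# real DataFrameColumnSet is iterable over its DataFrameColumn objects and
# exposes a ``colnames`` attribute, which is all that ``row`` relies on.
class DataFrameColumnSet: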
    def row(self, idx):
        """
        Returns the DataFrameRow of the DataFrame given its index.

        :param idx: the index of the row in the DataFrame
        :return: returns a DataFrameRow
        """
        return DataFrameRow(idx, [x[idx] for x in self], self.colnames)
| 30.356436 | 71 | 0.63242 |
b9a7d44b00e1b419e797c8637498d8abc23d4def
| 13,322 |
bzl
|
Python
|
java/image.bzl
|
Springworks/rules_docker
|
b943cd1fe3bf1c6c5fdac1889e952408599cffff
|
[
"Apache-2.0"
] | null | null | null |
java/image.bzl
|
Springworks/rules_docker
|
b943cd1fe3bf1c6c5fdac1889e952408599cffff
|
[
"Apache-2.0"
] | null | null | null |
java/image.bzl
|
Springworks/rules_docker
|
b943cd1fe3bf1c6c5fdac1889e952408599cffff
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A rule for creating a Java container image.
The signature of java_image is compatible with java_binary.
The signature of war_image is compatible with java_library.
"""
load(
"//container:container.bzl",
"container_pull",
_repositories = "repositories",
)
# Load the resolved digests.
load(
":java.bzl",
_JAVA_DIGESTS = "DIGESTS",
)
load(
":jetty.bzl",
_JETTY_DIGESTS = "DIGESTS",
)
DEFAULT_JAVA_BASE = select({
"@io_bazel_rules_docker//:fastbuild": "@java_image_base//image",
"@io_bazel_rules_docker//:debug": "@java_debug_image_base//image",
"@io_bazel_rules_docker//:optimized": "@java_image_base//image",
"//conditions:default": "@java_image_base//image",
})
DEFAULT_JETTY_BASE = select({
"@io_bazel_rules_docker//:fastbuild": "@jetty_image_base//image",
"@io_bazel_rules_docker//:debug": "@jetty_debug_image_base//image",
"@io_bazel_rules_docker//:optimized": "@jetty_image_base//image",
"//conditions:default": "@jetty_image_base//image",
})
load(
"//container:container.bzl",
_container = "container",
)
load(
"//lang:image.bzl",
"dep_layer_impl",
"layer_file_path",
)
def _jar_dep_layer_impl(ctx):
"""Appends a layer for a single dependency's runfiles."""
return dep_layer_impl(ctx, runfiles = java_files)
jar_dep_layer = rule(
attrs = dict(_container.image.attrs.items() + {
# The base image on which to overlay the dependency layers.
"base": attr.label(mandatory = True),
# The dependency whose runfiles we're appending.
"dep": attr.label(mandatory = True),
# Whether to lay out each dependency in a manner that is agnostic
# of the binary in which it is participating. This can increase
# sharing of the dependency's layer across images, but requires a
# symlink forest in the app layers.
"agnostic_dep_layout": attr.bool(default = True),
# Override the defaults.
"directory": attr.string(default = "/app"),
# https://github.com/bazelbuild/bazel/issues/2176
"data_path": attr.string(default = "."),
}.items()),
executable = True,
outputs = _container.image.outputs,
implementation = _jar_dep_layer_impl,
)
def _jar_app_layer_impl(ctx):
"""Appends the app layer with all remaining runfiles."""
available = depset()
for jar in ctx.attr.jar_layers:
available += java_files(jar)
# We compute the set of unavailable stuff by walking deps
# in the same way, adding in our binary and then subtracting
# out what it available.
unavailable = depset()
for jar in ctx.attr.deps + ctx.attr.runtime_deps:
unavailable += java_files(jar)
unavailable += java_files(ctx.attr.binary)
unavailable = [x for x in unavailable if x not in available]
classpath = ":".join([
layer_file_path(ctx, x)
for x in available + unavailable
])
# Classpaths can grow long and there is a limit on the length of a
# command line, so mitigate this by always writing the classpath out
# to a file instead.
classpath_file = ctx.new_file(ctx.attr.name + ".classpath")
ctx.actions.write(classpath_file, classpath)
binary_path = layer_file_path(ctx, ctx.files.binary[0])
classpath_path = layer_file_path(ctx, classpath_file)
entrypoint = [
"/usr/bin/java",
"-cp",
# Support optionally passing the classpath as a file.
"@" + classpath_path if ctx.attr._classpath_as_file else classpath,
] + ctx.attr.jvm_flags + [ctx.attr.main_class] + ctx.attr.args
file_map = {
layer_file_path(ctx, f): f
for f in unavailable + [classpath_file]
}
return _container.image.implementation(
ctx,
# We use all absolute paths.
directory = "/",
file_map = file_map,
entrypoint = entrypoint,
)
jar_app_layer = rule(
attrs = dict(_container.image.attrs.items() + {
# The binary target for which we are synthesizing an image.
"binary": attr.label(mandatory = True),
# The full list of dependencies that have their own layers
# factored into our base.
"jar_layers": attr.label_list(),
# The rest of the dependencies.
"deps": attr.label_list(),
"runtime_deps": attr.label_list(),
"jvm_flags": attr.string_list(),
# The base image on which to overlay the dependency layers.
"base": attr.label(mandatory = True),
# The main class to invoke on startup.
"main_class": attr.string(mandatory = True),
# Whether to lay out each dependency in a manner that is agnostic
# of the binary in which it is participating. This can increase
# sharing of the dependency's layer across images, but requires a
# symlink forest in the app layers.
"agnostic_dep_layout": attr.bool(default = True),
# Whether the classpath should be passed as a file.
"_classpath_as_file": attr.bool(default = False),
# Override the defaults.
"directory": attr.string(default = "/app"),
# https://github.com/bazelbuild/bazel/issues/2176
"data_path": attr.string(default = "."),
"legacy_run_behavior": attr.bool(default = False),
}.items()),
executable = True,
outputs = _container.image.outputs,
implementation = _jar_app_layer_impl,
)
def java_image(
name,
base = None,
main_class = None,
deps = [],
runtime_deps = [],
layers = [],
jvm_flags = [],
**kwargs):
"""Builds a container image overlaying the java_binary.
Args:
layers: Augments "deps" with dependencies that should be put into
their own layers.
**kwargs: See java_binary.
"""
binary_name = name + ".binary"
native.java_binary(
name = binary_name,
main_class = main_class,
# If the rule is turning a JAR built with java_library into
# a binary, then it will appear in runtime_deps. We are
# not allowed to pass deps (even []) if there is no srcs
# kwarg.
deps = (deps + layers) or None,
runtime_deps = runtime_deps,
jvm_flags = jvm_flags,
**kwargs
)
base = base or DEFAULT_JAVA_BASE
for index, dep in enumerate(layers):
this_name = "%s.%d" % (name, index)
jar_dep_layer(name = this_name, base = base, dep = dep)
base = this_name
visibility = kwargs.get("visibility", None)
jar_app_layer(
name = name,
base = base,
binary = binary_name,
main_class = main_class,
jvm_flags = jvm_flags,
deps = deps,
runtime_deps = runtime_deps,
jar_layers = layers,
visibility = visibility,
args = kwargs.get("args"),
)
def _war_dep_layer_impl(ctx):
"""Appends a layer for a single dependency's runfiles."""
# TODO(mattmoor): Today we run the risk of filenames colliding when
# they get flattened. Instead of just flattening and using basename
# we should use a file_map based scheme.
return _container.image.implementation(
ctx,
files = java_files(ctx.attr.dep),
)
_war_dep_layer = rule(
attrs = dict(_container.image.attrs.items() + {
# The base image on which to overlay the dependency layers.
"base": attr.label(mandatory = True),
# The dependency whose runfiles we're appending.
"dep": attr.label(mandatory = True),
# Whether to lay out each dependency in a manner that is agnostic
# of the binary in which it is participating. This can increase
# sharing of the dependency's layer across images, but requires a
# symlink forest in the app layers.
"agnostic_dep_layout": attr.bool(default = True),
# Override the defaults.
"directory": attr.string(default = "/jetty/webapps/ROOT/WEB-INF/lib"),
# WE WANT PATHS FLATTENED
# "data_path": attr.string(default = "."),
}.items()),
executable = True,
outputs = _container.image.outputs,
implementation = _war_dep_layer_impl,
)
def _war_app_layer_impl(ctx):
"""Appends the app layer with all remaining runfiles."""
available = depset()
for jar in ctx.attr.jar_layers:
available += java_files(jar)
# This is based on rules_appengine's WAR rules.
transitive_deps = depset()
transitive_deps += java_files(ctx.attr.library)
# TODO(mattmoor): Handle data files.
# If we start putting libs in servlet-agnostic paths,
# then consider adding symlinks here.
files = [d for d in transitive_deps if d not in available]
return _container.image.implementation(ctx, files = files)
_war_app_layer = rule(
attrs = dict(_container.image.attrs.items() + {
# The library target for which we are synthesizing an image.
"library": attr.label(mandatory = True),
# The full list of dependencies that have their own layers
# factored into our base.
"jar_layers": attr.label_list(),
# The base image on which to overlay the dependency layers.
"base": attr.label(mandatory = True),
"entrypoint": attr.string_list(default = []),
# Whether to lay out each dependency in a manner that is agnostic
# of the binary in which it is participating. This can increase
# sharing of the dependency's layer across images, but requires a
# symlink forest in the app layers.
"agnostic_dep_layout": attr.bool(default = True),
# Override the defaults.
"directory": attr.string(default = "/jetty/webapps/ROOT/WEB-INF/lib"),
# WE WANT PATHS FLATTENED
# "data_path": attr.string(default = "."),
"legacy_run_behavior": attr.bool(default = False),
}.items()),
executable = True,
outputs = _container.image.outputs,
implementation = _war_app_layer_impl,
)
def war_image(name, base = None, deps = [], layers = [], **kwargs):
"""Builds a container image overlaying the java_library as an exploded WAR.
TODO(mattmoor): For `bazel run` of this to be useful, we need to be able
to ctrl-C it and have the container actually terminate. More information:
https://github.com/bazelbuild/bazel/issues/3519
Args:
layers: Augments "deps" with dependencies that should be put into
their own layers.
**kwargs: See java_library.
"""
library_name = name + ".library"
native.java_library(name = library_name, deps = deps + layers, **kwargs)
base = base or DEFAULT_JETTY_BASE
for index, dep in enumerate(layers):
this_name = "%s.%d" % (name, index)
_war_dep_layer(name = this_name, base = base, dep = dep)
base = this_name
visibility = kwargs.get("visibility", None)
tags = kwargs.get("tags", None)
_war_app_layer(
name = name,
base = base,
library = library_name,
jar_layers = layers,
visibility = visibility,
tags = tags,
)
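# Hypothetical BUILD usage of war_image (names are illustrative assumptions):
#
#   load("@io_bazel_rules_docker//java:image.bzl", "war_image")
#
#   war_image(
#       name = "servlet_image",
#       srcs = ["MyServlet.java"],
#       layers = ["//third_party:servlet_api"],
#   )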
| 34.246787 | 79 | 0.642546 |
b9a831ae9aec7e87ced37e12721727df9e75bb48
| 17,427 |
py
|
Python
|
cupyx/jit/_builtin_funcs.py
|
khushi-411/cupy
|
b5221a478c800c5e60eef65545467de9eb00c0d9
|
[
"MIT"
] | null | null | null |
cupyx/jit/_builtin_funcs.py
|
khushi-411/cupy
|
b5221a478c800c5e60eef65545467de9eb00c0d9
|
[
"MIT"
] | null | null | null |
cupyx/jit/_builtin_funcs.py
|
khushi-411/cupy
|
b5221a478c800c5e60eef65545467de9eb00c0d9
|
[
"MIT"
] | null | null | null |
import warnings
import cupy
from cupy_backends.cuda.api import runtime
from cupy.cuda import device
from cupyx.jit import _cuda_types
from cupyx.jit._internal_types import BuiltinFunc
from cupyx.jit._internal_types import Data
from cupyx.jit._internal_types import Constant
from cupyx.jit._internal_types import Range
from cupyx.jit import _compile
from functools import reduce
builtin_functions_dict = {
range: RangeFunc(),
len: LenFunc(),
min: MinFunc(),
max: MaxFunc(),
}
range_ = RangeFunc()
syncthreads = SyncThreads()
syncwarp = SyncWarp()
shared_memory = SharedMemory()
grid = GridFunc('grid')
gridsize = GridFunc('gridsize')
laneid = LaneID()
# atomic functions
atomic_add = AtomicOp(
'Add',
('int32', 'uint32', 'uint64', 'float32', 'float64')
+ (() if runtime.is_hip else ('float16',)))
atomic_sub = AtomicOp(
'Sub', ('int32', 'uint32'))
atomic_exch = AtomicOp(
'Exch', ('int32', 'uint32', 'uint64', 'float32'))
atomic_min = AtomicOp(
'Min', ('int32', 'uint32', 'uint64'))
atomic_max = AtomicOp(
'Max', ('int32', 'uint32', 'uint64'))
atomic_inc = AtomicOp(
'Inc', ('uint32',))
atomic_dec = AtomicOp(
'Dec', ('uint32',))
atomic_cas = AtomicOp(
'CAS',
('int32', 'uint32', 'uint64')
+ (() if runtime.is_hip else ('uint16',)))
atomic_and = AtomicOp(
'And', ('int32', 'uint32', 'uint64'))
atomic_or = AtomicOp(
'Or', ('int32', 'uint32', 'uint64'))
atomic_xor = AtomicOp(
'Xor', ('int32', 'uint32', 'uint64'))
# warp-shuffle functions
_shfl_dtypes = (
('int32', 'uint32', 'int64', 'float32', 'float64')
+ (() if runtime.is_hip else ('uint64', 'float16')))
shfl_sync = WarpShuffleOp('', _shfl_dtypes)
shfl_up_sync = WarpShuffleOp('up', _shfl_dtypes)
shfl_down_sync = WarpShuffleOp('down', _shfl_dtypes)
shfl_xor_sync = WarpShuffleOp('xor', _shfl_dtypes)
| 35.206061 | 115 | 0.569748 |
b9aa6c3c1ecb2355209fd4db4f58118befbffa8a
| 225 |
py
|
Python
|
python-basic-grammer/python-basic/02-python-variables-and-string/string_strip_demo.py
|
jinrunheng/base-of-python
|
595bdbc8bfaf2136d8f1f9ea82c03b84aeaf0a39
|
[
"Apache-2.0"
] | null | null | null |
python-basic-grammer/python-basic/02-python-variables-and-string/string_strip_demo.py
|
jinrunheng/base-of-python
|
595bdbc8bfaf2136d8f1f9ea82c03b84aeaf0a39
|
[
"Apache-2.0"
] | null | null | null |
python-basic-grammer/python-basic/02-python-variables-and-string/string_strip_demo.py
|
jinrunheng/base-of-python
|
595bdbc8bfaf2136d8f1f9ea82c03b84aeaf0a39
|
[
"Apache-2.0"
] | null | null | null |
#
str1 = " hello "
print(str1)
print(len(str1))
#
print(str1.strip())
print(len(str1.strip()))
#
print(str1.lstrip())
print(len(str1.lstrip()))
#
print(str1.rstrip())
print(len(str1.rstrip()))
| 16.071429 | 25 | 0.688889 |
b9aaf0198d21a1cb3a68b8836041445460cf7efd
| 379 |
py
|
Python
|
bruges/util/__init__.py
|
hyperiongeo/bruges
|
6d9a3aae86aaa53107caaa20e9aafa390358b0f8
|
[
"Apache-2.0"
] | null | null | null |
bruges/util/__init__.py
|
hyperiongeo/bruges
|
6d9a3aae86aaa53107caaa20e9aafa390358b0f8
|
[
"Apache-2.0"
] | null | null | null |
bruges/util/__init__.py
|
hyperiongeo/bruges
|
6d9a3aae86aaa53107caaa20e9aafa390358b0f8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from .util import rms
from .util import moving_average
from .util import moving_avg_conv
from .util import moving_avg_fft
from .util import normalize
from .util import next_pow2
from .util import top_and_tail
from .util import extrapolate
from .util import nearest
from .util import deprecated
from .util import apply_along_axis
from .util import sigmoid
| 27.071429 | 34 | 0.804749 |
b9acae3f6c9a11754c72065d93acff3857609af2
| 5,423 |
py
|
Python
|
toontown/estate/DistributedHouseDoor.py
|
CrankySupertoon01/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 1 |
2021-02-13T22:40:50.000Z
|
2021-02-13T22:40:50.000Z
|
toontown/estate/DistributedHouseDoor.py
|
CrankySupertoonArchive/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 1 |
2018-07-28T20:07:04.000Z
|
2018-07-30T18:28:34.000Z
|
toontown/estate/DistributedHouseDoor.py
|
CrankySupertoonArchive/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 2 |
2019-12-02T01:39:10.000Z
|
2021-02-13T22:41:00.000Z
|
from toontown.toonbase.ToonBaseGlobal import *
from panda3d.core import *
from direct.interval.IntervalGlobal import *
from direct.distributed.ClockDelta import *
from direct.distributed import DistributedObject
from toontown.toonbase import ToontownGlobals
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.MessengerGlobal import messenger
from direct.fsm import ClassicFSM
from toontown.building import DistributedDoor
from toontown.hood import ZoneUtil
from toontown.suit import Suit
from toontown.building import FADoorCodes
from toontown.building import DoorTypes
from toontown.estate.DistributedHouse import DistributedHouse
| 40.17037 | 299 | 0.638761 |
b9ad055e162f0001e288ab22dec6a5a4746fd51d
| 2,786 |
py
|
Python
|
Neuro-Cognitive Models/Runs/Nonhier_run/res_nonhier.py
|
AGhaderi/spatial_attenNCM
|
1f7edf17f55d804d2ae3360d23623c9ab5035518
|
[
"MIT"
] | null | null | null |
Neuro-Cognitive Models/Runs/Nonhier_run/res_nonhier.py
|
AGhaderi/spatial_attenNCM
|
1f7edf17f55d804d2ae3360d23623c9ab5035518
|
[
"MIT"
] | null | null | null |
Neuro-Cognitive Models/Runs/Nonhier_run/res_nonhier.py
|
AGhaderi/spatial_attenNCM
|
1f7edf17f55d804d2ae3360d23623c9ab5035518
|
[
"MIT"
] | null | null | null |
#!/home/a.ghaderi/.conda/envs/envjm/bin/python
# Model 2
import pystan
import pandas as pd
import numpy as np
import sys
sys.path.append('../../')
import utils
parts = 1
data = utils.get_data()  # load the dataset
data = data[data['participant']==parts]
mis = np.where((data['n200lat']<.101)|(data['n200lat']>.248))[0] # missing data for n200lat
obs = np.where((data['n200lat']>.101)&(data['n200lat']<.248))[0] # observed data for n200lat
N_mis = mis.shape[0] # number of missing data
N_obs = obs.shape[0] # number of observed data
modelfile = '../../stans/res_nonhier.stan'  # path to the Stan model file
f = open(modelfile, 'r')
model_wiener = f.read()
sm = pystan.StanModel(model_code=model_wiener)  # compile the Stan model
ncohers = 2 #Number of coherence conditions
nspats = 2 #Number of spatial conditions
nconds = 4 #Number of conditions
y = data['y'].to_numpy()
cond_coher = data['cond_coher'].to_numpy()
cond_spat = data['cond_spat'].to_numpy()
conds = data['conds'].to_numpy()
n200lat = data['n200lat'].to_numpy()
# set input data for the Stan model
data_winner = {'N_obs':N_obs, #Number of trial-level observations
'N_mis':N_mis, #Number of trial-level mising data
'ncohers':ncohers, #Number of coherence conditions
'nspats':nspats, #Number of spatial conditions
'nconds':nconds, #Number of conditions
               'y':np.concatenate([y[obs],y[mis]]), #acc*rt in seconds for observed and missing data
               'cond_coher':np.concatenate([cond_coher[obs],cond_coher[mis]]), #coherence index for each trial
               'cond_spat':np.concatenate([cond_spat[obs],cond_spat[mis]]), #spatial index for each trial
               'conds':np.concatenate([conds[obs],conds[mis]]), #condition index for each trial
'n200lat_obs':n200lat[obs]}; #n200 latency for each trial observation
# setting MCMC arguments
niter = 10000
nwarmup = 4000
nchains = 1
thin = 1
initials = [] # initial values for each chain
for c in range(0, nchains):
chaininit = {
'delta': np.random.uniform(1, 3, size=ncohers),
'alpha': np.random.uniform(.5, 1.),
'eta': np.random.uniform(.01, .2),
'res': np.random.uniform(.01, .02, size=nspats),
'n200sub': np.random.uniform(.11, .2, size=nconds),
'lambda': np.random.uniform(.01, .02),
'n200lat_mis': np.random.uniform(.11, .2, size = N_mis)
}
initials.append(chaininit)
# Train the model and generate samples
fit = sm.sampling(data=data_winner, iter=niter, chains=nchains, warmup=nwarmup, thin=thin, init=initials)
utils.to_pickle(stan_model=sm, stan_fit=fit, save_path='../../save/nonhier/'+str(parts)+'_res_nonhier.pkl')
| 39.8 | 116 | 0.648959 |
b9adc3a3c0f82e03cf53dd13486c80b1bb9dbf85
| 6,691 |
py
|
Python
|
rq_dashboard/dashboard.py
|
refgenomics/rq-dashboard
|
cdfadd2b9aa9a66b0594fd5573e3c45fa8643f05
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
rq_dashboard/dashboard.py
|
refgenomics/rq-dashboard
|
cdfadd2b9aa9a66b0594fd5573e3c45fa8643f05
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
rq_dashboard/dashboard.py
|
refgenomics/rq-dashboard
|
cdfadd2b9aa9a66b0594fd5573e3c45fa8643f05
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
from redis import Redis
from redis import from_url
from rq import push_connection, pop_connection
from rq.job import Job
from functools import wraps
import times
from flask import Blueprint
from flask import current_app, url_for, abort
from flask import render_template
from rq import Queue, Worker
from rq import cancel_job, requeue_job
from rq import get_failed_queue
from math import ceil
dashboard = Blueprint('rq_dashboard', __name__,
template_folder='templates',
static_folder='static',
)
def jsonify(f):
    # minimal reconstruction (assumption): the decorator body did not survive
    # in this extract; it wraps a view so its dict result is sent as JSON.
    @wraps(f)
    def _wrapped(*args, **kwargs):
        from flask import jsonify as flask_jsonify
        return flask_jsonify(**f(*args, **kwargs))
    return _wrapped
def serialize_queues(queues):
return [dict(name=q.name, count=q.count, url=url_for('.overview',
queue_name=q.name)) for q in queues]
def serialize_date(dt):
if dt is None:
return None
return times.format(dt, 'UTC')
def serialize_job(job):
return dict(
id=job.id,
created_at=serialize_date(job.created_at),
enqueued_at=serialize_date(job.enqueued_at),
ended_at=serialize_date(job.ended_at),
origin=job.origin,
result=job._result,
exc_info=job.exc_info,
description=job.description)
def remove_none_values(input_dict):
return dict([ (k,v) for k,v in input_dict.items() if v is not None ])
def pagination_window(total_items, cur_page, per_page=5, window_size=10):
all_pages = range(1, int(ceil(total_items / float(per_page))) + 1)
    result = all_pages
if (window_size >= 1):
pages_window_start = int(max(0, min(len(all_pages) - window_size, (cur_page-1) - ceil(window_size / 2.0))))
pages_window_end = int(pages_window_start + window_size)
result = all_pages[pages_window_start:pages_window_end]
return result
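# Example (illustrative): with 47 items at 5 per page there are 10 pages, and
# pagination_window(47, 2, per_page=5, window_size=4) returns pages 1-4, a
# window positioned around the current page.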
| 29.606195 | 115 | 0.67658 |
b9ae95988166d56c353c46926e3d21a79d84f88c
| 769 |
py
|
Python
|
layers/layer1_python3/0300_acquisition/acquisition/__init__.py
|
moas/mfdata
|
ca9460c3783ddfd6ad022c96a0a8bf0e65fa36b2
|
[
"BSD-3-Clause"
] | null | null | null |
layers/layer1_python3/0300_acquisition/acquisition/__init__.py
|
moas/mfdata
|
ca9460c3783ddfd6ad022c96a0a8bf0e65fa36b2
|
[
"BSD-3-Clause"
] | null | null | null |
layers/layer1_python3/0300_acquisition/acquisition/__init__.py
|
moas/mfdata
|
ca9460c3783ddfd6ad022c96a0a8bf0e65fa36b2
|
[
"BSD-3-Clause"
] | null | null | null |
from acquisition.step import AcquisitionStep
from acquisition.stats import AcquisitionStatsDClient
from acquisition.move_step import AcquisitionMoveStep
from acquisition.delete_step import AcquisitionDeleteStep
from acquisition.batch_step import AcquisitionBatchStep
from acquisition.reinject_step import AcquisitionReinjectStep
from acquisition.fork_step import AcquisitionForkStep
from acquisition.archive_step import AcquisitionArchiveStep
from acquisition.listener import AcquisitionListener
__all__ = ['AcquisitionStep', 'AcquisitionBatchStep',
'AcquisitionMoveStep', 'AcquisitionDeleteStep',
'AcquisitionReinjectStep', 'AcquisitionForkStep',
'AcquisitionArchiveStep', 'AcquisitionStatsDClient',
'AcquisitionListener']
| 48.0625 | 63 | 0.830949 |
b9b2dd8fc97fddaaa64ec64957043ee8e8088e39
| 615 |
py
|
Python
|
frappe-bench/apps/erpnext/erpnext/non_profit/doctype/member/member.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/non_profit/doctype/member/member.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
frappe-bench/apps/erpnext/erpnext/non_profit/doctype/member/member.py
|
Semicheche/foa_frappe_docker
|
a186b65d5e807dd4caf049e8aeb3620a799c1225
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
from frappe.contacts.address_and_contact import load_address_and_contact
| 29.285714 | 72 | 0.786992 |
b9b3501a4a1a7bee83abdc50e1932071f97c2394
| 12,427 |
py
|
Python
|
networks/networks.py
|
ayyuriss/TRHPO
|
56a06d3593504647b75589ab87b5c96bdab74c9f
|
[
"MIT"
] | null | null | null |
networks/networks.py
|
ayyuriss/TRHPO
|
56a06d3593504647b75589ab87b5c96bdab74c9f
|
[
"MIT"
] | null | null | null |
networks/networks.py
|
ayyuriss/TRHPO
|
56a06d3593504647b75589ab87b5c96bdab74c9f
|
[
"MIT"
] | null | null | null |
from torch import nn
import numpy as np
import base.basenetwork as BaseN
from networks.cholesky import CholeskyBlock
| 50.51626 | 91 | 0.508168 |
b9b4c8dedee4c99c1bae0987e46ca8c83899e4d8
| 1,435 |
py
|
Python
|
icenews/api_important_words.py
|
sverrirab/icenews
|
10a5e13d4dcd5e95f746c4fec9821b4b48fa440e
|
[
"Apache-2.0"
] | 4 |
2019-04-25T21:09:39.000Z
|
2020-07-26T08:57:00.000Z
|
icenews/api_important_words.py
|
sverrirab/icenews
|
10a5e13d4dcd5e95f746c4fec9821b4b48fa440e
|
[
"Apache-2.0"
] | 1 |
2019-08-11T00:27:18.000Z
|
2019-08-12T17:36:42.000Z
|
icenews/api_important_words.py
|
sverrirab/icenews
|
10a5e13d4dcd5e95f746c4fec9821b4b48fa440e
|
[
"Apache-2.0"
] | null | null | null |
import logging
from pydantic import BaseModel, Field
from typing import List
from .similar import important_words
from .server import app
_MAX_LENGTH = 2000
logger = logging.getLogger(__name__)
# Strange things happen with error handling when using alias - splitting up into two input models
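# Assumed sketch of the split mentioned above; model and field names are
# illustrative, not the original API schema.
class ImportantWordsAliasedInput(BaseModel):
    input_string: str = Field(..., alias="in", max_length=_MAX_LENGTH)
class ImportantWordsInput(BaseModel):
    input_string: str = Field(..., max_length=_MAX_LENGTH)
class ImportantWordsOutput(BaseModel):
    important_words: List[str]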
| 25.625 | 97 | 0.724042 |
b9b51a38157d5d825f921ab19fba044fe3545044
| 206 |
py
|
Python
|
try-except.py
|
kmarcini/Learn-Python---Full-Course-for-Beginners-Tutorial-
|
8ea4ef004d86fdf393980fd356edcf5b769bfeac
|
[
"BSD-3-Clause"
] | null | null | null |
try-except.py
|
kmarcini/Learn-Python---Full-Course-for-Beginners-Tutorial-
|
8ea4ef004d86fdf393980fd356edcf5b769bfeac
|
[
"BSD-3-Clause"
] | null | null | null |
try-except.py
|
kmarcini/Learn-Python---Full-Course-for-Beginners-Tutorial-
|
8ea4ef004d86fdf393980fd356edcf5b769bfeac
|
[
"BSD-3-Clause"
] | null | null | null |
try:
# num = 10 / 0
number = int(input("Enter a number: "))
print(number)
# catch specific errors
except ZeroDivisionError as err:
print(err)
except ValueError:
print("Invalid input")
| 17.166667 | 43 | 0.650485 |
b9b691941c62b002880bb1f21ca60b0e932e41c1
| 3,574 |
py
|
Python
|
peaksampl.py
|
Gattocrucco/sipmfilter
|
74215d6c53b998808fc6c677b46030234d996bdf
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
peaksampl.py
|
Gattocrucco/sipmfilter
|
74215d6c53b998808fc6c677b46030234d996bdf
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
peaksampl.py
|
Gattocrucco/sipmfilter
|
74215d6c53b998808fc6c677b46030234d996bdf
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
import numpy as np
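# NOTE (assumption): _adddims() is called below but its definition did not
# survive in this extract; this minimal reconstruction left-pads shapes with
# singleton axes so all arrays broadcast against each other.
def _adddims(*arrays):
    n = max(a.ndim for a in arrays)
    return tuple(a.reshape((1,) * (n - a.ndim) + a.shape) for a in arrays)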
def _yz(y, z, t, yout):
"""
Shared implementation of peaksampl and sumpeaks.
"""
y = np.asarray(y)
z = np.asarray(z)
t = np.asarray(t)
y = np.pad(y, [(0, 0)] * (y.ndim - 1) + [(1, 1)], constant_values=yout)
offset = np.argmax(np.abs(y), axis=-1)
ampl = np.take_along_axis(y, np.expand_dims(offset, -1), -1)
ampl = np.squeeze(ampl, -1)
indices = t[..., :, None] - t[..., None, :] + offset[..., None, None]
indices = np.minimum(indices, y.shape[-1] - 1)
indices = np.maximum(indices, 0)
N = t.shape[-1]
indices = indices.reshape(indices.shape[:-2] + (N * N,))
n = max(y.ndim, indices.ndim)
y, indices = _adddims(y, indices)
y = np.take_along_axis(y, indices, -1)
eps = np.finfo(float).eps * N * N * ampl
y[..., ::N + 1] += np.expand_dims(eps, -1)
y = y.reshape(y.shape[:-1] + (N, N))
z = z[..., None]
y, z = _adddims(y, z)
return y, z
def peaksampl(y, z, t, yout=0):
"""
Get peak amplitudes given their sum.
This assumes that the position of the signals is given by peaks positions
even when they are summed.
Parameters
----------
y : array (..., M,)
The single signal shape.
z : array (..., N,)
The peak height in the sum of the signals for each peak.
t : int array (..., N,)
The indices of the peaks in the sum.
yout : scalar
The value of the signal outside the provided values, default 0.
Return
------
a : array (..., N),
The amplitudes such that z_i = sum_j a_j * y[t_i - t_j].
Broadcasted along non-last axis.
"""
y, z = _yz(y, z, t, yout)
a = np.linalg.solve(y, z)
return np.squeeze(a, -1)
def sumpeaks(y, a, t, yout=0):
"""
Compute the peak heights of a sum of signals.
This assumes that the position of the peaks is given by the signal
positions even when they are summed.
Parameters
----------
y : array (..., M,)
The single signal shape.
a : array (..., N,)
The amplitudes of the signals (`y` is multiplied by `a`).
t : int array (..., N,)
The indices of the position of the signals.
yout : scalar
The value of the signal outside the provided values, default 0.
Return
------
z : array (..., N,)
The peak height in the sum of the signals for each signal. Broadcasted
along non-last axis.
"""
y, a = _yz(y, a, t, yout)
z = np.matmul(y, a)
return np.squeeze(z, axis=-1)
if __name__ == '__main__':
from matplotlib import pyplot as plt
from scipy import signal
y = np.exp(-np.linspace(0, 10, 1000) / 10)
i = np.arange(1, 1000)
t0 = np.array([10, 340, 523])
a0 = np.array([3, 2, 1])
indices = i - t0[:, None]
z = np.take(y, indices, mode='clip') * a0[:, None]
z = np.where((indices < 0) | (indices >= len(y)), 0, z)
z = np.sum(z, axis=0)
t, = signal.argrelmax(z)
assert len(t) == len(t0)
a = peaksampl(y, z[t], t)
h = sumpeaks(y, a, t)
fig, ax = plt.subplots(num='peaksampl', clear=True)
ax.plot(z, color='#f55')
ax.vlines(t0, 0, a0, color='gray', zorder=3)
ax.vlines(t, 0, a, linestyle='--', zorder=3)
ax.plot(t, h, 'ok')
ax.grid('major', linestyle='--')
fig.tight_layout()
fig.show()
| 28.822581 | 78 | 0.546726 |
b9b6f990f4ce20ca5842a3bb309cb667f69ccc3e
| 453 |
py
|
Python
|
arachne/hdl/xilinx/ps8/resources/pmu.py
|
shrine-maiden-heavy-industries/arachne
|
1d0320bf6e77653656f8ce1874900743452dbac4
|
[
"BSD-3-Clause"
] | 3 |
2021-09-13T20:23:42.000Z
|
2022-01-19T13:12:32.000Z
|
arachne/hdl/xilinx/ps8/resources/pmu.py
|
shrine-maiden-heavy-industries/arachne
|
1d0320bf6e77653656f8ce1874900743452dbac4
|
[
"BSD-3-Clause"
] | null | null | null |
arachne/hdl/xilinx/ps8/resources/pmu.py
|
shrine-maiden-heavy-industries/arachne
|
1d0320bf6e77653656f8ce1874900743452dbac4
|
[
"BSD-3-Clause"
] | null | null | null |
# SPDX-License-Identifier: BSD-3-Clause
from amaranth import *
from amaranth.build import *
from .common import PS8Resource, MIOSet
__all__ = (
'PMUResource',
)
| 19.695652 | 46 | 0.699779 |
b9b9340675c6ceead7ff166bf8fe4d65fa580b58
| 4,597 |
py
|
Python
|
backend/Washlist/tests.py
|
henrikhorluck/tdt4140-washlists
|
a75c3bc38a3f915eb48cf3e9ecba848f46a2bcaa
|
[
"MIT"
] | null | null | null |
backend/Washlist/tests.py
|
henrikhorluck/tdt4140-washlists
|
a75c3bc38a3f915eb48cf3e9ecba848f46a2bcaa
|
[
"MIT"
] | 2 |
2020-05-02T18:17:44.000Z
|
2020-05-02T18:18:02.000Z
|
backend/Washlist/tests.py
|
henrikhorluck/tdt4140-washlists
|
a75c3bc38a3f915eb48cf3e9ecba848f46a2bcaa
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from Dormroom.models import Dormroom
from SIFUser.mixins import AuthTestMixin
from StudentVillage.models import StudentVillage
from Washlist.jobs import reset_washlists
from Washlist.models.Templates import TemplateListItem, TemplateWashList
from Washlist.models.WashLists import ListItem
from Washlist.serializer import TemplateWashListSerializer
| 35.091603 | 92 | 0.67022 |
b9ba39e57d52ad0baaeb81fbe95a03b7bb17d4ad
| 3,792 |
py
|
Python
|
torchvision/prototype/models/mobilenetv3.py
|
piyush01123/vision
|
c6722307e6860057b4855483d237fe00a213dcf6
|
[
"BSD-3-Clause"
] | null | null | null |
torchvision/prototype/models/mobilenetv3.py
|
piyush01123/vision
|
c6722307e6860057b4855483d237fe00a213dcf6
|
[
"BSD-3-Clause"
] | null | null | null |
torchvision/prototype/models/mobilenetv3.py
|
piyush01123/vision
|
c6722307e6860057b4855483d237fe00a213dcf6
|
[
"BSD-3-Clause"
] | null | null | null |
from functools import partial
from typing import Any, Optional, List
from torchvision.prototype.transforms import ImageNetEval
from torchvision.transforms.functional import InterpolationMode
from ...models.mobilenetv3 import MobileNetV3, _mobilenet_v3_conf, InvertedResidualConfig
from ._api import WeightsEnum, Weights
from ._meta import _IMAGENET_CATEGORIES
from ._utils import handle_legacy_interface, _ovewrite_named_param
__all__ = [
"MobileNetV3",
"MobileNet_V3_Large_Weights",
"MobileNet_V3_Small_Weights",
"mobilenet_v3_large",
"mobilenet_v3_small",
]
_COMMON_META = {
"task": "image_classification",
"architecture": "MobileNetV3",
"publication_year": 2019,
"size": (224, 224),
"min_size": (1, 1),
"categories": _IMAGENET_CATEGORIES,
"interpolation": InterpolationMode.BILINEAR,
}
| 34.472727 | 119 | 0.704114 |
b9bb675bdbf31f94537da2d2380efe251bd20dd2
| 1,036 |
py
|
Python
|
rest_auth/registration/urls.py
|
soul4code/django-rest-auth
|
b7a2e06e7736865b18f6aab79dcd42210e06c28b
|
[
"MIT"
] | null | null | null |
rest_auth/registration/urls.py
|
soul4code/django-rest-auth
|
b7a2e06e7736865b18f6aab79dcd42210e06c28b
|
[
"MIT"
] | null | null | null |
rest_auth/registration/urls.py
|
soul4code/django-rest-auth
|
b7a2e06e7736865b18f6aab79dcd42210e06c28b
|
[
"MIT"
] | null | null | null |
from django.urls import re_path
from django.views.generic import TemplateView
from .views import RegisterView, VerifyEmailView
urlpatterns = [
re_path(r'^$', RegisterView.as_view(), name='rest_register'),
re_path(r'^verify-email/$', VerifyEmailView.as_view(), name='rest_verify_email'),
# This url is used by django-allauth and empty TemplateView is
# defined just to allow reverse() call inside app, for example when email
# with verification link is being sent, then it's required to render email
# content.
# account_confirm_email - You should override this view to handle it in
# your API client somehow and then, send post to /verify-email/ endpoint
# with proper key.
# If you don't want to use API on that step, then just use ConfirmEmailView
# view from:
# django-allauth https://github.com/pennersr/django-allauth/blob/master/allauth/account/views.py
re_path(r'^account-confirm-email/(?P<key>[-:\w]+)/$', TemplateView.as_view(),
name='account_confirm_email'),
]
| 41.44 | 100 | 0.721042 |
b9bb907819b5835937644fde4b8d08e5dd987580
| 1,036 |
py
|
Python
|
crawler/tests.py
|
mental689/paddict
|
493268b62531c698687d42416edf61c602250133
|
[
"MIT"
] | 1 |
2019-06-22T10:28:21.000Z
|
2019-06-22T10:28:21.000Z
|
crawler/tests.py
|
mental689/paddict
|
493268b62531c698687d42416edf61c602250133
|
[
"MIT"
] | 4 |
2020-09-05T01:48:18.000Z
|
2022-03-02T04:29:25.000Z
|
crawler/tests.py
|
mental689/paddict
|
493268b62531c698687d42416edf61c602250133
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
# Create your tests here.
from crawler.download import *
from crawler.models import *
| 32.375 | 106 | 0.608108 |
b9bfcc9ca3f71d3591d1b453eea9313adf491d9f
| 452 |
py
|
Python
|
test_scripts/xml_example.py
|
petervdb/testrep1
|
76b6eb3de2deb9596c055f252191e28587d5520c
|
[
"MIT"
] | 1 |
2015-11-17T21:35:44.000Z
|
2015-11-17T21:35:44.000Z
|
test_scripts/xml_example.py
|
petervdb/testrep1
|
76b6eb3de2deb9596c055f252191e28587d5520c
|
[
"MIT"
] | null | null | null |
test_scripts/xml_example.py
|
petervdb/testrep1
|
76b6eb3de2deb9596c055f252191e28587d5520c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
from urllib.request import urlopen
from xml.etree.ElementTree import parse
# Download the RSS feed and parse it
u = urlopen('http://planet.python.org/rss20.xml')
doc = parse(u)
# Extract and output tags of interest
for item in doc.iterfind('channel/item'):
title = item.findtext('title')
date = item.findtext('pubDate')
link = item.findtext('link')
print(title)
print(date)
print(link)
print()
print("Program executed.")
| 20.545455 | 49 | 0.725664 |
b9c06414f6de5d6df932f87abe0ac2addfe2d410
| 1,489 |
py
|
Python
|
contacts/urls.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | 1 |
2019-04-21T18:57:57.000Z
|
2019-04-21T18:57:57.000Z
|
contacts/urls.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | null | null | null |
contacts/urls.py
|
anthowen/duplify
|
846d01c1b21230937fdf0281b0cf8c0b08a8c24e
|
[
"MIT"
] | null | null | null |
"""dedupper_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from contacts import views
admin.autodiscover()
urlpatterns = [
path('', views.index, name='contact_index'),
path('', views.index, name='lead_index'),
path('contacts/', views.contacts, name='contacts'),
path('leads/', views.leads, name='leads'),
path('table/', views.table, name='table'),
path('plotly/', views.plotly, name='plotly'),
# url(r'^keys', views.upload, name='keys'),
# path('key-gen/', views.key_gen, name='key-gen'),
# path('heroku/', generic.ListView.as_view(model=models.Contact), name='heroku'),
# path('run/', views.run, name='run'),
# path('sorted/<id>', views.merge, name='merge'),
# path('sorted/export/<type>', views.download, name='export'),
# path('sorted/report/<type>', views.download_times, name='report'),
]
| 38.179487 | 85 | 0.672263 |
b9c1739f7de89fe26aec7c763524b35992652ab0
| 92 |
py
|
Python
|
pydm/PyQt/uic.py
|
klauer/pydm
|
e26aad58a7a0eb6f7321c61aa1dace646ff652bd
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
pydm/PyQt/uic.py
|
klauer/pydm
|
e26aad58a7a0eb6f7321c61aa1dace646ff652bd
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
pydm/PyQt/uic.py
|
klauer/pydm
|
e26aad58a7a0eb6f7321c61aa1dace646ff652bd
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
from . import qtlib
QT_LIB = qtlib.QT_LIB
if QT_LIB == 'PyQt5':
from PyQt5.uic import *
| 18.4 | 27 | 0.684783 |
b9c1d738b7414d020a32d72c8b5b4b39a4b6d1d4
| 2,667 |
py
|
Python
|
CPB100/lab2b/scheduled/ingestapp.py
|
pranaynanda/training-data-analyst
|
f10ab778589129239fd5b277cfdefb41638eded5
|
[
"Apache-2.0"
] | null | null | null |
CPB100/lab2b/scheduled/ingestapp.py
|
pranaynanda/training-data-analyst
|
f10ab778589129239fd5b277cfdefb41638eded5
|
[
"Apache-2.0"
] | null | null | null |
CPB100/lab2b/scheduled/ingestapp.py
|
pranaynanda/training-data-analyst
|
f10ab778589129239fd5b277cfdefb41638eded5
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START app]
import os
import logging
import transform
import flask
import google.cloud.storage as gcs
# [start config]
app = flask.Flask(__name__)
# Configure this environment variable via app.yaml
CLOUD_STORAGE_BUCKET = os.environ['CLOUD_STORAGE_BUCKET']
#
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
# [end config]
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8080, debug=True)
# [END app]
| 32.13253 | 214 | 0.683915 |
b9c3b62c39c9c9979a0f73775051d1aecd742c75
| 73 |
py
|
Python
|
index.py
|
StarSky1/microsoft-python-study
|
7fdc1ad87ac0eeb497013d7792f499416aac32d9
|
[
"MIT"
] | null | null | null |
index.py
|
StarSky1/microsoft-python-study
|
7fdc1ad87ac0eeb497013d7792f499416aac32d9
|
[
"MIT"
] | null | null | null |
index.py
|
StarSky1/microsoft-python-study
|
7fdc1ad87ac0eeb497013d7792f499416aac32d9
|
[
"MIT"
] | null | null | null |
name = input('input your name:')
print('hello')
print(name.capitalize())
| 24.333333 | 31 | 0.69863 |
b9c4867749ea67a9a267a9e62cea575f210f9260
| 2,526 |
py
|
Python
|
credentials.py
|
Machel54/-pass-locker-
|
8ddf14cf755924ca903919177f9f878f65a08042
|
[
"MIT"
] | null | null | null |
credentials.py
|
Machel54/-pass-locker-
|
8ddf14cf755924ca903919177f9f878f65a08042
|
[
"MIT"
] | null | null | null |
credentials.py
|
Machel54/-pass-locker-
|
8ddf14cf755924ca903919177f9f878f65a08042
|
[
"MIT"
] | 1 |
2020-01-29T02:05:49.000Z
|
2020-01-29T02:05:49.000Z
|
import pyperclip
import random
import string
| 30.071429 | 111 | 0.665479 |
b9c5365f366487d350d0993e89760939da233546
| 80 |
py
|
Python
|
tests/test_dice.py
|
mehulsatardekar/dice-on-demand
|
fa1ce1214975ba70c5d61390408a4de2418cf997
|
[
"MIT"
] | 1 |
2020-12-03T14:27:20.000Z
|
2020-12-03T14:27:20.000Z
|
tests/test_dice.py
|
mehulsatardekar/dice-on-demand
|
fa1ce1214975ba70c5d61390408a4de2418cf997
|
[
"MIT"
] | 11 |
2020-10-21T17:51:12.000Z
|
2020-11-09T12:02:52.000Z
|
tests/test_dice.py
|
mehulsatardekar/dice-on-demand
|
fa1ce1214975ba70c5d61390408a4de2418cf997
|
[
"MIT"
] | 27 |
2021-09-09T22:53:21.000Z
|
2021-11-20T22:46:16.000Z
|
import unittest
import app
| 11.428571 | 33 | 0.6625 |
b9c62ba9c79d4ffcb00ede68fc940fc877d45118
| 5,614 |
py
|
Python
|
annotations/rip_annotated_junctions.py
|
ChristopherWilks/snaptron
|
82ea3c5c5f2fbb726bba6d8c2bd0f7713291833a
|
[
"MIT"
] | 25 |
2016-01-08T02:02:36.000Z
|
2021-12-29T14:00:58.000Z
|
annotations/rip_annotated_junctions.py
|
ChristopherWilks/snaptron
|
82ea3c5c5f2fbb726bba6d8c2bd0f7713291833a
|
[
"MIT"
] | 11 |
2016-02-25T01:44:46.000Z
|
2021-07-02T05:52:55.000Z
|
annotations/rip_annotated_junctions.py
|
ChristopherWilks/snaptron
|
82ea3c5c5f2fbb726bba6d8c2bd0f7713291833a
|
[
"MIT"
] | 7 |
2016-02-13T01:45:15.000Z
|
2021-11-22T11:04:12.000Z
|
#!/usr/bin/env python
"""
rip_annotated_junctions.py
Non-reference/species verson of this script, no lift-over
Rips junctions from annotation files contained in
jan_24_2016_annotations.tar.gz, as described in annotation_definition.md.
Junctions are dumped to stdout, which we record as annotated_junctions.tsv.gz
in runs/sra (same directory as this file). annotated_junctions.tsv.gz is
required by tables.py. The format of annotated_junctions.tsv.gz is
(tab-separated fields), one per junction
1. Chromosome
2. Start position (1-based, inclusive)
3. End position (1-based, inclusive)
4. Strand (+ or -)
5. anno source (abbreviation)
Must have
Stats are written to stderr
From the runs/sra/v2 directory, we ran
pypy rip_annotated_junctions.py
--extract-script-dir /path/to/hisat2-2.0.1-beta
--annotations /path/to/directory_with_gzipped_annotation_gtfs
| sort -k1,1 -k2,2n -k3,3n | gzip >annotated_junctions.tsv.gz
"""
import subprocess
import tarfile
import argparse
import tempfile
import atexit
import shutil
import glob
import os
import gzip
import sys
#file2source = {"hg19/gencode.v19.annotation.gtf.gz":"gC19","hg19/refGene.txt.gz":"rG19","hg19/acembly.txt.gz":"aC19","hg19/ccdsGene.txt.gz":"cG19","hg19/vegaGene.txt.gz":"vG19","hg19/knownGene.txt.gz":"kG19","hg19/mgcGenes.txt.gz":"mG19","hg19/lincRNAsTranscripts.txt.gz":"lR19","hg19/sibGene.txt.gz":"sG19","hg38/refGene.txt.gz":"rG38","hg38/ccdsGene.txt.gz":"cG38","hg38/gencode.v24.annotation.gtf.gz":"gC38","hg38/knownGene.txt.gz":"kG38","hg38/mgcGenes.txt.gz":"mG38","hg38/lincRNAsTranscripts.txt.gz":"lR38","hg38/sibGene.txt.gz":"sG38"}
#file2source = {"mm10/mouse10_ucsc_genes.gtf.gz":"kG10","mm10/mouse10_gencodevm11_comp.gtf.gz":"gC11","mm10/mouse10_gencodevm09_comp.gtf.gz":"gC09","mm10/mouse10_refseq_refgene.gtf.gz":"rG10"}
file2source = {"mouse10_ucsc_genes.gtf.gz":"kG10","mouse10_gencodevm11_comp.gtf.gz":"gC11","mouse10_gencodevm09_comp.gtf.gz":"gC09","mouse10_refseq_refgene.gtf.gz":"rG10"}
if __name__ == '__main__':
# Print file's docstring if -h is invoked
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
# Add command-line arguments
parser.add_argument('--extract-script-dir', type=str, required=True,
help=('path to directory containing extract_splice_sites.py script (from HISAT2)')
)
parser.add_argument('--annotations', type=str, required=True,
help=('full path to directory that has the annotation GTF(s) in gzipped format')
)
args = parser.parse_args()
extract_destination = tempfile.mkdtemp()
atexit.register(shutil.rmtree, extract_destination)
#with tarfile.open(args.annotations, 'r:gz') as tar:
# tar.extractall(path=extract_destination)
extract_splice_sites_path = os.path.join(args.extract_script_dir,
'extract_splice_sites.py')
containing_dir = os.path.dirname(os.path.realpath(__file__))
annotated_junctions_ = set()
for junction_file in glob.glob(
os.path.join(args.annotations, '*')
):
label = os.path.basename(junction_file)
datasource_code = file2source[label]
unique_junctions = set()
#extract_splice_sites_path prints 0-based, exon coords around junctions
#hence the +2 for the start here
extract_process = subprocess.Popen(' '.join([
sys.executable,
extract_splice_sites_path,
'<(gzip -cd %s)'
% junction_file
]),
shell=True,
executable='/bin/bash',
stdout=subprocess.PIPE
)
for line in extract_process.stdout:
tokens = line.strip().split('\t')
tokens[1] = int(tokens[1]) + 2
tokens[2] = int(tokens[2])
if tokens[2] < tokens[1]:
print >>sys.stderr, (
'Invalid junction ({}, {}, {}) from file {}. '
'Skipping.'
).format(
tokens[0], tokens[1], tokens[2], junction_file
)
continue
tokens.append(datasource_code)
junction_to_add = tuple(tokens)
annotated_junctions_.add(junction_to_add)
unique_junctions.add(junction_to_add)
extract_process.stdout.close()
exit_code = extract_process.wait()
if exit_code != 0:
raise RuntimeError(
'extract_splice_sites.py had nonzero exit code {}.'.format(
exit_code
)
)
print >>sys.stderr, 'Junctions in {}: {}'.format(
label,
len(unique_junctions)
)
junc2datasource = {}
for junction in annotated_junctions_:
if junction[:4] not in junc2datasource:
junc2datasource[junction[:4]]=set()
junc2datasource[junction[:4]].add(junction[4])
seen = set()
for junction in annotated_junctions_:
if junction[:4] not in seen:
sources = ",".join(sorted(junc2datasource[junction[:4]]))
print "%s\t%s" % ('\t'.join(map(str, junction[:4])),sources)
seen.add(junction[:4])
| 44.555556 | 543 | 0.603669 |
b9c731695680778a55c685fcfc15ab5e3eccf437
| 5,438 |
py
|
Python
|
dramkit/_tmp/VMD.py
|
Genlovy-Hoo/dramkit
|
fa3d2f35ebe9effea88a19e49d876b43d3c5c4c7
|
[
"MIT"
] | null | null | null |
dramkit/_tmp/VMD.py
|
Genlovy-Hoo/dramkit
|
fa3d2f35ebe9effea88a19e49d876b43d3c5c4c7
|
[
"MIT"
] | null | null | null |
dramkit/_tmp/VMD.py
|
Genlovy-Hoo/dramkit
|
fa3d2f35ebe9effea88a19e49d876b43d3c5c4c7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
def vmd( signal, alpha, tau, K, DC, init, tol):
'''
    Variational Mode Decomposition (VMD).
    Decomposes the input signal into K band-limited intrinsic modes u.
    The returned center frequencies omega are normalized;
    multiply by 2*pi/fs (i.e. 2*pi/fs*omega) to convert them.
Input and Parameters:
signal - the time domain signal (1D) to be decomposed
alpha - the balancing parameter of the data-fidelity constraint
tau - time-step of the dual ascent ( pick 0 for noise-slack )
K - the number of modes to be recovered
DC - true if the first mode is put and kept at DC (0-freq)
init - 0 = all omegas start at 0
1 = all omegas start uniformly distributed
2 = all omegas initialized randomly
tol - tolerance of convergence criterion; typically around 1e-6
Output:
u - the collection of decomposed modes
u_hat - spectra of the modes
omega - estimated mode center-frequencies
'''
# Period and sampling frequency of input signal
    # the total duration is taken as 1 s, so fs = 1/len(signal)
save_T = len(signal)
fs = 1 / save_T
# extend the signal by mirroring
T = save_T
f_mirror = []
temp = signal[0:T//2]
    f_mirror.extend(temp[::-1])  # reversed first half mirrors the signal start
f_mirror.extend(signal)
temp = signal[T//2:T]
f_mirror.extend(temp[::-1])
f = f_mirror
# Time Domain 0 to T (of mirrored signal)
T = len(f)
    t = [(i + 1) / T for i in range(T)]  # normalized time axis in (0, 1]
# Spectral Domain discretization
    # freqs: frequency axis centred at zero
freqs = np.array( [i - 0.5 - 1 / T for i in t] )
# Maximum number of iterations (if not converged yet, then it won't anyway)
N = 500
# For future generalizations: individual alpha for each mode
Alpha = alpha * np.ones(K)
# Construct and center f_hat
transformed = np.fft.fft(f) # fft
f_hat = np.fft.fftshift(transformed) # fftshift
f_hat_plus = f_hat
f_hat_plus[0:T // 2] = 0
    # f_hat_plus[0:T] = 1
# matrix keeping track of every iterant // could be discarded for mem
u_hat_plus = [np.zeros((N, len(freqs)), dtype=complex) for i in range(K)]
# Initialization of omega_k
omega_plus = np.zeros((N, K))
if init == 1:
for i in range(K):
omega_plus[0, i] = (0.5 / K) * i
elif init == 2:
omega_plus[0, :] = np.sort(np.exp(np.log(fs) + (np.log(0.5) - np.log(fs)) * np.random.rand(K)))
else:
omega_plus[0, :] = 0
# if DC mode imposed, set its omega to 0
if DC:
omega_plus[0, 0] = 0
# start with empty dual variables
lambda_hat = np.zeros( (N, len(freqs)), dtype=complex)
# other inits
    eps = 2.2204e-16  # machine epsilon for float64
uDiff = tol + eps # update step
n = 1 # loop counter
sum_uk = 0 # accumulator
#----------- Main loop for iterative updates----------
while (uDiff > tol and n < N ): #not converged and below iterations limit
#update first mode accumulator
k = 0
        sum_uk = u_hat_plus[K-1][n-1,:]+ sum_uk - u_hat_plus[0][n-1,:]  # running sum of all mode spectra except the one being updated
#update spectrum of first mode through Wiener filter of residuals
u_hat_plus[k][n,:] = (f_hat_plus - sum_uk - lambda_hat[n-1,:]/2)/(1+Alpha[k]*(freqs - omega_plus[n-1,k])**2)
#update first omega if not held at 0
if not DC:
omega_plus[n,k] = (freqs[T//2:T]*np.mat(np.abs(u_hat_plus[k][n, T//2:T])**2).H)/np.sum(np.abs(u_hat_plus[k][n,T//2:T])**2)
#update of any other mode
for k in range(K-1):
#accumulator
sum_uk = u_hat_plus[k][n,:] + sum_uk - u_hat_plus[k+1][n-1,:]
#mode spectrum
u_hat_plus[k+1][n,:] = (f_hat_plus - sum_uk - lambda_hat[n-1,:]/2)/(1+Alpha[k+1]*(freqs - omega_plus[n-1,k+1])**2)
#center frequencies
omega_plus[n,k+1] = (freqs[T//2:T]*np.mat(np.abs(u_hat_plus[k+1][n, T//2:T])**2).H)/np.sum(np.abs(u_hat_plus[k+1][n,T//2:T])**2)
#Dual ascent
lambda_hat[n,:] = lambda_hat[n-1,:] + tau*(np.sum([ u_hat_plus[i][n,:] for i in range(K)],0) - f_hat_plus)
#loop counter
n = n+1
#converged yet?
uDiff = eps
for i in range(K):
uDiff = uDiff + 1/T*(u_hat_plus[i][n-1,:]-u_hat_plus[i][n-2,:])*np.mat((u_hat_plus[i][n-1,:]-u_hat_plus[i][n-2,:]).conjugate()).H
uDiff = np.abs(uDiff)
# ------ Postprocessing and cleanup-------
#discard empty space if converged early
N = min(N,n)
omega = omega_plus[0:N,:]
#Signal reconstruction
u_hat = np.zeros((T, K), dtype=complex)
temp = [u_hat_plus[i][N-1,T//2:T] for i in range(K) ]
u_hat[T//2:T,:] = np.squeeze(temp).T
temp = np.squeeze(np.mat(temp).conjugate())
u_hat[1:(T//2+1),:] = temp.T[::-1]
u_hat[0,:] = (u_hat[-1,:]).conjugate()
u = np.zeros((K,len(t)))
for k in range(K):
u[k,:]=np.real(np.fft.ifft(np.fft.ifftshift(u_hat[:,k])))
#remove mirror part
u = u[:,T//4:3*T//4]
#recompute spectrum
u_hat = np.zeros((T//2, K), dtype=complex)
for k in range(K):
u_hat[:,k]= np.squeeze( np.mat( np.fft.fftshift(np.fft.fft(u[k,:])) ).H)
return u, u_hat, omega
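# Minimal usage sketch (the test signal and parameter values below are
# assumptions for illustration, not part of the original module):
if __name__ == '__main__':
    T = 1000
    ts = np.arange(T) / T
    sig = np.cos(2 * np.pi * 2 * ts) + 0.25 * np.cos(2 * np.pi * 24 * ts)
    u, u_hat, omega = vmd(sig, alpha=2000, tau=0, K=2, DC=False, init=1, tol=1e-7)
    print('estimated (normalized) center frequencies:', omega[-1, :])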
| 37.503448 | 141 | 0.580912 |
b9c81413c2bd63d72d0731352d31911ef52240f6
| 480 |
py
|
Python
|
forum/main.py
|
asmaasalih/my_project
|
89183d7a2578fa302e94ea29570ab527e9ca47b5
|
[
"MIT"
] | 1 |
2018-03-21T07:51:36.000Z
|
2018-03-21T07:51:36.000Z
|
forum/main.py
|
asmaasalih/my_project
|
89183d7a2578fa302e94ea29570ab527e9ca47b5
|
[
"MIT"
] | null | null | null |
forum/main.py
|
asmaasalih/my_project
|
89183d7a2578fa302e94ea29570ab527e9ca47b5
|
[
"MIT"
] | null | null | null |
import models
import stores
member1 =models.Member("ahmed",33)
member2 =models.Member("mohamed",30)
post1=models.Post("Post1", "Content1")
post2= models.Post("Post2", "Content2")
post3= models.Post("Post3", "Content3")
#member store
member_store=stores.MemberStore()
member_store.add(member1)
member_store.add(member2)
print (member_store.get_all())
post_store=stores.PostStore()
post_store.add(post1)
post_store.add(post2)
post_store.add(post3)
print (post_store.get_all())
| 20.869565 | 39 | 0.772917 |
b9c89d9ad7d4587730637df2e5c8576e03c43ad8
| 3,115 |
py
|
Python
|
shellfind.py
|
bhavyanshu/Shell-Finder
|
308b3ba7f1a53b8a6cc738d69c01f4b7108d0860
|
[
"Apache-2.0"
] | 4 |
2016-06-15T22:08:29.000Z
|
2019-10-16T13:12:51.000Z
|
shellfind.py
|
kaitolegion/Shell-Finder
|
308b3ba7f1a53b8a6cc738d69c01f4b7108d0860
|
[
"Apache-2.0"
] | null | null | null |
shellfind.py
|
kaitolegion/Shell-Finder
|
308b3ba7f1a53b8a6cc738d69c01f4b7108d0860
|
[
"Apache-2.0"
] | 7 |
2015-07-08T22:21:52.000Z
|
2021-05-31T14:05:47.000Z
|
#!/usr/bin/env python
'''
Author : Bhavyanshu Parasher
Email : [email protected]
Description : shellfind.py is a Python command-line utility that looks for web shells an attacker may have uploaded to a site. It tries all known shell paths via dictionary matching.
'''
import socket
import sys
import httplib
from urlparse import urlparse
import time as t
import urllib2
from urllib2 import Request, urlopen, URLError
negative = '\033[91m'
positive = '\033[32m'
wait = '\033[95m'
final = '\033[93m'
total_scanned_global=0
found_scanned_global=0
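# NOTE (assumption): the original main() body did not survive in this
# extract. The sketch below is a minimal reconstruction based on the module
# docstring: it probes a target site for a few well-known shell filenames
# and reports URLs answering with HTTP 200. The shell list and URL scheme
# are illustrative, not the author's dictionary.
def main():
    global total_scanned_global, found_scanned_global
    site = raw_input("Enter target site (e.g. example.com): ").strip()
    shells = ["c99.php", "r57.php", "shell.php", "b374k.php"]  # assumed sample list
    for shell in shells:
        url = "http://%s/%s" % (site, shell)
        total_scanned_global += 1
        try:
            response = urlopen(Request(url), timeout=10)
            if response.getcode() == 200:
                found_scanned_global += 1
                print positive + "[FOUND] " + url
        except URLError:
            print negative + "[MISS] " + url
    print final + "Scanned %d URLs, found %d possible shells" % (total_scanned_global, found_scanned_global)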
if __name__ == '__main__':
main()
| 35 | 224 | 0.742857 |
b9c964b752a9622a17123202e7aae50d1718a48a
| 1,345 |
py
|
Python
|
question3.py
|
nosisky/algo-solution
|
a9276f73ba63b1a0965c194885aea6cadfab0e0b
|
[
"MIT"
] | 1 |
2019-08-14T12:32:49.000Z
|
2019-08-14T12:32:49.000Z
|
question3.py
|
nosisky/algo-solution
|
a9276f73ba63b1a0965c194885aea6cadfab0e0b
|
[
"MIT"
] | null | null | null |
question3.py
|
nosisky/algo-solution
|
a9276f73ba63b1a0965c194885aea6cadfab0e0b
|
[
"MIT"
] | null | null | null |
# A string S consisting of N characters is considered to be properly nested if any of the following conditions is true:
# S is empty;
# S has the form "(U)" or "[U]" or "{U}" where U is a properly nested string; S has the form "VW" where V and W are properly nested strings.
# For example, the string "{[()()]}" is properly nested but "([)()]" is not.
# Write a function:
# int solution(char *S);
# that, given a string S consisting of N characters, returns 1 if S is properly nested and 0 otherwise.
# For example, given S = "{[()()]}", the function should return 1 and given S = "([)()]", the function should return 0, as explained above.
# Assume that:
# N is an integer within the range [0..200,000];
# string S consists only of the following characters: "(", "{", "[", "]", "}" and/or ")". Complexity:
# expected worst-case time complexity is O(N);
# expected worst-case space complexity is O(N) (not counting the storage required for input arguments).
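# The original solution() body was not included in this extract; the stack-
# based sketch below implements the nesting check described above.
def solution(S):
    pairs = {')': '(', ']': '[', '}': '{'}
    stack = []
    for ch in S:
        if ch in '([{':
            stack.append(ch)
        elif not stack or stack.pop() != pairs[ch]:
            return 0
    return 1 if not stack else 0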
print(solution("()[]{}"))
| 42.03125 | 140 | 0.66171 |
b9ca4ff833bf2ee267f7f1b8ecf69069cd8c4b31
| 1,996 |
py
|
Python
|
Teil_27_Game_of_Life_3d.py
|
chrMenzel/A-beautiful-code-in-Python
|
92ee43c1fb03c299384d4de8bebb590c5ba1b623
|
[
"MIT"
] | 50 |
2018-12-23T15:46:16.000Z
|
2022-03-28T15:49:59.000Z
|
Teil_27_Game_of_Life_3d.py
|
chrMenzel/A-beautiful-code-in-Python
|
92ee43c1fb03c299384d4de8bebb590c5ba1b623
|
[
"MIT"
] | 9 |
2018-12-03T10:31:29.000Z
|
2022-01-20T14:41:33.000Z
|
Teil_27_Game_of_Life_3d.py
|
chrMenzel/A-beautiful-code-in-Python
|
92ee43c1fb03c299384d4de8bebb590c5ba1b623
|
[
"MIT"
] | 69 |
2019-02-02T11:59:09.000Z
|
2022-03-28T15:54:28.000Z
|
import bpy
import random as rnd
from collections import Counter
import itertools as iter
feld_von, feld_bis = -4, 4
spielfeld_von, spielfeld_bis = feld_von-6, feld_bis+6
anz = int((feld_bis-feld_von)**3*.3)
spielfeld = {(rnd.randint(feld_von, feld_bis), rnd.randint(
feld_von, feld_bis), rnd.randint(feld_von, feld_bis)) for _ in range(anz)}
animate_frame = 8
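# NOTE (assumption): nchsteGeneration() ("naechsteGeneration") and
# scale_rotate() are used below but their definitions did not survive in
# this extract. These are minimal reconstructions; the 3-D life rule here
# (birth on exactly 5 neighbours, survival on 4 or 5) is a guess, not
# necessarily the rule the author chose.
def nchsteGeneration(zellen):
    nachbarn = Counter((x + dx, y + dy, z + dz)
                       for x, y, z in zellen
                       for dx, dy, dz in iter.product((-1, 0, 1), repeat=3)
                       if (dx, dy, dz) != (0, 0, 0))
    return {pos for pos, n in nachbarn.items()
            if n == 5 or (n == 4 and pos in zellen)}
def scale_rotate(ob, sc, rot, frame):
    # keyframe the cube's scale and z-rotation at the given frame
    ob.scale = (sc, sc, sc)
    ob.rotation_euler[2] += rot
    ob.keyframe_insert(data_path="scale", frame=frame)
    ob.keyframe_insert(data_path="rotation_euler", frame=frame)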
bpy.ops.mesh.primitive_cube_add(size=0.001, location=(0, 0, 0))
orig_cube = bpy.context.active_object
n = "cube"
m = orig_cube.data.copy()
cubes = {}
for x,y,z in iter.product(range(spielfeld_von,spielfeld_bis), repeat = 3):
o = bpy.data.objects.new(n, m)
o.location = (x, y, z)
cubes[x, y, z] = o
bpy.context.collection.objects.link(o)
o.select_set(False)
for i in range(200):
print(f'Durchlauf No. {i}, Anz. Zellen = {len(spielfeld)}')
spielfeld2 = nchsteGeneration(spielfeld)
dead = spielfeld - spielfeld2
new = spielfeld2 - spielfeld
spielfeld = spielfeld2
if not new and not dead:
break
for zelle in new | dead:
if zelle not in cubes:
continue
ob = cubes[zelle]
if zelle in new:
scale_rotate(ob, 0.001, -3.141/2, (i-1)*animate_frame)
scale_rotate(ob, 750, 3.141/2, i * animate_frame)
else:
scale_rotate(ob, 750, 3.141/2, (i-1) * animate_frame)
scale_rotate(ob, 0.001, -3.141/2, i * animate_frame)
if not spielfeld:
break
bpy.context.scene.frame_current = 1
| 28.927536 | 102 | 0.67986 |
b9ca98991068e30844d7bcc8e336f70de5eef5a9
| 1,824 |
py
|
Python
|
power_perceiver/xr_batch_processor/reduce_num_pv_systems.py
|
openclimatefix/power_perceiver
|
bafcdfaf6abf42fbab09da641479f74709ddd395
|
[
"MIT"
] | null | null | null |
power_perceiver/xr_batch_processor/reduce_num_pv_systems.py
|
openclimatefix/power_perceiver
|
bafcdfaf6abf42fbab09da641479f74709ddd395
|
[
"MIT"
] | 33 |
2022-02-16T07:51:41.000Z
|
2022-03-31T11:24:11.000Z
|
power_perceiver/xr_batch_processor/reduce_num_pv_systems.py
|
openclimatefix/power_perceiver
|
bafcdfaf6abf42fbab09da641479f74709ddd395
|
[
"MIT"
] | null | null | null |
from dataclasses import dataclass
import numpy as np
import xarray as xr
from power_perceiver.load_prepared_batches.data_sources import PV
from power_perceiver.load_prepared_batches.data_sources.prepared_data_source import XarrayBatch
| 39.652174 | 97 | 0.721491 |
b9cc65aafe29eb9820f902e036880e65947e1e2d
| 857 |
py
|
Python
|
HelloWorld_python/log/demo_log_3.py
|
wang153723482/HelloWorld_my
|
b8642ad9742f95cfebafc61f25b00e917485e50c
|
[
"Apache-2.0"
] | null | null | null |
HelloWorld_python/log/demo_log_3.py
|
wang153723482/HelloWorld_my
|
b8642ad9742f95cfebafc61f25b00e917485e50c
|
[
"Apache-2.0"
] | null | null | null |
HelloWorld_python/log/demo_log_3.py
|
wang153723482/HelloWorld_my
|
b8642ad9742f95cfebafc61f25b00e917485e50c
|
[
"Apache-2.0"
] | null | null | null |
#encoding=utf8
#
import logging
import time
from logging.handlers import TimedRotatingFileHandler
#----------------------------------------------------------------------
if __name__ == "__main__":
logFilePath = "timed_test.log"
logger = logging.getLogger("YouLoggerName")
logger.setLevel(logging.INFO)
handler = TimedRotatingFileHandler(logFilePath,
when="d",
interval=1,
backupCount=7)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
logger.addHandler(handler)
for i in range(6):
logger.info("This is a info!")
logger.debug("This is a debug!")
# time.sleep(61)
| 29.551724 | 89 | 0.536756 |
b9cd3a4333e6169069ae770751a00e82db1b741a
| 736 |
py
|
Python
|
bot_da_os/statemachine/person/person_action.py
|
Atsocs/bot-da-os
|
e6d54057f4a3b703f303e9944a39e291ac87c40f
|
[
"MIT"
] | null | null | null |
bot_da_os/statemachine/person/person_action.py
|
Atsocs/bot-da-os
|
e6d54057f4a3b703f303e9944a39e291ac87c40f
|
[
"MIT"
] | null | null | null |
bot_da_os/statemachine/person/person_action.py
|
Atsocs/bot-da-os
|
e6d54057f4a3b703f303e9944a39e291ac87c40f
|
[
"MIT"
] | null | null | null |
from operator import eq
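# NOTE (assumption): the PersonAction class definition did not survive in
# this extract; this minimal reconstruction backs the enumeration-of-
# instances idiom below, using operator.eq for the equality check.
class PersonAction:
    def __init__(self, action):
        self.action = action
    def __str__(self):
        return self.action
    __repr__ = __str__
    def __eq__(self, other):
        return isinstance(other, PersonAction) and eq(self.action, other.action)
    def __hash__(self):
        return hash(self.action)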
# Static fields; an enumeration of instances:
PersonAction.compliment = PersonAction("person compliments")
PersonAction.informing = PersonAction("person gives information about the service order")
PersonAction.query = PersonAction("person wants to know about his/her order")
PersonAction.angry = PersonAction("person is pissed off")
| 29.44 | 89 | 0.72962 |
b9cd7df81437d8aa9311f772f8fd75744e9a395a
| 5,108 |
py
|
Python
|
MyServer.py
|
bisw1jit/MyServer
|
cbd7bc4015482ce8f24314894148f7e20ef66b21
|
[
"MIT"
] | 3 |
2019-11-09T17:29:55.000Z
|
2021-12-19T21:52:12.000Z
|
MyServer.py
|
bisw1jit/MyServer
|
cbd7bc4015482ce8f24314894148f7e20ef66b21
|
[
"MIT"
] | null | null | null |
MyServer.py
|
bisw1jit/MyServer
|
cbd7bc4015482ce8f24314894148f7e20ef66b21
|
[
"MIT"
] | null | null | null |
# Tool Name :- MyServer
# Author :- LordReaper
# Date :- 13/11/2018 - 9/11/2019
# Powered By :- H1ckPro Software's
import sys
import os
from time import sleep
from core.system import *
if len(sys.argv)>1:
pass
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
sys.exit()
if sys.argv[1]=="-s":
if len(sys.argv)==2:
if system=="ubuntu":
os.system("sudo python3 core/s.py "+sys.argv[1])
else:
os.system("python3 core/s.py "+sys.argv[1])
elif len(sys.argv)==3:
if sys.argv[2]=="apache":
if system=="ubuntu":
os.system("sudo python3 core/server.py -apa")
else:
os.system("python3 core/server.py -apa")
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif len(sys.argv)==6:
if sys.argv[2]=="-php":
if system=="ubuntu":
os.system("sudo python3 core/server.py -php "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
os.system("python3 core/server.py -php "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
elif sys.argv[2]=="-py":
if system=="ubuntu":
os.system("sudo python3 core/server.py -py "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
os.system("python3 core/server.py -py "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
elif sys.argv[2]=="-ng":
if system=="ubuntu":
os.system("sudo python3 core/server.py -ng "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
os.system("python3 core/server.py -ng "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif len(sys.argv)==5:
if system=="ubuntu":
os.system("sudo python3 core/server.py -d "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
os.system("python3 core/server.py -d "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif sys.argv[1]=="-h":
if len(sys.argv)==2:
if system=="ubuntu":
os.system("sudo python3 core/s.py "+sys.argv[1])
else:
os.system("python3 core/s.py "+sys.argv[1])
elif len(sys.argv)==5:
if system=="ubuntu":
os.system("sudo python3 core/host.py "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
os.system("python3 core/host.py "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
print ("error : invalid arguments")
print ("use : myserver --help for more information")
elif sys.argv[1]=="-db":
if len(sys.argv)==3:
if sys.argv[2]=="start":
if system=="ubuntu":
os.system("sudo python3 core/mysql.py "+sys.argv[2])
else:
os.system("python3 core/mysql.py "+sys.argv[2])
elif sys.argv[2]=="stop":
if system=="ubuntu":
os.system("sudo python3 core/mysql.py "+sys.argv[2])
else:
os.system("python3 core/mysql.py "+sys.argv[2])
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif sys.argv[1]=="rm":
if len(sys.argv)==3:
if sys.argv[2]=="-T" or sys.argv[2]=="-t":
if system=="ubuntu":
os.system("sudo python3 core/un.py")
else:
os.system("python3 core/un.py")
else:
print ("error : invalid arguments")
print ("use : myserver --help for more information")
else:
print ("error : invalid arguments")
print ("use : myserver --help for more information")
elif sys.argv[1]=="update":
if system=="ubuntu":
os.system("sudo python3 core/upd.py")
else:
os.system("python3 core/upd.py")
elif sys.argv[1]=="start":
if system=="ubuntu":
os.system("sudo python3 .MyServer.py")
else:
os.system("python3 .MyServer.py")
elif sys.argv[1]=="--help" or sys.argv[1]=="-help" or sys.argv[1]=="help":
print ("")
print ("Usage: myserver [command]... [arguments]...")
print ("")
print (" Commands:")
print (" -s <hostname> <port> <path> to start default localhost server.")
print (" -s -ng <hostname> <port> <path> to start php localhost server.")
print (" -s -php <hostname> <port> <path> to start php localhost server.")
print (" -s -py <hostname> <port> <path> to start python localhost server.")
print (" -h <hostname> <localhost_port> <port> to access localhost server on internet.")
print (" -db [start/stop] to start/stop MySQL database server.")
print (" -s apache to start apache web server.")
print (" update update MyServer.")
print (" rm -t uninstall MyServer.")
print (" start start MyServer menu.")
print ("")
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
| 34.748299 | 98 | 0.576155 |
b9cda5cbb2749647d6a78abf80d9eb5c24205425
| 341 |
py
|
Python
|
tests/test_gen_epub.py
|
ffreemt/tmx2epub
|
55a59cb2a9b7f42031a65f64c29e5c43fdb487ea
|
[
"MIT"
] | null | null | null |
tests/test_gen_epub.py
|
ffreemt/tmx2epub
|
55a59cb2a9b7f42031a65f64c29e5c43fdb487ea
|
[
"MIT"
] | null | null | null |
tests/test_gen_epub.py
|
ffreemt/tmx2epub
|
55a59cb2a9b7f42031a65f64c29e5c43fdb487ea
|
[
"MIT"
] | null | null | null |
""" test gen_epub. """
from tmx2epub.gen_epub import gen_epub
def test_gen_epub2():
""" test_gen_epub2. """
from pathlib import Path
infile = r"tests\2.tmx"
stem = Path(infile).absolute().stem
outfile = f"{Path(infile).absolute().parent / stem}.epub"
assert gen_epub(infile, debug=True) == outfile
# assert 0
| 22.733333 | 61 | 0.653959 |
b9cde2fbd07898c518510cadb194827f6566c927
| 716 |
py
|
Python
|
pub_sub/python/http/checkout/app.py
|
amulyavarote/quickstarts
|
c21a8f58d515b28eaa8a3680388fa06995c2331b
|
[
"Apache-2.0"
] | null | null | null |
pub_sub/python/http/checkout/app.py
|
amulyavarote/quickstarts
|
c21a8f58d515b28eaa8a3680388fa06995c2331b
|
[
"Apache-2.0"
] | null | null | null |
pub_sub/python/http/checkout/app.py
|
amulyavarote/quickstarts
|
c21a8f58d515b28eaa8a3680388fa06995c2331b
|
[
"Apache-2.0"
] | null | null | null |
import json
import time
import random
import logging
import requests
import os
logging.basicConfig(level=logging.INFO)
base_url = os.getenv('BASE_URL', 'http://localhost') + ':' + os.getenv(
'DAPR_HTTP_PORT', '3500')
PUBSUB_NAME = 'order_pub_sub'
TOPIC = 'orders'
logging.info('Publishing to baseURL: %s, Pubsub Name: %s, Topic: %s' % (
base_url, PUBSUB_NAME, TOPIC))
for i in range(1, 10):
order = {'orderId': i}
# Publish an event/message using Dapr PubSub via HTTP Post
result = requests.post(
url='%s/v1.0/publish/%s/%s' % (base_url, PUBSUB_NAME, TOPIC),
json=order
)
logging.info('Published data: ' + json.dumps(order))
time.sleep(1)
| 25.571429 | 72 | 0.642458 |
b9ce404499c062b33e8623b446d27dfebe6f033f
| 52,312 |
py
|
Python
|
jj.py
|
smailedge/pro
|
f86347d4368bc97aa860b37caa9ba10e84a93738
|
[
"Unlicense"
] | 1 |
2019-08-14T04:17:06.000Z
|
2019-08-14T04:17:06.000Z
|
jj.py
|
smailedge/pro
|
f86347d4368bc97aa860b37caa9ba10e84a93738
|
[
"Unlicense"
] | null | null | null |
jj.py
|
smailedge/pro
|
f86347d4368bc97aa860b37caa9ba10e84a93738
|
[
"Unlicense"
] | 7 |
2018-10-27T11:58:45.000Z
|
2021-02-11T19:45:30.000Z
|
# -*- coding: utf-8 -*-
from linepy import *
from datetime import datetime
from time import sleep
from humanfriendly import format_timespan, format_size, format_number, format_length
import time, random, sys, json, codecs, threading, glob, re, string, os, requests, subprocess, six, ast, pytz, urllib, urllib.parse
#==============================================================================#
botStart = time.time()
cl = LINE()
#cl = LINE("TOKEN KAMU")
#cl = LINE("Email","Password")
cl.log("Auth Token : " + str(cl.authToken))
channelToken = cl.getChannelResult()
cl.log("Channel Token : " + str(channelToken))
clMID = cl.profile.mid
clProfile = cl.getProfile()
lineSettings = cl.getSettings()
oepoll = OEPoll(cl)
#==============================================================================#
readOpen = codecs.open("read.json","r","utf-8")
settingsOpen = codecs.open("temp.json","r","utf-8")
read = json.load(readOpen)
settings = json.load(settingsOpen)
myProfile = {
"displayName": "",
"statusMessage": "",
"pictureStatus": ""
}
msg_dict = {}
bl = [""]
myProfile["displayName"] = clProfile.displayName
myProfile["statusMessage"] = clProfile.statusMessage
myProfile["pictureStatus"] = clProfile.pictureStatus
#==============================================================================#
wait = {
"share":False,
"sender" :{},
}
admin =['ud5ff1dff426cf9e3030c7ac2a61512f0','ua10c2ad470b4b6e972954e1140ad1891',clMID]
owners = ["ua10c2ad470b4b6e972954e1140ad1891","ud5ff1dff426cf9e3030c7ac2a61512f0"]
#if clMID not in owners:
# python = sys.executable
# os.execl(python, python, *sys.argv)
#==============================================================================#
#==============================================================================#
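# NOTE: ~50 kB of command/event handling was truncated from this dump. Minimal
# hypothetical stubs so the polling loop below still runs; in the original,
# lineBot() dispatches chat commands and logError() records tracebacks:
def logError(error):
    cl.log("[ERROR] " + str(error))

def lineBot(op):
    # Operation dispatch (op.type checks, command parsing) elided in this dump.
    pass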
while True:
try:
ops = oepoll.singleTrace(count=50)
if ops is not None:
for op in ops:
lineBot(op)
oepoll.setRevision(op.revision)
except Exception as e:
logError(e)
| 51.742829 | 168 | 0.404267 |
b9cea3f3b51bf703897e952ed45d88260e3502a1
| 1,190 |
py
|
Python
|
dd_app/messaging/backend.py
|
datadealer/dd_app
|
3806b9b9df165a49f0fca8a249170b4ccd4d0177
|
[
"Artistic-2.0"
] | 2 |
2018-12-17T10:10:49.000Z
|
2018-12-17T11:18:32.000Z
|
dd_app/messaging/backend.py
|
datadealer/dd_app
|
3806b9b9df165a49f0fca8a249170b4ccd4d0177
|
[
"Artistic-2.0"
] | null | null | null |
dd_app/messaging/backend.py
|
datadealer/dd_app
|
3806b9b9df165a49f0fca8a249170b4ccd4d0177
|
[
"Artistic-2.0"
] | 1 |
2021-06-06T22:28:12.000Z
|
2021-06-06T22:28:12.000Z
|
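# NOTE: the enclosing class was truncated in this dump. A minimal hypothetical
# container so the method below attaches to something concrete; the class name
# and __init__ are assumptions (the connection is presumably a Redis-style
# client exposing publish()):
class MessagingBackend(object):
    def __init__(self, connection):
        self.connection = connection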
    def send(self, channel_id, payload):
        # Fanout emitter: deliver payload to every subscriber of channel_id.
        return self.connection.publish(channel_id, payload)
| 28.333333 | 73 | 0.608403 |
b9cf5fa54caecef97e6454178f438ce16bc99d7b
| 241 |
py
|
Python
|
fetch_data.py
|
bitfag/bt-macd-binance
|
eeffe52f8e561ff521629839078ff886e7bf700e
|
[
"MIT"
] | null | null | null |
fetch_data.py
|
bitfag/bt-macd-binance
|
eeffe52f8e561ff521629839078ff886e7bf700e
|
[
"MIT"
] | null | null | null |
fetch_data.py
|
bitfag/bt-macd-binance
|
eeffe52f8e561ff521629839078ff886e7bf700e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from btmacd.binance_fetcher import BinanceFetcher
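# NOTE: main() was truncated in this dump. A minimal hypothetical
# reconstruction; the BinanceFetcher constructor arguments and fetch()
# method name are assumptions, not the original code:
def main():
    fetcher = BinanceFetcher()
    fetcher.fetch()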
if __name__ == "__main__":
main()
| 18.538462 | 93 | 0.705394 |
b9d0d7e9fc82e29bf1385d169d21f03d43d467e2
| 25,508 |
py
|
Python
|
tensorflow_probability/python/mcmc/diagnostic.py
|
Frightera/probability
|
deac4562cbc1056e6abebc7450218d38444fe65d
|
[
"Apache-2.0"
] | 1 |
2022-03-06T15:37:18.000Z
|
2022-03-06T15:37:18.000Z
|
tensorflow_probability/python/mcmc/diagnostic.py
|
Frightera/probability
|
deac4562cbc1056e6abebc7450218d38444fe65d
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/mcmc/diagnostic.py
|
Frightera/probability
|
deac4562cbc1056e6abebc7450218d38444fe65d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Utilities for Markov Chain Monte Carlo (MCMC) sampling.
@@effective_sample_size
@@potential_scale_reduction
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import stats
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import nest_util
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow.python.util import nest # pylint: disable=g-direct-tensorflow-import
__all__ = [
'effective_sample_size',
'potential_scale_reduction',
]
def effective_sample_size(states,
filter_threshold=0.,
filter_beyond_lag=None,
filter_beyond_positive_pairs=False,
cross_chain_dims=None,
validate_args=False,
name=None):
"""Estimate a lower bound on effective sample size for each independent chain.
Roughly speaking, "effective sample size" (ESS) is the size of an iid sample
with the same variance as `state`.
More precisely, given a stationary sequence of possibly correlated random
variables `X_1, X_2, ..., X_N`, identically distributed, ESS is the
number such that
```
Variance{ N**-1 * Sum{X_i} } = ESS**-1 * Variance{ X_1 }.
```
If the sequence is uncorrelated, `ESS = N`. If the sequence is positively
auto-correlated, `ESS` will be less than `N`. If there are negative
correlations, then `ESS` can exceed `N`.
Some math shows that, with `R_k` the auto-correlation sequence,
`R_k := Covariance{X_1, X_{1+k}} / Variance{X_1}`, we have
```
ESS(N) = N / [ 1 + 2 * ( (N - 1) / N * R_1 + ... + 1 / N * R_{N-1} ) ]
```
This function estimates the above by first estimating the auto-correlation.
Since `R_k` must be estimated using only `N - k` samples, it becomes
progressively noisier for larger `k`. For this reason, the summation over
`R_k` should be truncated at some number `filter_beyond_lag < N`. This
function provides two methods to perform this truncation.
* `filter_threshold` -- since many MCMC methods generate chains where `R_k >
0`, a reasonable criterion is to truncate at the first index where the
estimated auto-correlation becomes negative. This method does not estimate
the `ESS` of super-efficient chains (where `ESS > N`) correctly.
* `filter_beyond_positive_pairs` -- reversible MCMC chains produce
an auto-correlation sequence with the property that pairwise sums of the
elements of that sequence are positive [Geyer][1], i.e.
`R_{2k} + R_{2k + 1} > 0` for `k in {0, ..., N/2}`. Deviations are only
possible due to noise. This method truncates the auto-correlation sequence
where the pairwise sums become non-positive.
The arguments `filter_beyond_lag`, `filter_threshold` and
`filter_beyond_positive_pairs` are filters intended to remove noisy tail terms
from `R_k`. You can combine `filter_beyond_lag` with `filter_threshold` or
  `filter_beyond_positive_pairs`. E.g., combining `filter_beyond_lag` and
`filter_beyond_positive_pairs` means that terms are removed if they were to be
filtered under the `filter_beyond_lag` OR `filter_beyond_positive_pairs`
criteria.
This function can also compute cross-chain ESS following
[Vehtari et al. (2019)][2] by specifying the `cross_chain_dims` argument.
Cross-chain ESS takes into account the cross-chain variance to reduce the ESS
in cases where the chains are not mixing well. In general, this will be a
smaller number than computing the ESS for individual chains and then summing
them. In an extreme case where the chains have fallen into K non-mixing modes,
this function will return ESS ~ K. Even when chains are mixing well it is
  still preferable to compute cross-chain ESS via this method because it will
reduce the noise in the estimate of `R_k`, reducing the need for truncation.
Args:
states: `Tensor` or Python structure of `Tensor` objects. Dimension zero
should index identically distributed states.
filter_threshold: `Tensor` or Python structure of `Tensor` objects. Must
broadcast with `state`. The sequence of auto-correlations is truncated
after the first appearance of a term less than `filter_threshold`.
Setting to `None` means we use no threshold filter. Since `|R_k| <= 1`,
setting to any number less than `-1` has the same effect. Ignored if
`filter_beyond_positive_pairs` is `True`.
filter_beyond_lag: `Tensor` or Python structure of `Tensor` objects. Must
be `int`-like and scalar valued. The sequence of auto-correlations is
truncated to this length. Setting to `None` means we do not filter based
on the size of lags.
filter_beyond_positive_pairs: Python boolean. If `True`, only consider the
initial auto-correlation sequence where the pairwise sums are positive.
cross_chain_dims: An integer `Tensor` or a structure of integer `Tensors`
corresponding to each state component. If a list of `states` is provided,
then this argument should also be a list of the same length. Which
dimensions of `states` to treat as independent chains that ESS will be
summed over. If `None`, no summation is performed. Note this requires at
least 2 chains.
validate_args: Whether to add runtime checks of argument validity. If False,
and arguments are incorrect, correct behavior is not guaranteed.
name: `String` name to prepend to created ops.
Returns:
ess: `Tensor` structure parallel to `states`. The effective sample size of
each component of `states`. If `cross_chain_dims` is None, the shape will
be `states.shape[1:]`. Otherwise, the shape is `tf.reduce_mean(states,
cross_chain_dims).shape[1:]`.
Raises:
ValueError: If `states` and `filter_threshold` or `states` and
`filter_beyond_lag` are both structures of different shapes.
ValueError: If `cross_chain_dims` is not `None` and there are less than 2
chains.
#### Examples
We use ESS to estimate standard error.
  ```python
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
target = tfd.MultivariateNormalDiag(scale_diag=[1., 2.])
# Get 1000 states from one chain.
states = tfp.mcmc.sample_chain(
num_burnin_steps=200,
num_results=1000,
current_state=tf.constant([0., 0.]),
trace_fn=None,
kernel=tfp.mcmc.HamiltonianMonteCarlo(
target_log_prob_fn=target.log_prob,
step_size=0.05,
num_leapfrog_steps=20))
states.shape
==> (1000, 2)
ess = effective_sample_size(states, filter_beyond_positive_pairs=True)
==> Shape (2,) Tensor
  mean, variance = tf.nn.moments(states, axes=[0])
standard_error = tf.sqrt(variance / ess)
```
#### References
[1]: Charles J. Geyer, Practical Markov chain Monte Carlo (with discussion).
Statistical Science, 7:473-511, 1992.
[2]: Aki Vehtari, Andrew Gelman, Daniel Simpson, Bob Carpenter, Paul-Christian
Burkner. Rank-normalization, folding, and localization: An improved R-hat
for assessing convergence of MCMC, 2019. Retrieved from
http://arxiv.org/abs/1903.08008
"""
if cross_chain_dims is None:
cross_chain_dims = nest_util.broadcast_structure(states, None)
filter_beyond_lag = nest_util.broadcast_structure(states, filter_beyond_lag)
filter_threshold = nest_util.broadcast_structure(states, filter_threshold)
filter_beyond_positive_pairs = nest_util.broadcast_structure(
states, filter_beyond_positive_pairs)
  # Process items, one at a time.
  def single_state(*args):
    return _effective_sample_size_single_state(
        *args, validate_args=validate_args)
with tf.name_scope('effective_sample_size' if name is None else name):
return nest.map_structure_up_to(
states,
single_state,
states, filter_beyond_lag, filter_threshold,
filter_beyond_positive_pairs, cross_chain_dims)
def _effective_sample_size_single_state(states, filter_beyond_lag,
filter_threshold,
filter_beyond_positive_pairs,
cross_chain_dims,
validate_args):
"""ESS computation for one single Tensor argument."""
with tf.name_scope('effective_sample_size_single_state'):
states = tf.convert_to_tensor(states, name='states')
dt = states.dtype
# filter_beyond_lag == None ==> auto_corr is the full sequence.
auto_cov = stats.auto_correlation(
states, axis=0, max_lags=filter_beyond_lag, normalize=False)
n = _axis_size(states, axis=0)
if cross_chain_dims is not None:
num_chains = _axis_size(states, cross_chain_dims)
num_chains_ = tf.get_static_value(num_chains)
assertions = []
msg = ('When `cross_chain_dims` is not `None`, there must be > 1 chain '
'in `states`.')
if num_chains_ is not None:
if num_chains_ < 2:
raise ValueError(msg)
elif validate_args:
assertions.append(
assert_util.assert_greater(num_chains, 1., message=msg))
with tf.control_dependencies(assertions):
# We're computing the R[k] from equation 10 of Vehtari et al.
# (2019):
#
# R[k] := 1 - (W - 1/C * Sum_{c=1}^C s_c**2 R[k, c]) / (var^+),
#
# where:
# C := number of chains
# N := length of chains
# x_hat[c] := 1 / N Sum_{n=1}^N x[n, c], chain mean.
# x_hat := 1 / C Sum_{c=1}^C x_hat[c], overall mean.
# W := 1/C Sum_{c=1}^C s_c**2, within-chain variance.
# B := N / (C - 1) Sum_{c=1}^C (x_hat[c] - x_hat)**2, between chain
# variance.
# s_c**2 := 1 / (N - 1) Sum_{n=1}^N (x[n, c] - x_hat[c])**2, chain
# variance
# R[k, m] := auto_corr[k, m, ...], auto-correlation indexed by chain.
# var^+ := (N - 1) / N * W + B / N
cross_chain_dims = ps.non_negative_axis(
cross_chain_dims, ps.rank(states))
# B / N
between_chain_variance_div_n = _reduce_variance(
tf.reduce_mean(states, axis=0),
biased=False, # This makes the denominator be C - 1.
axis=cross_chain_dims - 1)
# W * (N - 1) / N
biased_within_chain_variance = tf.reduce_mean(auto_cov[0],
cross_chain_dims - 1)
# var^+
approx_variance = (
biased_within_chain_variance + between_chain_variance_div_n)
# 1/C * Sum_{c=1}^C s_c**2 R[k, c]
mean_auto_cov = tf.reduce_mean(auto_cov, cross_chain_dims)
auto_corr = 1. - (biased_within_chain_variance -
mean_auto_cov) / approx_variance
else:
auto_corr = auto_cov / auto_cov[:1]
num_chains = 1
# With R[k] := auto_corr[k, ...],
# ESS = N / {1 + 2 * Sum_{k=1}^N R[k] * (N - k) / N}
# = N / {-1 + 2 * Sum_{k=0}^N R[k] * (N - k) / N} (since R[0] = 1)
# approx N / {-1 + 2 * Sum_{k=0}^M R[k] * (N - k) / N}
# where M is the filter_beyond_lag truncation point chosen above.
# Get the factor (N - k) / N, and give it shape [M, 1,...,1], having total
# ndims the same as auto_corr
k = tf.range(0., _axis_size(auto_corr, axis=0))
nk_factor = (n - k) / n
if tensorshape_util.rank(auto_corr.shape) is not None:
new_shape = [-1] + [1] * (tensorshape_util.rank(auto_corr.shape) - 1)
else:
new_shape = tf.concat(
([-1],
tf.ones([tf.rank(auto_corr) - 1], dtype=tf.int32)),
axis=0)
nk_factor = tf.reshape(nk_factor, new_shape)
weighted_auto_corr = nk_factor * auto_corr
if filter_beyond_positive_pairs:
# Pairwise sums are all positive for auto-correlation spectra derived from
# reversible MCMC chains.
# E.g. imagine the pairwise sums are [0.2, 0.1, -0.1, -0.2]
# Step 1: mask = [False, False, True, True]
mask = _sum_pairs(auto_corr) < 0.
# Step 2: mask = [0, 0, 1, 1]
mask = tf.cast(mask, dt)
# Step 3: mask = [0, 0, 1, 2]
mask = tf.cumsum(mask, axis=0)
# Step 4: mask = [1, 1, 0, 0]
mask = tf.maximum(1. - mask, 0.)
# N.B. this reduces the length of weighted_auto_corr by a factor of 2.
# It still works fine in the formula below.
weighted_auto_corr = _sum_pairs(weighted_auto_corr) * mask
elif filter_threshold is not None:
filter_threshold = tf.convert_to_tensor(
filter_threshold, dtype=dt, name='filter_threshold')
# Get a binary mask to zero out values of auto_corr below the threshold.
# mask[i, ...] = 1 if auto_corr[j, ...] > threshold for all j <= i,
# mask[i, ...] = 0, otherwise.
# So, along dimension zero, the mask will look like [1, 1, ..., 0, 0,...]
# Building step by step,
# Assume auto_corr = [1, 0.5, 0.0, 0.3], and filter_threshold = 0.2.
# Step 1: mask = [False, False, True, False]
mask = auto_corr < filter_threshold
# Step 2: mask = [0, 0, 1, 0]
mask = tf.cast(mask, dtype=dt)
# Step 3: mask = [0, 0, 1, 1]
mask = tf.cumsum(mask, axis=0)
# Step 4: mask = [1, 1, 0, 0]
mask = tf.maximum(1. - mask, 0.)
weighted_auto_corr *= mask
return num_chains * n / (-1 + 2 * tf.reduce_sum(weighted_auto_corr, axis=0))
def potential_scale_reduction(chains_states,
independent_chain_ndims=1,
split_chains=False,
validate_args=False,
name=None):
"""Gelman and Rubin (1992)'s potential scale reduction for chain convergence.
Given `N > 1` states from each of `C > 1` independent chains, the potential
scale reduction factor, commonly referred to as R-hat, measures convergence of
the chains (to the same target) by testing for equality of means.
Specifically, R-hat measures the degree to which variance (of the means)
between chains exceeds what one would expect if the chains were identically
distributed. See [Gelman and Rubin (1992)][1]; [Brooks and Gelman (1998)][2].
Some guidelines:
* The initial state of the chains should be drawn from a distribution
overdispersed with respect to the target.
* If all chains converge to the target, then as `N --> infinity`, R-hat --> 1.
Before that, R-hat > 1 (except in pathological cases, e.g. if the chain
paths were identical).
* The above holds for any number of chains `C > 1`. Increasing `C` does
improve effectiveness of the diagnostic.
* Sometimes, R-hat < 1.2 is used to indicate approximate convergence, but of
course this is problem-dependent. See [Brooks and Gelman (1998)][2].
* R-hat only measures non-convergence of the mean. If higher moments, or
other statistics are desired, a different diagnostic should be used. See
[Brooks and Gelman (1998)][2].
Args:
chains_states: `Tensor` or Python structure of `Tensor`s representing the
states of a Markov Chain at each result step. The `ith` state is
assumed to have shape `[Ni, Ci1, Ci2,...,CiD] + A`.
Dimension `0` indexes the `Ni > 1` result steps of the Markov Chain.
Dimensions `1` through `D` index the `Ci1 x ... x CiD` independent
chains to be tested for convergence to the same target.
The remaining dimensions, `A`, can have any shape (even empty).
independent_chain_ndims: Integer type `Tensor` with value `>= 1` giving the
number of dimensions, from `dim = 1` to `dim = D`, holding independent
chain results to be tested for convergence.
split_chains: Python `bool`. If `True`, divide samples from each chain into
first and second halves, treating these as separate chains. This makes
R-hat more robust to non-stationary chains, and is recommended in [3].
validate_args: Whether to add runtime checks of argument validity. If False,
and arguments are incorrect, correct behavior is not guaranteed.
name: `String` name to prepend to created tf. Default:
`potential_scale_reduction`.
Returns:
`Tensor` structure parallel to `chains_states` representing the
R-hat statistic for the state(s). Same `dtype` as `state`, and
shape equal to `state.shape[1 + independent_chain_ndims:]`.
Raises:
ValueError: If `independent_chain_ndims < 1`.
#### Examples
Diagnosing convergence by monitoring 10 chains that each attempt to
sample from a 2-variate normal.
```python
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
target = tfd.MultivariateNormalDiag(scale_diag=[1., 2.])
# Get 10 (2x) overdispersed initial states.
initial_state = target.sample(10) * 2.
==> (10, 2)
# Get 1000 samples from the 10 independent chains.
chains_states = tfp.mcmc.sample_chain(
num_burnin_steps=200,
num_results=1000,
current_state=initial_state,
trace_fn=None,
kernel=tfp.mcmc.HamiltonianMonteCarlo(
target_log_prob_fn=target.log_prob,
step_size=0.05,
num_leapfrog_steps=20))
chains_states.shape
==> (1000, 10, 2)
rhat = tfp.mcmc.diagnostic.potential_scale_reduction(
chains_states, independent_chain_ndims=1)
# The second dimension needed a longer burn-in.
rhat.eval()
==> [1.05, 1.3]
```
To see why R-hat is reasonable, let `X` be a random variable drawn uniformly
from the combined states (combined over all chains). Then, in the limit
`N, C --> infinity`, with `E`, `Var` denoting expectation and variance,
```R-hat = ( E[Var[X | chain]] + Var[E[X | chain]] ) / E[Var[X | chain]].```
Using the law of total variance, the numerator is the variance of the combined
  states, and the denominator is the total variance minus the variance of the
  individual chain means. If the chains are all drawing from the same
distribution, they will have the same mean, and thus the ratio should be one.
#### References
[1]: Stephen P. Brooks and Andrew Gelman. General Methods for Monitoring
Convergence of Iterative Simulations. _Journal of Computational and
Graphical Statistics_, 7(4), 1998.
[2]: Andrew Gelman and Donald B. Rubin. Inference from Iterative Simulation
Using Multiple Sequences. _Statistical Science_, 7(4):457-472, 1992.
[3]: Aki Vehtari, Andrew Gelman, Daniel Simpson, Bob Carpenter, Paul-Christian
Burkner. Rank-normalization, folding, and localization: An improved R-hat
for assessing convergence of MCMC, 2019. Retrieved from
http://arxiv.org/abs/1903.08008
"""
# tf.get_static_value returns None iff a constant value (as a numpy
# array) is not efficiently computable. Therefore, we try constant_value then
# check for None.
icn_const_ = tf.get_static_value(
ps.convert_to_shape_tensor(independent_chain_ndims))
if icn_const_ is not None:
independent_chain_ndims = icn_const_
if icn_const_ < 1:
raise ValueError(
'Argument `independent_chain_ndims` must be `>= 1`, found: {}'.format(
independent_chain_ndims))
  def single_state(s):
    return _potential_scale_reduction_single_state(
        s, independent_chain_ndims, split_chains, validate_args)
  with tf.name_scope('potential_scale_reduction' if name is None else name):
return tf.nest.map_structure(single_state, chains_states)
def _potential_scale_reduction_single_state(state, independent_chain_ndims,
split_chains, validate_args):
"""potential_scale_reduction for one single state `Tensor`."""
# casting integers to floats for floating-point division
# check to see if the `state` is a numpy object for the numpy test suite
if dtype_util.as_numpy_dtype(state.dtype) is np.int64:
state = tf.cast(state, tf.float64)
elif dtype_util.is_integer(state.dtype):
state = tf.cast(state, tf.float32)
with tf.name_scope('potential_scale_reduction_single_state'):
# We assume exactly one leading dimension indexes e.g. correlated samples
# from each Markov chain.
state = tf.convert_to_tensor(state, name='state')
n_samples_ = tf.compat.dimension_value(state.shape[0])
if n_samples_ is not None: # If available statically.
if split_chains and n_samples_ < 4:
raise ValueError(
'Must provide at least 4 samples when splitting chains. '
'Found {}'.format(n_samples_))
if not split_chains and n_samples_ < 2:
raise ValueError(
'Must provide at least 2 samples. Found {}'.format(n_samples_))
elif validate_args:
if split_chains:
assertions = [assert_util.assert_greater(
ps.shape(state)[0], 4,
message='Must provide at least 4 samples when splitting chains.')]
with tf.control_dependencies(assertions):
state = tf.identity(state)
else:
assertions = [assert_util.assert_greater(
ps.shape(state)[0], 2,
message='Must provide at least 2 samples.')]
with tf.control_dependencies(assertions):
state = tf.identity(state)
# Define so it's not a magic number.
# Warning! `if split_chains` logic assumes this is 1!
sample_ndims = 1
if split_chains:
# Split the sample dimension in half, doubling the number of
# independent chains.
# For odd number of samples, keep all but the last sample.
state_shape = ps.shape(state)
n_samples = state_shape[0]
state = state[:n_samples - n_samples % 2]
# Suppose state = [0, 1, 2, 3, 4, 5]
# Step 1: reshape into [[0, 1, 2], [3, 4, 5]]
# E.g. reshape states of shape [a, b] into [2, a//2, b].
state = tf.reshape(
state,
ps.concat([[2, n_samples // 2], state_shape[1:]], axis=0)
)
# Step 2: Put the size `2` dimension in the right place to be treated as a
# chain, changing [[0, 1, 2], [3, 4, 5]] into [[0, 3], [1, 4], [2, 5]],
# reshaping [2, a//2, b] into [a//2, 2, b].
state = tf.transpose(
a=state,
perm=ps.concat(
[[1, 0], ps.range(2, ps.rank(state))], axis=0))
# We're treating the new dim as indexing 2 chains, so increment.
independent_chain_ndims += 1
sample_axis = ps.range(0, sample_ndims)
chain_axis = ps.range(sample_ndims,
sample_ndims + independent_chain_ndims)
sample_and_chain_axis = ps.range(
0, sample_ndims + independent_chain_ndims)
n = _axis_size(state, sample_axis)
m = _axis_size(state, chain_axis)
# In the language of Brooks and Gelman (1998),
# B / n is the between chain variance, the variance of the chain means.
# W is the within sequence variance, the mean of the chain variances.
b_div_n = _reduce_variance(
tf.reduce_mean(state, axis=sample_axis, keepdims=True),
sample_and_chain_axis,
biased=False)
w = tf.reduce_mean(
_reduce_variance(state, sample_axis, keepdims=True, biased=False),
axis=sample_and_chain_axis)
# sigma^2_+ is an estimate of the true variance, which would be unbiased if
# each chain was drawn from the target. c.f. "law of total variance."
sigma_2_plus = ((n - 1) / n) * w + b_div_n
return ((m + 1.) / m) * sigma_2_plus / w - (n - 1.) / (m * n)
# TODO(b/72873233) Move some variant of this to tfd.sample_stats.
def _axis_size(x, axis=None):
"""Get number of elements of `x` in `axis`, as type `x.dtype`."""
if axis is None:
return ps.cast(ps.size(x), x.dtype)
return ps.cast(
ps.reduce_prod(
ps.gather(ps.shape(x), axis)), x.dtype)
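# NOTE: the tail of this file was truncated in this dump; `_sum_pairs` and
# `_reduce_variance` are referenced above, so they are reconstructed here from
# the upstream TensorFlow Probability source (best-effort, not verbatim):
def _sum_pairs(x):
  """Sum over adjacent pairs along axis 0, dropping a trailing odd element."""
  x_len = ps.shape(x)[0]
  # For odd sequences, we drop the final value.
  x = x[:x_len - x_len % 2]
  new_shape = ps.concat([[x_len // 2, 2], ps.shape(x)[1:]], axis=0)
  return tf.reduce_sum(tf.reshape(x, new_shape), axis=1)


def _reduce_variance(x, axis=None, biased=True, keepdims=False):
  """Variance along `axis`; unbiased (N - 1 denominator) when `biased=False`."""
  with tf.name_scope('reduce_variance'):
    x = tf.convert_to_tensor(x, name='x')
    mean = tf.reduce_mean(x, axis=axis, keepdims=True)
    biased_var = tf.reduce_mean(
        tf.math.squared_difference(x, mean), axis=axis, keepdims=keepdims)
    if biased:
      return biased_var
    n = _axis_size(x, axis)
    return n / (n - 1.) * biased_var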
| 43.015177 | 85 | 0.663361 |
b9d22fbf764d6a06a81fe68e7bedb0cb2069ff17
| 2,360 |
py
|
Python
|
mpl/models/leaf.py
|
jiangyuang/ModelPruningLibrary
|
9c8ba5a3c5d118f37768d5d42254711f48d88745
|
[
"MIT"
] | 13 |
2020-02-24T16:57:37.000Z
|
2021-12-14T16:47:41.000Z
|
mpl/models/leaf.py
|
jiangyuang/ModelPruningLibrary
|
9c8ba5a3c5d118f37768d5d42254711f48d88745
|
[
"MIT"
] | 3 |
2021-01-08T14:06:33.000Z
|
2021-09-07T13:39:46.000Z
|
mpl/models/leaf.py
|
jiangyuang/ModelPruningLibrary
|
9c8ba5a3c5d118f37768d5d42254711f48d88745
|
[
"MIT"
] | 3 |
2020-05-30T17:59:43.000Z
|
2021-04-13T04:55:33.000Z
|
from torch import nn as nn
from .base_model import BaseModel
from ..nn.conv2d import DenseConv2d
from ..nn.linear import DenseLinear
__all__ = ["Conv2", "conv2", "Conv4", "conv4"]
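# NOTE: the Conv2/Conv4 class bodies were truncated in this dump. Hypothetical
# sketches of the LEAF-style models the names suggest, built from the pruning
# library's dense layers; the layer sizes, input shapes, and the assumption
# that DenseConv2d/DenseLinear mirror nn.Conv2d/nn.Linear signatures are all
# guesses, not the original code:
class Conv2(BaseModel):
    def __init__(self):
        super(Conv2, self).__init__()
        self.features = nn.Sequential(
            DenseConv2d(1, 32, kernel_size=5, padding=2),  # assumed 28x28 1-channel input
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2),
            DenseConv2d(32, 64, kernel_size=5, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2))
        self.classifier = nn.Sequential(
            DenseLinear(64 * 7 * 7, 2048),
            nn.ReLU(inplace=True),
            DenseLinear(2048, 62))  # 62 FEMNIST classes (assumed)

    def forward(self, inp):
        out = self.features(inp)
        out = out.view(out.size(0), -1)
        return self.classifier(out)


class Conv4(BaseModel):
    def __init__(self):
        super(Conv4, self).__init__()
        # Four conv blocks (assumed CelebA variant from the LEAF benchmark).
        self.features = nn.Sequential(
            DenseConv2d(3, 32, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.MaxPool2d(2),
            DenseConv2d(32, 32, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.MaxPool2d(2),
            DenseConv2d(32, 32, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.MaxPool2d(2),
            DenseConv2d(32, 32, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.MaxPool2d(2))
        self.classifier = DenseLinear(32 * 5 * 5, 2)  # assumed 84x84 input, binary label

    def forward(self, inp):
        out = self.features(inp)
        out = out.view(out.size(0), -1)
        return self.classifier(out)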
def conv2() -> Conv2:
return Conv2()
def conv4() -> Conv4:
return Conv4()
# TODO: define pretrain etc.
| 36.307692 | 96 | 0.469068 |
b9d2bd5114a0540a0095f6c31a8ad07b71899f53
| 29,424 |
py
|
Python
|
scripts/generate_network_interactomix.py
|
quimaguirre/NetworkAnalysis
|
c7a4da3ba5696800738b4767065ce29fa0020d79
|
[
"MIT"
] | 1 |
2017-07-10T17:33:31.000Z
|
2017-07-10T17:33:31.000Z
|
scripts/generate_network_interactomix.py
|
quimaguirre/NetworkAnalysis
|
c7a4da3ba5696800738b4767065ce29fa0020d79
|
[
"MIT"
] | null | null | null |
scripts/generate_network_interactomix.py
|
quimaguirre/NetworkAnalysis
|
c7a4da3ba5696800738b4767065ce29fa0020d79
|
[
"MIT"
] | null | null | null |
import argparse
import ConfigParser
import sys, os, re
import biana
try: from biana import *
except: sys.exit(10)
import methods_dictionaries as methods_dicts
def generate_network(options):
"""
Generates a protein-protein interaction network extracting information from BIANA.
"""
#----------------------#
# FIXED PARAMETERS #
#----------------------#
# Parameters that I have decided to fix
restricted_to_seeds = False
minimum_number_of_methods = 1
minimum_number_of_db = 1
seed_score = 0.1
#--------------------------------------#
# GET INFORMATION FROM CONFIG FILE #
#--------------------------------------#
# Get the program path
main_path = os.path.abspath(os.path.dirname(__file__))
# Read the config file
config_file = os.path.join(main_path, 'config.ini')
config = ConfigParser.ConfigParser()
config.read(config_file)
#--------------------------------------#
# LOAD THE DICTIONARIES OF METHODS #
#--------------------------------------#
# Get the affinity dictionary
affinity_dict = methods_dicts.affinity_dict
affinity=set(affinity_dict.keys())
# Get the complementation dictionary
complementation_dict = methods_dicts.complementation_dict
complementation=set(complementation_dict.keys())
#---------------------------------------#
# GET METHODS THAT WILL BE FILTERED #
#---------------------------------------#
# Check if the user has introduced a file with methods that must be included
if not fileExist(options.restricted_to_user):
print "No restriction on methods selected by the user"
user_selection=False
else:
use_methods=[]
with open(options.restricted_to_user) as input_method_fd:
for line in input_method_fd:
fields = line.strip().split("\t")
use_methods.append(fields[0])
user_selection=True
print "Input to use only Methods:",repr(use_methods)
# Check if the user has introduced a file with methods that have to be excluded
if not fileExist(options.except_user):
print "No rejection of methods selected by the user"
user_rejection=False
else:
no_methods=[]
with open(options.except_user) as input_method_fd:
for line in input_method_fd:
fields = line.strip().split("\t")
no_methods.append(fields[0])
user_rejection=True
print "Input of rejected Methods:",repr(no_methods)
#---------------------------#
# START A BIANA SESSION #
#---------------------------#
print "Open session"
session = create_new_session( sessionID="biana_session",
dbname=config.get('BIANA', 'database'),
dbhost=config.get('BIANA', 'host'),
dbuser=config.get('BIANA', 'user'),
dbpassword=config.get('BIANA', 'password'),
unification_protocol=config.get('BIANA', 'unification_protocol') )
print "Continue"
#------------------------------#
# DEFINE A USER ENTITY SET #
#------------------------------#
# Create network network of expansion if the radius is larger than 0
if restricted_to_seeds or options.radius>0:
# Check if the seeds file exists
if not fileExist(options.seed):
print "File with seeds is missing or not found"
sys.exit(10)
else:
level=options.radius
seed_list = get_seeds_from_file(options.seed)
# If we have Taxonomy restriction, we add it
if options.taxid != "0":
print("Check Proteome %s"%(repr(options.taxid)))
proteome = session.create_new_user_entity_set( identifier_description_list =seed_list,
attribute_restriction_list=[("taxid",options.taxid)],
id_type=options.stype,new_user_entity_set_id="proteome",
negative_attribute_restriction_list=[] )
else:
print('Proteome without Taxonomy restriction')
proteome = session.create_new_user_entity_set( identifier_description_list =seed_list,
id_type=options.stype,new_user_entity_set_id="proteome",
negative_attribute_restriction_list=[] )
else:
level=0
proteome = session.create_new_user_entity_set( identifier_description_list = [("taxid",options.taxid)],
attribute_restriction_list=[], id_type="embedded",
new_user_entity_set_id="proteome",
negative_attribute_restriction_list=[] )
#----------------------------------------------------#
# SELECT THE INTERACTIONS OF THE USER ENTITY SET #
#----------------------------------------------------#
print ("Selecting interactions")
# Select interactions that have been detected at least by affinity technology
if options.restricted_to_TAP:
print ('Using interactions at least described by affinity methods (i.e. Tandem Affinity Purification)')
session.create_network( user_entity_set_id = "proteome" , level = level, relation_type_list=["interaction"] ,
relation_attribute_restriction_list = [("Method_id",400)],
#relation_attribute_restriction_list = [("psimi_name","affinity technology")],
include_relations_last_level = (not restricted_to_seeds) , use_self_relations = False)
# Select interactions that have been detected at least by yeast two hybrid
elif options.restricted_to_Y2H:
print ('Using interactions at least described by yeast-two-hybrid methods (Y2H)')
session.create_network( user_entity_set_id = "proteome" , level = level, relation_type_list=["interaction"] ,
relation_attribute_restriction_list = [("Method_id",18)],
#relation_attribute_restriction_list = [("psimi_name","y2h2")],
include_relations_last_level = (not restricted_to_seeds) , use_self_relations = False)
# Select all interactions
else:
session.create_network( user_entity_set_id = "proteome" , level = level, relation_type_list=["interaction"] ,
include_relations_last_level = (not restricted_to_seeds) , use_self_relations = False)
# Summary of interactions
out_network = open(options.edge,'w')
all_interactions = proteome.getRelations()
print "Num interactions:", len(all_interactions)
#--------------------------------------#
# FILTER THE SELECTED INTERACTIONS #
#--------------------------------------#
nodes=set()
# Get all the user entity ids from the user entity set 'proteome'
all_uEs = proteome.get_user_entity_ids()
# Obtain a dictionary user entity ID => type
uEId_to_type = session.dbAccess.get_user_entity_type(config.get('BIANA', 'unification_protocol'), all_uEs)
skip_interactions=0
for (uE_id1, uE_id2) in all_interactions:
#self.dbAccess.get_external_entities_dict( externalEntityIdsList = [external_entity_relation_id] )
# Get TYPE of user entity
uE1_type = uEId_to_type[uE_id1]
uE2_type = uEId_to_type[uE_id2]
# If type is not protein, we skip the interaction
if uE1_type != 'protein' or uE2_type != 'protein':
if options.verbose:
print('Skipping interaction because the type of one of the user entities is not protein!')
print('Node 1: {}\tType: {}'.format(uE_id1, uE1_type))
print('Node 2: {}\tType: {}'.format(uE_id2, uE2_type))
skip_interactions=skip_interactions+1
continue
eErIDs_list = proteome.get_external_entity_relation_ids(uE_id1, uE_id2)
method_names = set()
method_ids = set()
source_databases = set()
use_method_ids=set()
pubmed_ids = set()
unused_method_names = set()
relationObj_dict = session.dbAccess.get_external_entities_dict(
externalEntityIdsList = eErIDs_list, attribute_list = [],
relation_attribute_list = ["method_id","psimi_name","pubmed"], participant_attribute_list = [] )
num_methods=0
for current_eErID in eErIDs_list:
relationObj = relationObj_dict[current_eErID]
if options.verbose:
print "Interaction: (",uE_id1,",",uE_id2,")"
print relationObj
#if relationObj.get_attribute(attribute_identifier="psimi_name") is not None:
# print "\t".join([ x.value for x in relationObj.get_attribute(attribute_identifier="psimi_name") ])
#if relationObj.get_attribute(attribute_identifier="method_id") is not None:
#print "\t".join([ x.value for x in relationObj.get_attribute(attribute_identifier="method_id") ])
#print relationObj.get_attributes_dict()
#print [ x.value for x in relationObj.get_attributes_dict()["psimi_name"] ]
#print [ x.value for x in relationObj.get_attributes_dict()["method_id"] ]
if "psimi_name" in relationObj.get_attributes_dict():
method_names.update([ str(x.value) for x in relationObj.get_attributes_dict()["psimi_name"] ])
if "method_id" in relationObj.get_attributes_dict():
method_ids.update([ x.value for x in relationObj.get_attributes_dict()["method_id"]])
if "pubmed" in relationObj.get_attributes_dict():
pubmed_ids.update([ x.value for x in relationObj.get_attributes_dict()["pubmed"]])
source_databases.add(str(session.dbAccess.get_external_database(
database_id = relationObj.get_source_database()) ))
if options.except_TAP:
for m in method_ids:
if m not in affinity:
use_method_ids.add(m)
#print "Add", m
else:
unused_method_names.add(affinity_dict[m])
elif options.except_Y2H:
#print "check Y2H"
for m in method_ids:
if m not in complementation:
use_method_ids.add(m)
#print "Add", m
else:
unused_method_names.add(complementation_dict[m])
elif user_rejection:
for m in method_ids:
if m not in no_methods:
use_method_ids.add(m)
elif user_selection:
for m in method_ids:
#print "Check",repr(use_methods)
if m in set(use_methods):
use_method_ids.add(m)
if options.verbose:
print "Not among selected methods ",m
else:
use_method_ids.update(method_ids)
if len(source_databases) > 0:
info_sources=";".join([str(x) for x in source_databases])
else:
if options.verbose:
print('Skipping interaction it has no source database!')
print('Node 1: {}\tNode 2: {}'.format(uE_id1, uE_id2))
skip_interactions=skip_interactions+1
continue
if len(method_names) > 0:
method_names = [x for x in method_names if x not in unused_method_names] # Remove method names that were excluded
info_methods=";".join([str(x) for x in method_names])
else:
info_methods='-'
if len(use_method_ids) > 0:
info_methods_ids=";".join([str(x) for x in use_method_ids])
else:
if options.verbose:
print('Skipping interaction it has no method!')
print('Node 1: {}\tNode 2: {}'.format(uE_id1, uE_id2))
skip_interactions=skip_interactions+1
continue
if len(pubmed_ids) > 0:
info_pubmed_ids=";".join([str(x) for x in pubmed_ids])
else:
info_pubmed_ids='-'
num_databases=len(source_databases)
num_methods=len(use_method_ids)
num_pubmeds = len(pubmed_ids)
if options.verbose:
print "Methods",num_methods,info_methods,"\tSelected:",info_methods_ids
print "Databases",num_databases,info_sources
print "Pubmeds",num_pubmeds,info_pubmed_ids
# Check if the number of methods is higher than the minimum established
if num_methods >= minimum_number_of_methods:
use=True
else:
use=False
# Check if the number of database is higher than the minimum established
if use and num_databases >= minimum_number_of_db:
use=True
else:
use=False
if not use:
skip_interactions=skip_interactions+1
#print method_names, method_ids, source_databases
#----------------------#
# OUTPUT EDGE FILE #
#----------------------#
if use:
            #print uE_id1, uE_id2
nodes.add(uE_id1)
nodes.add(uE_id2)
#print "Attribute ",(uE_id1,uE_id2).get_attribute(
if options.format == 'multi-fields' :
out_network.write("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\n".
format(uE_id1,uE_id2,info_sources,info_methods_ids,info_methods,info_pubmed_ids))
elif options.format == 'netscore':
out_network.write('\t{}\t{}\t{:.2f}\n'.format(uE_id1,uE_id2,1.))
elif options.format == 'raw':
out_network.write("{}\t{}\n".format(uE_id1,uE_id2))
else:
# If the format is not multi-fields, netscore or raw, the output format is sif
out_network.write("{}\t{:.2f}\t{}\n".format(uE_id1,1.,uE_id2))
print "Num neglected interactions:", skip_interactions
out_network.close()
#---------------------------------------#
# OUTPUT NODE AND TRANSLATION FILES #
#---------------------------------------#
# If we wanted the complete interactome, the translation will be done differently
if options.radius <= 0:
# Output node file
out_proteins = open(options.node,'w')
for protein in nodes:
if options.format == 'multi-fields':
out_proteins.write("{0}\t{1:.2f}\t{2:.2f}\t{3:.2f}\n".format(protein,1.,1.,0.1))
elif options.format == 'netscore':
out_proteins.write("{0}\t{1:.2f}\t{2:.2f}\t{3:.2f}\n".format(protein,1.,1.,0.1))
else:
out_proteins.write("{0}\t{1:.2f}\n".format(protein,0.1))
out_proteins.close()
################################# TRANSLATION ####################################
out_translation = open(options.translation_file,'w')
# TRANSLATION TO 'stype'
trans_stype=False
if options.stype != 'proteinsequence' and options.stype != options.ttype:
trans_stype = True
out_trans_stype = open(options.translation_file+'.'+options.stype+'.trans','w')
for protein in nodes:
uE = session.get_user_entity(protein)
translate=set()
translate_stype=set()
if options.ttype == "proteinsequence":
maxlen=0;
for current_id in uE.get_attribute(attribute_identifier=options.ttype):
if maxlen < len(current_id.value.get_sequence().upper()):
maxlen=len(current_id.value.get_sequence().upper())
translation=",".join([str(current_id.value.get_sequence().upper()) for current_id in uE.get_attribute(attribute_identifier=options.ttype) if len(str(current_id.value.get_sequence().upper())) == maxlen ] )
#print "Translation",protein,translation
#print("{0}\t'{1}'\n".format(protein,translation))
else:
##### TRANSLATION TO 'ttype'
for current_id in uE.get_attribute(attribute_identifier=options.ttype):
translate.add(current_id.value.upper())
translation="','".join(["{0}".format(x) for x in translate])
out_translation.write("{0}\t'{1}'\n".format(protein,translation))
##### TRANSLATION TO STYPE
if trans_stype:
for current_id in uE.get_attribute(attribute_identifier=options.stype):
translate_stype.add(current_id.value.upper())
translation_stype="','".join(["{0}".format(x) for x in translate_stype])
out_trans_stype.write("{0}\t'{1}'\n".format(protein,translation_stype))
out_translation.close()
if trans_stype:
out_trans_stype.close()
####################################################################################
# If we wanted a network of expansion, the translation will be done differently
elif options.radius > 0:
# Read the seeds
seeds=set()
input_seed = open(options.seed,'r')
for line in input_seed:
fields = line.strip().split("\t")
seeds.add(fields[0].lower())
input_seed.close()
# Output node file
out_proteins = open(options.node,'w')
translate={}
for protein in nodes:
score=seed_score
uE = session.get_user_entity(protein)
for current_id in uE.get_attribute(attribute_identifier=options.stype):
if current_id.value.lower() in seeds:
translate.setdefault(current_id.value.lower(),[])
translate[current_id.value.lower()].append(protein)
score=1.0
if options.format == 'multi-fields':
out_proteins.write("{0}\t{1:.2f}\t{2:.2f}\t{3:.2f}\n".format(protein,1.,1.,score))
elif options.format == 'netscore':
out_proteins.write("{0}\t{1:.2f}\t{2:.2f}\t{3:.2f}\n".format(protein,1.,1.,score))
else:
out_proteins.write("{0}\t{1:.2f}\n".format(protein,score))
out_proteins.close()
# Get the IDS of single nodes that were not previously found in the network
single=set()
for uE_id in proteome.get_unconnected_nodes():
single.add(uE_id)
for protein in single:
uE = session.get_user_entity(protein)
for current_id in uE.get_attribute(attribute_identifier=options.stype):
if current_id.value.lower() in seeds:
translate.setdefault(current_id.value.lower(),[])
translate[current_id.value.lower()].append(protein)
# Get all IDS of SEEDS, defined as "proteome", and check missing codes to be
# added for translation
allseed=set()
for uE_id in proteome.get_user_entity_ids():
allseed.add(uE_id)
for protein in allseed:
if protein not in single and protein not in nodes:
uE = session.get_user_entity(protein)
for current_id in uE.get_attribute(attribute_identifier=options.stype):
if current_id.value.lower() in seeds:
translate.setdefault(current_id.value.lower(),[])
translate[current_id.value.lower()].append(protein)
################################# TRANSLATION ####################################
out_translation = open(options.translation_seeds_file,'w')
for s in seeds:
if s == '': continue
if s in translate:
codes=set(translate[s])
translation="','".join([str(x) for x in codes])
#out_translation.write("%s\t'%s'\n" % (s.upper(),translation))
out_translation.write("{0}\t'{1}'\n".format(s.upper(),translation))
else:
out_translation.write("{0}\t'Unknown'\n".format(s.upper()))
out_translation.close()
# Output translation file
# TRANSLATION TO 'ttype'
out_translation = open(options.translation_file,'w')
# TRANSLATION TO 'stype'
trans_stype=False
if options.stype != 'proteinsequence' and options.stype != options.ttype:
trans_stype = True
out_trans_stype = open(options.translation_file+'.'+options.stype+'.trans','w')
for protein in nodes:
uE = session.get_user_entity(protein)
translate=set()
translate_stype=set()
if options.ttype == "proteinsequence":
maxlen=0;
for current_id in uE.get_attribute(attribute_identifier=options.ttype):
if maxlen < len(current_id.value.get_sequence().upper()):
maxlen=len(current_id.value.get_sequence().upper())
translation=",".join([str(current_id.value.get_sequence().upper()) for current_id in uE.get_attribute(attribute_identifier=options.ttype) if len(str(current_id.value.get_sequence().upper())) == maxlen ] )
#print "Translation",protein,translation
#print("{0}\t'{1}'\n".format(protein,translation))
else:
for current_id in uE.get_attribute(attribute_identifier=options.ttype):
translate.add(current_id.value.upper())
translation="','".join(["{0}".format(x) for x in translate])
out_translation.write("{0}\t'{1}'\n".format(protein,translation))
##### TRANSLATION TO STYPE
if trans_stype:
for current_id in uE.get_attribute(attribute_identifier=options.stype):
translate_stype.add(current_id.value.upper())
translation_stype="','".join(["{0}".format(x) for x in translate_stype])
out_trans_stype.write("{0}\t'{1}'\n".format(protein,translation_stype))
out_translation.close()
if trans_stype:
out_trans_stype.close()
####################################################################################
print('Generation of the network done!')
return
def fileExist(file):
"""
Checks if a file exists AND is a file
"""
return os.path.exists(file) and os.path.isfile(file)
def get_seeds_from_file(seed_file):
"""
Obtain the seeds from a file and introduce them to a Python list.
The seeds must be separated by new lines!
"""
seed_set = set()
with open(seed_file, 'r') as seed_file_fd:
for line in seed_file_fd:
fields = line.strip().split('\t')
seed_set.add(fields[0])
return list(seed_set)
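# NOTE: the original main() was truncated in this dump. A minimal hypothetical
# reconstruction that wires up the options referenced in generate_network();
# the flag names and defaults below are assumptions, not the original CLI:
def main():
    parser = argparse.ArgumentParser(description="Generate a PPI network from BIANA")
    parser.add_argument('-s', '--seed', dest='seed', default='', help='file with seed identifiers, one per line')
    parser.add_argument('-r', '--radius', dest='radius', type=int, default=0, help='expansion level around the seeds (0 = whole interactome)')
    parser.add_argument('-t', '--taxid', dest='taxid', default='9606', help='NCBI taxonomy id ("0" = no restriction)')
    parser.add_argument('--stype', dest='stype', default='geneid', help='identifier type of the seeds')
    parser.add_argument('--ttype', dest='ttype', default='uniprotaccession', help='identifier type for the translation files')
    parser.add_argument('-e', '--edge', dest='edge', default='network.edges', help='output edge file')
    parser.add_argument('-n', '--node', dest='node', default='network.nodes', help='output node file')
    parser.add_argument('--translation_file', dest='translation_file', default='network.trans')
    parser.add_argument('--translation_seeds_file', dest='translation_seeds_file', default='seeds.trans')
    parser.add_argument('-f', '--format', dest='format', default='multi-fields', choices=['multi-fields', 'netscore', 'raw', 'sif'])
    parser.add_argument('--restricted_to_TAP', dest='restricted_to_TAP', action='store_true')
    parser.add_argument('--restricted_to_Y2H', dest='restricted_to_Y2H', action='store_true')
    parser.add_argument('--except_TAP', dest='except_TAP', action='store_true')
    parser.add_argument('--except_Y2H', dest='except_Y2H', action='store_true')
    parser.add_argument('--restricted_to_user', dest='restricted_to_user', default='', help='file with methods to include')
    parser.add_argument('--except_user', dest='except_user', default='', help='file with methods to exclude')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true')
    generate_network(parser.parse_args())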
if __name__ == "__main__":
main()
| 48.157119 | 591 | 0.569195 |