Dataset schema (one record per source file; ⌀ marks a nullable column):

| column | dtype | range |
|---|---|---|
| hexsha | string | lengths 40–40 |
| size | int64 | 5 – 2.06M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 3–248 |
| max_stars_repo_name | string | lengths 5–125 |
| max_stars_repo_head_hexsha | string | lengths 40–78 |
| max_stars_repo_licenses | list | lengths 1–10 |
| max_stars_count | int64 ⌀ | 1 – 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | lengths 24–24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | lengths 24–24 |
| max_issues_repo_path | string | lengths 3–248 |
| max_issues_repo_name | string | lengths 5–125 |
| max_issues_repo_head_hexsha | string | lengths 40–78 |
| max_issues_repo_licenses | list | lengths 1–10 |
| max_issues_count | int64 ⌀ | 1 – 67k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | lengths 24–24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | lengths 24–24 |
| max_forks_repo_path | string | lengths 3–248 |
| max_forks_repo_name | string | lengths 5–125 |
| max_forks_repo_head_hexsha | string | lengths 40–78 |
| max_forks_repo_licenses | list | lengths 1–10 |
| max_forks_count | int64 ⌀ | 1 – 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | lengths 24–24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | lengths 24–24 |
| content | string | lengths 5 – 2.06M |
| avg_line_length | float64 | 1 – 1.02M |
| max_line_length | int64 | 3 – 1.03M |
| alphanum_fraction | float64 | 0 – 1 |
| count_classes | int64 | 0 – 1.6M |
| score_classes | float64 | 0 – 1 |
| count_generators | int64 | 0 – 651k |
| score_generators | float64 | 0 – 1 |
| count_decorators | int64 | 0 – 990k |
| score_decorators | float64 | 0 – 1 |
| count_async_functions | int64 | 0 – 235k |
| score_async_functions | float64 | 0 – 1 |
| count_documentation | int64 | 0 – 1.04M |
| score_documentation | float64 | 0 – 1 |

Records follow, one per file: a metadata row, then the file content, then a per-file statistics row.
| hexsha: d084cbae54b480c7285413e8320b17434455ebf4 | size: 1,666 | ext: py | lang: Python | path: python_exercises/main.py | repo: nchristie/slide-python-intro @ dd52781b5d25435f97aa83cfff58c175fa7fdd1c | licenses: ["MIT"] | max_stars_count: 1 (2018-06-07T12:40:37.000Z – 2018-06-07T12:40:37.000Z) | max_issues_count: 3 (2018-06-07T14:39:19.000Z – 2019-01-15T16:35:23.000Z) | max_forks_count: 9 (2018-05-30T17:12:27.000Z – 2021-07-01T03:22:48.000Z) | content: |
"""
Press run above to start
"""
from exercises.question_runner import run
from question_directory import (
boolean_operators,
boolean_review,
changing_lists,
dictionaries,
equality_and_booleans,
for_loops,
functions,
functions_quick_review,
greater_than_less_than_and_booleans,
inbuilt_functions_and_operators,
indexing_lists,
variables_equality_and_booleans,
while_loops,
)
from unit_tests.test_instructor_code import * # noqa
if input("\n\nPress enter to start\n") != "test":
# LESSON ONE
# https://kathrinschuler.github.io/slide-python-intro/#/10/3
run(equality_and_booleans.TASKS, equality_and_booleans.BLURB)
run(greater_than_less_than_and_booleans.TASKS, greater_than_less_than_and_booleans.BLURB)
# https://kathrinschuler.github.io/slide-python-intro/#/11/4
run(variables_equality_and_booleans.TASKS, variables_equality_and_booleans.BLURB)
run(boolean_operators.TASKS, boolean_operators.BLURB)
# LESSON TWO
run(inbuilt_functions_and_operators.TASKS, inbuilt_functions_and_operators.BLURB)
# LESSON THREE
# https://kathrinschuler.github.io/slide-python-intro/#/25/4
run(boolean_review.TASKS, boolean_review.BLURB)
run(while_loops.TASKS, while_loops.BLURB)
run(for_loops.TASKS, for_loops.BLURB)
run(functions.TASKS, functions.BLURB)
# LESSON FOUR
run(indexing_lists.TASKS, indexing_lists.BLURB)
run(functions_quick_review.TASKS, functions_quick_review.BLURB)
run(changing_lists.TASKS, changing_lists.BLURB)
run(dictionaries.TASKS, dictionaries.BLURB)
else:
if __name__ == "__main__":
unittest.main() # noqa
| avg_line_length: 32.038462 | max_line_length: 93 | alphanum_fraction: 0.758703 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 319 | score_documentation: 0.191477 |
| hexsha: d085434851368c058d877fff6243d0f3f90e2c23 | size: 462 | ext: py | lang: Python | path: config.py | repo: LandRegistry/historian-alpha @ 3f2c2060d0f747772e8362dbe4867aea24731180 | licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: 1 (2021-04-11T06:07:16.000Z – 2021-04-11T06:07:16.000Z) | content: |
import os
class Config(object):
DEBUG = False
STORAGE = os.environ['STORAGE']
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
# these are on heroku only so get safely
BASIC_AUTH_USERNAME = os.environ.get('BASIC_AUTH_USERNAME')
BASIC_AUTH_PASSWORD = os.environ.get('BASIC_AUTH_PASSWORD')
class DevelopmentConfig(Config):
DEBUG = True
class TestConfig(DevelopmentConfig):
TESTING = True
STORAGE = os.environ['STORAGE']
| avg_line_length: 25.666667 | max_line_length: 63 | alphanum_fraction: 0.727273 | count_classes: 446 | score_classes: 0.965368 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 114 | score_documentation: 0.246753 |
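A minimal usage sketch for the config.py record above, assuming a Flask application (the SQLALCHEMY_DATABASE_URI and BASIC_AUTH_* keys follow Flask extension conventions); the environment values are placeholders:

```python
# Hypothetical wiring; STORAGE and DATABASE_URL must exist in the environment
# before config.py is imported, or the module-level lookups raise KeyError.
import os
os.environ.setdefault('STORAGE', '/tmp/storage')
os.environ.setdefault('DATABASE_URL', 'postgresql://localhost/historian')

from flask import Flask
import config

app = Flask(__name__)
app.config.from_object(config.DevelopmentConfig)  # picks the DEBUG=True variant
print(app.config['SQLALCHEMY_DATABASE_URI'])
```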
| hexsha: d085e4d4c18167f75fdb378a2d6a53bb684ea18f | size: 1,124 | ext: py | lang: Python | path: scripts/removeComments.py | repo: doggy8088/azure-devops-cli-extension @ 2f6b1a6ffbc49ae454df640a8bb00dac991d6514 | licenses: ["MIT"] | max_stars_count: 326 (2019-04-10T12:38:23.000Z – 2022-03-31T23:07:49.000Z) | max_issues_count: 562 (2019-04-10T07:36:12.000Z – 2022-03-28T07:37:54.000Z) | max_forks_count: 166 (2019-04-10T07:59:40.000Z – 2022-03-16T14:17:13.000Z) | content: |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
def remove_comment_from_file(filePath):
temp_file = filePath + ".tmp"
file = open(filePath, "r")
file_new = open(temp_file, "w")
comment_in_progress = False
for line in file:
if "\"\"\"" in line:
comment_in_progress = not comment_in_progress
if not comment_in_progress and "\"\"\"" not in line and not line.startswith("#"):
file_new.write(line)
file.close()
file_new.close()
os.replace(temp_file, filePath)
for path, subdirs, files in os.walk('.'):
for name in files:
file_path = os.path.join(path, name)
if file_path.endswith(".py") and "devops_sdk" in file_path:
print('removing comments from ' + file_path)
remove_comment_from_file(file_path)
| avg_line_length: 36.258065 | max_line_length: 94 | alphanum_fraction: 0.544484 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 417 | score_documentation: 0.370996 |
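A small demonstration of remove_comment_from_file from the script above, on a throwaway file (path and contents are made up). The scan is line-based: only lines starting with `#` at column 0 are dropped, and a docstring that opens and closes on the same line would leave the toggle in the wrong state:

```python
import os, tempfile

path = os.path.join(tempfile.mkdtemp(), 'sample.py')
with open(path, 'w') as fh:
    fh.write('# header comment\n'
             'def f():\n'
             '    """multi-line\n'
             '    docstring"""\n'
             '    return 1\n')

remove_comment_from_file(path)   # function defined in the script above
print(open(path).read())         # only "def f():" and "    return 1" remain
```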
| hexsha: d086ce5911f7ac6a2a4bd8994adb6dc6191adc49 | size: 7,407 | ext: py | lang: Python | path: scripts/update_tables.py | repo: EnsemblGSOC/tony-gsoc-2018 @ 7b727e3a82654a4f102d735fb0b2c4ab12470ff6 | licenses: ["Apache-2.0"] | max_stars_count: 1 (2018-08-12T08:34:51.000Z – 2018-08-12T08:34:51.000Z) | max_issues_count: 19 (2018-05-11T12:46:28.000Z – 2018-08-13T11:28:44.000Z) | max_forks_count: 1 (2018-09-22T04:58:55.000Z – 2018-09-22T04:58:55.000Z) | content: |
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import ast
import sys
import xml.etree.ElementTree as ET
from datetime import datetime
import requests
from sqlalchemy import create_engine, Table, MetaData, func, or_
from sqlalchemy.orm import sessionmaker
from base import *
# setup config
config_path = sys.argv[1]
with open(config_path) as configfile:
config = ast.literal_eval(configfile.read())
tony_assembly = config["tony_assembly"]
results_dir = config ["results_dir"]
udocker_root = config["udocker_root"]
toil_dir = config["toil_dir"]
workflow_dir = config["workflow_dir"]
log_dir = config["log_dir"]
registry = config["registry"]
def xml_download(ena_accession):
"""
pulling xml record from ENA
:param ena_accession:
:return:
"""
try:
xml = ET.fromstring(requests.get("https://www.ebi.ac.uk/ena/data/view/{}&display=xml".format(ena_accession),
stream=True, timeout=60).content)
return xml
except requests.exceptions.ReadTimeout:
stderr.write("Could not download XML file with accession {}\n".format(ena_accession))
return None
def xml_download_retry(ena_accession):
"""
pulling xml record from ENA, some of the records take a longer time to connect, this retry set timeout to be 5 mins
:param ena_accession:
:return:
"""
try:
xml = ET.fromstring(requests.get("https://www.ebi.ac.uk/ena/data/view/{}&display=xml".format(ena_accession),
stream=True, timeout=300).content)
return xml
except requests.exceptions.ReadTimeout:
stderr.write("Could not download XML file with accession {}\n".format(ena_accession))
return None
def chromosome_number(xml):
"""
find the number of chromosomes within the assembly. If the assembly is assembled to scaffold level, returns 0
:param xml:
:return:
"""
try:
chroms_number = len(xml.find("ASSEMBLY").find("CHROMOSOMES").findall("CHROMOSOME"))
return chroms_number
except AttributeError:
return 0
def get_chromosomes(xml):
for record in xml.find("ASSEMBLY").find("CHROMOSOMES").findall("CHROMOSOME"):
yield record
def chromosome_data(xml):
"""
extract md5 and length of the chromosome from the chromosome's xml record
:param xml:
:return:
"""
for xref in xml.find("entry").findall("xref"):
if xref.attrib["db"] == "MD5":
md5 = xref.attrib["id"]
break
length = xml.find("entry").attrib["sequenceLength"]
return md5, int(length)
def get_scaffold_number(xml):
for attribute in xml.find("ASSEMBLY").find("ASSEMBLY_ATTRIBUTES").findall("ASSEMBLY_ATTRIBUTE"):
if attribute.find("TAG").text == "scaffold-count":
return int(attribute.find("VALUE").text)
stderr = open("{log_dir}/log_update_tables.txt".format(log_dir=log_dir), "a")
stderr.write(str(datetime.now()) + "\n")
stderr.write("====\n")
registry_engine = create_engine(registry)
assembly = Table("assembly", MetaData(), autoload=True, autoload_with=registry_engine)
engine = create_engine(tony_assembly)
session = sessionmaker(bind=engine)
s = session()
old_accessions = s.query(GCA.accession).all()
r_session = sessionmaker(bind=registry_engine)
rs = r_session()
sub_concat = func.concat(assembly.c.chain, ".", assembly.c.version)
new_accessions = rs.query(sub_concat).filter(sub_concat.notin_(old_accessions)).all()
rs.close()
s = session()
for entry in new_accessions:
gca = GCA()
gca.accession = entry[0]
# print(gca.accession)
gca_xml = xml_download(gca.accession)
if gca_xml is not None: # only add to GCA table if the xml record of the assembly exists
try:
gca.assembly_level = gca_xml.find("ASSEMBLY").find("ASSEMBLY_LEVEL").text
except AttributeError:
gca.assembly_level = "No Level"
stderr.write("{} has no assembly_level attribute, not added to database\n".format(gca.accession))
if gca.assembly_level in ["chromosome", "complete genome"]:
gca.records = chromosome_number(gca_xml)
s.add(gca)
# print(gca.accession, gca.assembly_level, gca.records)
for chrom_record in get_chromosomes(gca_xml):
chromosome = Chromosome()
chromosome.GCA_accession = gca.accession
chromosome.accession = chrom_record.attrib["accession"]
# print(chromosome.accession)
chromosome.name = chrom_record.find("NAME").text
chromosome.status = 1
chrom_xml = xml_download(chromosome.accession)
if chrom_xml is not None:
try:
chromosome.md5, chromosome.length = chromosome_data(chrom_xml)
except AttributeError:
stderr.write("Chromosome {} doesn't exit or has corrupted xml file. Chromosome was added "
"without md5 and length.\n".format(chromosome.accession))
s.add(chromosome)
# print(chromosome.accession, chromosome.GCA_accession,
# chromosome.name, chromosome.length, chromosome.md5)
if not s.query(Jobs).filter(Jobs.chromosome_accession == chromosome.accession).all():
for job in ["get_fasta", "GC", "trf", "CpG"]:
s.add(Jobs(chromosome_accession=chromosome.accession,
job_name=job))
# print(chromosome.accession, job)
elif gca.assembly_level in ["scaffold", "contig"]:
gca.records = get_scaffold_number(gca_xml)
s.add(gca)
for job in ["get_fasta", "GC", "trf", "CpG"]:
s.add(Jobs(chromosome_accession=gca.accession,
job_name=job))
# print(gca.accession, gca.assembly_level, gca.records)
s.commit()
else:
stderr.write("{} was not added because XML record is unavailable\n".format(gca.accession))
stderr.flush()
# retry download chromosome xml record with a longer timeout
for chromosome in s.query(Chromosome).filter(or_(Chromosome.md5 == None, Chromosome.length == None)).all():
chrom_xml = xml_download_retry(chromosome.accession)
if chrom_xml is not None:
try:
chromosome.md5, chromosome.length = chromosome_data(chrom_xml)
except AttributeError:
stderr.write("Chromosome {} doesn't exit or has corrupted xml file. Chromosome data was not added\n"
.format(chromosome.accession))
s.commit()
stderr.flush()
s.close()
stderr.close()
| avg_line_length: 38.780105 | max_line_length: 119 | alphanum_fraction: 0.652491 | count_classes: 0 | score_classes: 0 | count_generators: 128 | score_generators: 0.017281 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 2,635 | score_documentation: 0.355745 |
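The script above reads its configuration through ast.literal_eval, so the file passed as sys.argv[1] must be a single Python dict literal. A hypothetical example covering every key the script reads (the connection strings and paths are placeholders):

```python
# Hypothetical config file contents; ast.literal_eval() accepts only plain
# literals, so no imports or expressions are allowed here.
{
    "tony_assembly": "mysql://user:password@localhost/tony_assembly",
    "registry": "mysql://user:password@localhost/ensembl_registry",
    "results_dir": "/data/results",
    "udocker_root": "/data/udocker",
    "toil_dir": "/data/toil",
    "workflow_dir": "/data/workflows",
    "log_dir": "/data/logs",
}
```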
| hexsha: d086d7aab191c7687a9c71aa884d413b256ec344 | size: 8,518 | ext: py | lang: Python | path: lda_loader.py | repo: abramhindle/organize-conference-sessions-by-paper-topics @ 076adf1049fe9588c04f37840e938b20dd32d102 | licenses: ["Apache-2.0"] | max_stars_count: 1 (2017-12-24T23:48:33.000Z – 2017-12-24T23:48:33.000Z) | max_issues_count: 1 (2017-12-29T15:31:24.000Z – 2018-01-03T17:39:58.000Z) | max_forks_count: 2 (2017-12-29T15:14:00.000Z – 2021-01-16T13:33:50.000Z) | content: |
# Copyright (C) 2014 Alex Wilson
# Copyright (C) 2012-14 Abram Hindle
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import json, os
from config import Config
from databases import BulkOps, ElasticSearch
from topics_controller import *
import lda
import common, project, task_queue
class LDALoader(TopicsLoader):
def __init__(self, run, topic_count=100, docs=None):
self.run = run
self.client = self.run.client
self.docs = docs or self.client.get_all_docs()
super(LDALoader, self).__init__(self.run
,all_ids=self.docs.keys()
,topics=range(topic_count))
self.topic_count = topic_count
self.ids = None # later, we assign to this the ids that we analyze
@staticmethod
def create_loader_for_inference(run):
'''creates a loader for a project, with the topic_count
deduced from the data already created'''
topic_count = TopicsController(run.client).count_topics()
return LDALoader(run, topic_count)
def compute_lda(self):
# guards against missing docs
assert self.docs != None
self.ids = self.all_ids
self.lda_model, self.token_counts = \
lda.LDA.create_from_docs(self.docs, self.ids)
# This dumps the documents, because we need the memory, it also means the objects cannot recompute lda
self.docs = None
self.run.dump_token_ids(self.lda_model.token_ids)
self.run.dump_bug_ids(self.all_ids)
self.vw = lda.VowpalWabbit(self.run, self.lda_model,
topic_count=self.topic_count)
self.vw.make_lda_input(self.token_counts, self.ids)
self.vw.clean_files()
common.run_command(self.vw.lda_command())
def compute_lda_inference(self):
self.lda_model, analyzed_doc_ids = \
self.run.load_lda_model_for_inference()
bug_db = self.client.connect_to_db()
self.ids = [id for id in self.all_ids if id not in analyzed_doc_ids]
token_counts = [self.lda_model.convert_and_filter_token_counts(
self.lda_model.add_doc_for_inference(doc))
for id, doc in bug_db.mget(self.ids)]
self.vw = lda.VowpalWabbit(self.run, self.lda_model,
topic_count=self.topic_count)
self.vw.make_lda_input(token_counts, self.ids)
self.vw.clean_files_for_inference()
common.run_command(self.vw.lda_inference_command())
def update_summaries_in_database(self):
topics_db = self.client.connect_to_topics_db()
self.vw.inform_loader(self)
summary = self.vw.create_summary()
print 'saving report'
reports_db = self.client.connect_to_generated_db()
report = summary.make_report_doc(self.project)
reports_db[report['doc_id']] = report
print 'update summaries in database'
super(LDALoader, self).update_summaries_in_database(self.client)
updater = BulkOps(self.client, ElasticSearch.TOPICS)
# TODO: what if there are custom topics before we do LDA
print "Saving Summaries"
for i in range(0, len(summary.topic_summaries)):
updater.update(str(i), {
'method': 'lda'
,'topic_id': i
,'words': ' '.join(summary.topic_summaries[i])
})
if (i > 0 and i % 50 == 0):
print 'Committing documents %s' % i
updater.apply()
print 'LDA bulk update'
updater.apply()
return summary
class LDARun(TopicsRun):
def __init__(self, project, client=None):
if client is None:
client = ElasticSearch(project)
self.project = project
self.client = client
config = Config.getInstance()
workdir = config.workdir_path('out')
#print workdir
os.system('mkdir -p {}'.format(workdir))
def wd(path):
return config.workdir_path('out', path.format(project))
self.BUG_IDS_DUMP_FILE = wd("{}-bug-ids.json")
self.TOKEN_DUMP_FILE = wd("{}-token-ids.json")
self.LDA_INPUT_FNAME = wd("{}-vr_lda_input.lda.txt")
self.LDA_ID_INPUT_FNAME = wd("{}-vr_lda_input.id.txt")
self.LDA_WORDS_INPUT_FNAME = wd("{}-vr_lda_input.words.txt")
self.LDA_PREDICTIONS_FNAME = wd("{}-predictions.dat")
self.LDA_TOPICS_FNAME = wd("{}-topics.dat")
self.LDA_CACHE_FILE = wd("{}-topics.dat.cache")
def load_token_ids(self):
if not os.path.exists(self.TOKEN_DUMP_FILE):
return []
with open(self.TOKEN_DUMP_FILE) as f:
return json.load(f)
def dump_token_ids(self, ids):
with open(self.TOKEN_DUMP_FILE, 'w') as f:
return json.dump(ids, f, indent=2)
def load_bug_ids(self):
if not os.path.exists(self.BUG_IDS_DUMP_FILE):
return []
with open(self.BUG_IDS_DUMP_FILE) as f:
return json.load(f)
def dump_bug_ids(self, ids):
with open(self.BUG_IDS_DUMP_FILE, 'w') as f:
return json.dump(ids, f, indent=2)
def load_lda_model_for_inference(self):
'''creates an LDA instance for inference. It has the
token ids from previously analyzed documents.
returns lda_model, analyzed_doc_ids
where analyzed_doc_ids is a set of previously analyzed docs'
ids
'''
lda_model = lda.LDA()
lda_model.load_token_ids(self.load_token_ids())
analyzed_doc_ids = set(self.load_bug_ids())
return lda_model, analyzed_doc_ids
# tasks for task_queue
class LDATask(task_queue.Task):
recoverable = True
def __init__(self, project, topic_count):
self.topic_count = topic_count
self.project = project
def run(self, worker=None, client=None):
run = LDARun(self.project, client)
loader = LDALoader(run, self.topic_count)
loader.compute_lda()
summaries = loader.update_summaries_in_database()
project.Project(self.project, client).update_timestamps([
project.Project.TOPICS_TIMESTAMP,
project.Project.TOPIC_SCORES_TIMESTAMP])
return loader, summaries
def __repr__(self):
return "LDATASK {} : {}".format(self.project, str(self.topic_count))
@staticmethod
def recover(data):
return LDATask(data['project'], data['topic_count'])
class LDAIncrementalTask(task_queue.Task):
recoverable = True
def __init__(self, project):
self.project = project
def run(self, worker=None, client=None):
run = LDARun(self.project, client)
loader = LDALoader.create_loader_for_inference(run)
loader.compute_lda_inference()
summaries = loader.update_summaries_in_database()
project.Project(self.project, client).update_timestamps([
project.Project.TOPIC_SCORES_TIMESTAMP])
return loader, summaries
@staticmethod
def recover(data):
return LDAIncrementalTask(data['project'])
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser('LDA analyser')
parser.add_argument('project', help='project name')
parser.add_argument('--topics', type=int, default=100,
help='number of topics to generate (no effect on incremental)')
parser.add_argument('--incremental', help='do an incremental analysis')
Config.add_args(parser)
args = parser.parse_args()
config = Config.build(args)
if args.incremental:
LDAIncrementalTask(args.project).run()
else:
print 'running LDA analysis on {} with {} topics'.format(
args.project, args.topics)
LDATask(args.project, args.topics).run()
| avg_line_length: 34.909836 | max_line_length: 110 | alphanum_fraction: 0.646513 | count_classes: 6,813 | score_classes: 0.799836 | count_generators: 0 | score_generators: 0 | count_decorators: 465 | score_decorators: 0.05459 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 2,025 | score_documentation: 0.237732 |
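A sketch of driving the task classes above directly rather than via the argparse entry point; the project name and topic count are examples, and it assumes Config has already been built as in the `__main__` block (the module is Python 2, per its print statements):

```python
# Hypothetical driver; mirrors what the __main__ block does. Config.build()
# normally consumes argparse args, so the exact setup depends on Config.
from lda_loader import LDATask, LDAIncrementalTask

loader, summaries = LDATask('myconference', 50).run()         # full LDA run
loader, summaries = LDAIncrementalTask('myconference').run()  # infer new docs only
```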
| hexsha: d086eb141826a0dd9e722b35bf04940deba291b8 | size: 1,003 | ext: py | lang: Python | path: api-gateway/services/mail_service.py | repo: Niweera/DNSTool-Middleware-API @ 0e83d9f62fb65d9223b86a7876b3f30b2771befb | licenses: ["Apache-2.0"] | max_stars_count: null | max_issues_count: 9 (2021-06-12T05:39:59.000Z – 2021-08-14T09:20:00.000Z) | max_forks_count: 2 (2021-05-22T15:33:50.000Z – 2021-08-28T08:51:25.000Z) | content: |
from flask_mail import Message
from mailer import mailer
from middleware.error_handling import write_log
class MailService:
@staticmethod
def send_welcome_email(email: str, full_name: str) -> None:
try:
email = Message(
subject="Welcome to DNSTool!",
recipients=[email],
body=f"Welcome {full_name} to DNSTool!",
)
mailer.send(email)
return
except Exception as e:
write_log("error", e)
@staticmethod
def send_verification_email(email: str, verification_link: str) -> None:
try:
email = Message(
subject="Verification Required!",
recipients=[email],
html=f"<h3>Please click on the following link to verify you</h3><br><a href='{verification_link}'>Click Here</a>",
)
mailer.send(email)
return
except Exception as e:
write_log("error", e)
| avg_line_length: 31.34375 | max_line_length: 130 | alphanum_fraction: 0.564307 | count_classes: 895 | score_classes: 0.892323 | count_generators: 0 | score_generators: 0 | count_decorators: 866 | score_decorators: 0.86341 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 201 | score_documentation: 0.200399 |
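mail_service.py imports a `mailer` object from a sibling `mailer` module that is not part of this record; a plausible sketch of that module using standard Flask-Mail configuration keys (server, credentials and sender are placeholders):

```python
# Hypothetical mailer.py; a Flask-Mail Mail instance is what the
# Message/send calls in MailService expect.
from flask import Flask
from flask_mail import Mail

app = Flask(__name__)
app.config.update(
    MAIL_SERVER='smtp.example.com',
    MAIL_PORT=587,
    MAIL_USE_TLS=True,
    MAIL_USERNAME='no-reply@example.com',
    MAIL_PASSWORD='change-me',
    MAIL_DEFAULT_SENDER='no-reply@example.com',
)
mailer = Mail(app)
```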
| hexsha: d0879d6a8986f5d89d403d25cf640af496e2854b | size: 2,390 | ext: py | lang: Python | path: tests/system/test_grpc_streams.py | repo: danoscarmike/gapic-generator-python @ 805645d5571dde05c6fb947c81f0f41f2ba10a98 | licenses: ["Apache-2.0"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content: |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google import showcase
metadata = (("showcase-trailer", "hello world"),)
def test_unary_stream(echo):
content = 'The hail in Wales falls mainly on the snails.'
responses = echo.expand({
'content': content,
}, metadata=metadata)
# Consume the response and ensure it matches what we expect.
# with pytest.raises(exceptions.NotFound) as exc:
for ground_truth, response in zip(content.split(' '), responses):
assert response.content == ground_truth
assert ground_truth == 'snails.'
assert responses.trailing_metadata() == metadata
def test_stream_unary(echo):
requests = []
requests.append(showcase.EchoRequest(content="hello"))
requests.append(showcase.EchoRequest(content="world!"))
response = echo.collect(iter(requests))
assert response.content == 'hello world!'
def test_stream_unary_passing_dict(echo):
requests = [{'content': 'hello'}, {'content': 'world!'}]
response = echo.collect(iter(requests))
assert response.content == 'hello world!'
def test_stream_stream(echo):
requests = []
requests.append(showcase.EchoRequest(content="hello"))
requests.append(showcase.EchoRequest(content="world!"))
responses = echo.chat(iter(requests), metadata=metadata)
contents = []
for response in responses:
contents.append(response.content)
assert contents == ['hello', 'world!']
assert responses.trailing_metadata() == metadata
def test_stream_stream_passing_dict(echo):
requests = [{'content': 'hello'}, {'content': 'world!'}]
responses = echo.chat(iter(requests), metadata=metadata)
contents = []
for response in responses:
contents.append(response.content)
assert contents == ['hello', 'world!']
assert responses.trailing_metadata() == metadata
| avg_line_length: 32.297297 | max_line_length: 74 | alphanum_fraction: 0.706695 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 924 | score_documentation: 0.386611 |
| hexsha: d088e305cc713c10d76eb917ab7c20df555901b9 | size: 1,469 | ext: py | lang: Python | path: day02/python/subesokun/solution.py | repo (stars): clssn/aoc-2019 @ a978e5235855be937e60a1e7f88d1ef9b541be15 | repo (issues, forks): sancho1241/aoc-2019 @ e0f63824c8250e0f84a42805e1a7ff7d9232002c | licenses: ["MIT"] | max_stars_count: 22 (2019-11-27T08:28:46.000Z – 2021-04-27T05:37:08.000Z) | max_issues_count: 77 (2019-11-16T17:22:42.000Z – 2021-05-10T20:36:36.000Z) | max_forks_count: 43 (2019-11-27T06:36:51.000Z – 2021-11-03T20:56:48.000Z) | content: |
INPUT_FILE_NAME = 'input.txt'
puzzle_input = None
with open(INPUT_FILE_NAME) as input_file:
puzzle_input = list(map(lambda val: int(val), input_file.readline().rstrip('\n').split(',')))
def run_instruction(opcode, param_1, param_2, param_3, memory):
if opcode == 1:
memory[param_3] = memory[param_1] + memory[param_2]
elif opcode == 2:
memory[param_3] = memory[param_1] * memory[param_2]
else:
raise Exception('Ooooppps')
def run_program(memory):
instruction_pointer = 0
while memory[instruction_pointer] != 99:
run_instruction(memory[instruction_pointer + 0], memory[instruction_pointer + 1], memory[instruction_pointer + 2], memory[instruction_pointer + 3], memory)
instruction_pointer += 4
return memory
memory_solution_part1 = puzzle_input.copy()
memory_solution_part1[1] = 12
memory_solution_part1[2] = 2
solution_part_1 = run_program(memory_solution_part1)
print('Solution to part 1: %i' % (solution_part_1[0],))
def find_noun_verb(output, memory):
for noun in range(0, 100):
for verb in range(0, 100):
memory_copy = memory.copy()
memory_copy[1] = noun
memory_copy[2] = verb
result = run_program(memory_copy)[0]
if result == output:
return (noun, verb)
(noun, verb) = find_noun_verb(19690720, puzzle_input)
solution_part_2 = 100 * noun + verb
print('Solution to part 2: %i' % (solution_part_2,))
| avg_line_length: 31.934783 | max_line_length: 163 | alphanum_fraction: 0.673247 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 76 | score_documentation: 0.051736 |
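The four worked examples from the Advent of Code 2019 day 2 statement make quick sanity checks for run_program above (the script reads input.txt and solves at import time, so try these with the functions copied out):

```python
assert run_program([1, 0, 0, 0, 99]) == [2, 0, 0, 0, 99]
assert run_program([2, 3, 0, 3, 99]) == [2, 3, 0, 6, 99]
assert run_program([2, 4, 4, 5, 99, 0]) == [2, 4, 4, 5, 99, 9801]
assert run_program([1, 1, 1, 4, 99, 5, 6, 0, 99]) == [30, 1, 1, 4, 2, 5, 6, 0, 99]
```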
| hexsha: d08a58e73bc10ca50c82c660e481d5fd9ab9cf8d | size: 6,299 | ext: py | lang: Python | path: opendata_module/anonymizer/iio/mongodb_manager.py | repo: ria-ee/monitor @ d5cb9384abf38394b35e760729649136cbbc7548 | licenses: ["MIT"] | max_stars_count: 10 (2017-12-01T11:59:54.000Z – 2021-11-08T10:30:35.000Z) | max_issues_count: 16 (2019-11-15T08:45:33.000Z – 2021-06-10T18:06:03.000Z) | max_forks_count: 13 (2017-11-22T08:46:57.000Z – 2021-12-16T06:51:07.000Z) | content: |
from pymongo import MongoClient
import pymongo
import datetime
import sqlite3 as sql
import os
import signal
from signal import SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGHUP
import traceback
from anonymizer.utils import logger_manager
import sys
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
ATEXIT_SINGLETON = None
def store_last_processed_timestamp(*args):
ATEXIT_SINGLETON.update_last_processed_timestamp(max_timestamp=ATEXIT_SINGLETON.last_processed_timestamp)
sys.exit(1)
for sig in (SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIGHUP):
signal.signal(sig, store_last_processed_timestamp)
class MongoDB_Manager(object):
def __init__(self, config, previous_run_manager=None):
self._logger = logger_manager.LoggerManager(logger_name='opendata-anonymizer', module_name='opendata')
global ATEXIT_SINGLETON
ATEXIT_SINGLETON = self
self._config = config
self.mongo_connection_string = "mongodb://{user}:{password}@{host}:{port}/{database}".format(
**{'user': config.mongo_db['user'],
'password': config.mongo_db['password'],
'host': config.mongo_db['host_address'],
'port': config.mongo_db['port'],
'database': config.mongo_db['auth_db']})
self._mongo_client = MongoClient(self.mongo_connection_string)
self._previous_run_manager = previous_run_manager if previous_run_manager else PreviousRunManager(config)
self.last_processed_timestamp = self._get_last_processed_timestamp()
def get_records(self, allowed_fields):
collection = self._mongo_client[self._config.mongo_db['database_name']][self._config.mongo_db['table_name']]
min_timestamp = self._get_last_processed_timestamp()
projection = {field: True for field in allowed_fields}
projection['correctorTime'] = True
batch_idx = 0
current_timestamp = datetime.datetime.now().timestamp()
for document in collection.find({
'correctorTime': {'$gt': min_timestamp, '$lte': current_timestamp},
'correctorStatus': 'done',
'client.clientXRoadInstance': {'$ne': None}
}, projection=projection, no_cursor_timeout=True).sort('correctorTime', pymongo.ASCENDING):
if batch_idx == 1000:
self.update_last_processed_timestamp(max_timestamp=self.last_processed_timestamp)
batch_idx = 0
self.last_processed_timestamp = document['correctorTime']
del document['_id']
del document['correctorTime']
yield self._add_missing_fields(document, allowed_fields)
batch_idx += 1
self.update_last_processed_timestamp(max_timestamp=self.last_processed_timestamp)
def is_alive(self):
try:
self._mongo_client[self._config.mongo_db['database_name']][self._config.mongo_db['table_name']].find_one()
return True
except Exception:
self._logger.log_error('mongodb_connection_failed',
("Failed to connect to mongodb with connection string {0}. ERROR: {1}".format(
self.mongo_connection_string, traceback.format_exc().replace('\n', '')))
)
return False
def _add_missing_fields(self, document, allowed_fields):
try:
existing_agents = [agent for agent in ['client', 'producer'] if agent in document]
for field in allowed_fields:
field_path = field.split('.')
if len(field_path) == 2 and field_path[0] in existing_agents:
if field_path[0] not in document:
document[field_path[0]] = {}
if field_path[1] not in document[field_path[0]]:
document[field_path[0]][field_path[1]] = self._get_default_value(field_path)
elif len(field_path) == 1:
if field_path[0] not in document:
document[field_path[0]] = self._get_default_value(field_path)
return document
except Exception:
self._logger.log_error('adding_missing_fields_failed',
("Failed adding missing fields from {0} to document {1}. ERROR: {2}".format(
str(allowed_fields), str(document), traceback.format_exc().replace('\n', ''))))
raise
def _get_default_value(self, field_path):
return None
def _get_last_processed_timestamp(self):
min_timestamp = self._previous_run_manager.get_previous_run()
return min_timestamp
def update_last_processed_timestamp(self, max_timestamp):
if max_timestamp:
self._previous_run_manager.set_previous_run(max_timestamp)
def __del__(self):
self.update_last_processed_timestamp(max_timestamp=self.last_processed_timestamp)
class PreviousRunManager(object):
initial_value = 0.0
def __init__(self, config):
self._config = config
self.mongo_connection_string = "mongodb://{user}:{password}@{host}:{port}/{database}".format(
**{'user': config.mongo_db['user'],
'password': config.mongo_db['password'],
'host': config.mongo_db['host_address'],
'port': config.mongo_db['port'],
'database': config.mongo_db['auth_db']})
self._mongo_client = MongoClient(self.mongo_connection_string)
def get_previous_run(self):
collection = self._mongo_client[self._config.mongo_db['state']['database_name']][
self._config.mongo_db['state']['table_name']]
entry = collection.find_one({'key': 'last_mongodb_timestamp'})
if entry:
return float(entry['value'])
else:
return self.initial_value
def set_previous_run(self, max_timestamp):
collection = self._mongo_client[self._config.mongo_db['state']['database_name']][
self._config.mongo_db['state']['table_name']]
collection.update(
{'key': 'last_mongodb_timestamp'},
{'key': 'last_mongodb_timestamp', 'value': str(max_timestamp)},
upsert=True
)
| avg_line_length: 40.378205 | max_line_length: 118 | alphanum_fraction: 0.640895 | count_classes: 5,672 | score_classes: 0.90046 | count_generators: 1,224 | score_generators: 0.194317 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 915 | score_documentation: 0.145261 |
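A sketch of the `config` object the managers above expect; only attribute access to `config.mongo_db` is used, so a plain class works. Connection details and collection names here are placeholders:

```python
# Hypothetical config stand-in covering every key the two managers read.
class AnonymizerConfig(object):
    mongo_db = {
        'user': 'anonymizer',
        'password': 'secret',
        'host_address': 'localhost',
        'port': 27017,
        'auth_db': 'auth_db',
        'database_name': 'query_db',
        'table_name': 'clean_data',
        'state': {'database_name': 'anonymizer_state', 'table_name': 'state'},
    }

manager = MongoDB_Manager(AnonymizerConfig())
print(manager.is_alive())
```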
| hexsha: d08c99da891c1e82a3ced1b3133024eb4b5c39fb | size: 826 | ext: py | lang: Python | path: viewer/models.py | repo: davhanks/digitalmyworld @ 0f718be3967d399dddc6105f0b9d4cbc0ab35764 | licenses: ["Apache-2.0"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content: |
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
class Calculation(models.Model):
'''Just an example model: a log of all calculations made'''
log_date = models.DateTimeField(auto_now_add=True)
num1 = models.FloatField(blank=True, null=True)
num2 = models.FloatField(blank=True, null=True)
operation = models.CharField(max_length=20, blank=True, null=True)
result = models.FloatField(blank=True, null=True)
def __str__(self):
return '%s %s %s = %s' % (self.num1, self.operation, self.num2, self.result)
# from polls import models as pmod
# questions = pmod.Question.objects.all()
# pmod.Question.objects.filter(question_text='This is the third question')
# q1 = pmod.Question.objects.get(id=2)
# .exclude() you can chain them together
| avg_line_length: 35.913043 | max_line_length: 80 | alphanum_fraction: 0.733656 | count_classes: 473 | score_classes: 0.572639 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 303 | score_documentation: 0.366828 |
| hexsha: d08cfb1d89201e35321b4d716317704e5a60a247 | size: 552 | ext: py | lang: Python | path: tests/test_dbapi_sqlite.py | repo (stars): cyanodbc/cyanodbc @ 6ed49ded15a545edf4b78886868daebc8c5d4874 | repo (issues, forks): detule/cyanodbc @ e7713c3cc3333a018409ec50ee1e5836a8d85f06 | licenses: ["MIT"] | max_stars_count: 2 (2020-07-10T17:36:00.000Z – 2020-08-12T14:57:48.000Z) | max_issues_count: 15 (2018-09-09T12:05:15.000Z – 2020-07-07T12:06:16.000Z) | max_forks_count: 1 (2020-07-02T10:58:07.000Z – 2020-07-02T10:58:07.000Z) | content: |
import cyanodbc
import dbapi20
from distro import linux_distribution
import pytest
class CyanodbcDBApiTest(dbapi20.DatabaseAPI20Test):
driver = cyanodbc
connect_args = ("Driver={SQLite3 ODBC Driver};Database="
"example.db;Timeout=1000;", )
""
def test_setoutputsize(self):
pass
def test_nextset(self):
pass # for sqlite no nextset()
@pytest.mark.skipif(linux_distribution()[2]=="xenial",
reason = "Strange behavior seen in Xenial")
def test_rowcount(self):
super().test_rowcount()
| avg_line_length: 26.285714 | max_line_length: 60 | alphanum_fraction: 0.684783 | count_classes: 467 | score_classes: 0.846014 | count_generators: 0 | score_generators: 0 | count_decorators: 171 | score_decorators: 0.309783 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 134 | score_documentation: 0.242754 |
| hexsha: d08f1ec8fdeeec92aebf3f03615a051f9221f14d | size: 802 | ext: py | lang: Python | path: problems/p050.py | repo: davisschenk/project-euler-python @ 1375412e6c8199ab02250bd67223c758d4df1725 | licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: 2 (2020-10-08T23:35:03.000Z – 2020-10-09T00:28:36.000Z) | content: |
from problem import Problem
from utils.primes import sieve_of_eratosthenes, simple_is_prime
class ConsecutivePrimeSum(Problem, name="Consecutive prime sum", expected=997651):
@Problem.solution()
def brute_force(self):
upper_bound = 1_000_000
primes = list(sieve_of_eratosthenes(4000))
max_length = 0
max_prime = 0
for start in range(len(primes)):
current_length = 0
for end in range(start, len(primes)):
s = sum(primes[start:end])
current_length += 1
if s > upper_bound:
break
elif simple_is_prime(s) and current_length > max_length:
max_length = current_length
max_prime = s
return max_prime
| avg_line_length: 30.846154 | max_line_length: 82 | alphanum_fraction: 0.588529 | count_classes: 707 | score_classes: 0.881546 | count_generators: 0 | score_generators: 0 | count_decorators: 620 | score_decorators: 0.773067 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 23 | score_documentation: 0.028678 |
| hexsha: d091d22d60b6b043a2100712328787d0097e7ec3 | size: 2,312 | ext: py | lang: Python | path: acapy_client/models/indy_pres_attr_spec.py | repo: dbluhm/acapy-client @ d92ef607ba2ff1152ec15429f2edb20976991424 | licenses: ["Apache-2.0"] | max_stars_count: 4 (2021-08-05T09:20:34.000Z – 2021-08-08T19:37:29.000Z) | max_issues_count: null | max_forks_count: 2 (2021-08-12T18:18:45.000Z – 2021-08-14T13:22:28.000Z) | content: |
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..types import UNSET, Unset
T = TypeVar("T", bound="IndyPresAttrSpec")
@attr.s(auto_attribs=True)
class IndyPresAttrSpec:
""" """
name: str
cred_def_id: Union[Unset, str] = UNSET
mime_type: Union[Unset, str] = UNSET
referent: Union[Unset, str] = UNSET
value: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
name = self.name
cred_def_id = self.cred_def_id
mime_type = self.mime_type
referent = self.referent
value = self.value
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
{
"name": name,
}
)
if cred_def_id is not UNSET:
field_dict["cred_def_id"] = cred_def_id
if mime_type is not UNSET:
field_dict["mime-type"] = mime_type
if referent is not UNSET:
field_dict["referent"] = referent
if value is not UNSET:
field_dict["value"] = value
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
name = d.pop("name")
cred_def_id = d.pop("cred_def_id", UNSET)
mime_type = d.pop("mime-type", UNSET)
referent = d.pop("referent", UNSET)
value = d.pop("value", UNSET)
indy_pres_attr_spec = cls(
name=name,
cred_def_id=cred_def_id,
mime_type=mime_type,
referent=referent,
value=value,
)
indy_pres_attr_spec.additional_properties = d
return indy_pres_attr_spec
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Any:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Any) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties
| avg_line_length: 27.2 | max_line_length: 77 | alphanum_fraction: 0.602076 | count_classes: 2,134 | score_classes: 0.92301 | count_generators: 0 | score_generators: 0 | count_decorators: 2,161 | score_decorators: 0.934689 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 122 | score_documentation: 0.052768 |
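A round-trip sketch for the generated model above; the attribute values are illustrative examples:

```python
spec = IndyPresAttrSpec(name="favourite_colour", cred_def_id="WgWxqztrNooG92RXvxSTWv:3:CL:20:tag")
payload = spec.to_dict()                 # UNSET fields are omitted
clone = IndyPresAttrSpec.from_dict(payload)
clone["extra"] = "kept as an additional property"   # __setitem__ path
assert clone.additional_keys == ["extra"]
```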
| hexsha: d0955d256ce56a7190e83eb9977c69b7f233f594 | size: 4,929 | ext: py | lang: Python | path: close_approach.py | repo: jepster/python_project_near_earth_objects @ 6e48da50685f15226bbc0adc66231c661596ac67 | licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content: |
from helpers import cd_to_datetime, datetime_to_str
class CloseApproach:
"""A close approach to Earth by an NEO.
A `CloseApproach` encapsulates information about the NEO's close approach
to Earth, such as the date and time (in UTC) of closest approach, the
nominal approach distance in astronomical units, and the relative approach
velocity in kilometers per second.
A `CloseApproach` also maintains a reference to its `NearEarthObject` -
initially, this information (the NEO's primary designation) is saved in a
private attribute, but the referenced NEO is eventually replaced in the
`NEODatabase` constructor.
"""
def __init__(self, **info):
"""Create a new `CloseApproach`.
:param string time: The date and time (in UTC) of closest approach.
NASA's format, at least in the `cd`
field of close approach data, uses the English locale's month names.
For example, December 31st, 2020 at noon
is: 2020-Dec-31 12:00
:param float distance: The nominal approach distance in astronomical
units.
:param float velocity: The relative approach velocity in kilometers per
second.
:param NearEarthObject neo: Reference to its `NearEarthObject` -
initially, this information
(the NEO's primary designation) is saved in a private attribute, but
the referenced NEO is
eventually replaced in the `NEODatabase` constructor.
"""
for key, value in info.items():
# assign the designation parameter
if key.lower() == 'des':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not string
self._designation = str(value)
except ValueError:
# print the text message
print(f'The type of {key} is not string')
# assign the time parameter
elif key.lower() == 'cd':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not string
self.time = str(value)
self.time = cd_to_datetime(self.time)
except ValueError:
# print the text message
print(f'The type of {key} is not string')
# assign the distance parameter
elif key.lower() == 'dist':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not float
self.distance = float(value)
except ValueError:
# print the text message
print(f'The type of {key} is not float')
# assign the velocity parameter
elif key.lower() == 'v_rel':
# check the value of the parameter to avoid
# an inappropriate value
try:
# if the type of value is not float
self.velocity = float(value)
except ValueError:
# print the text message
print(f'The type of {key} is not float')
self.neo = self._designation
@property
def time_str(self):
"""Return a formatted representation of this `CloseApproach`'s
approach time.
The value in `self.time` should be a Python `datetime` object. While a
`datetime` object has a string representation, the default
representation includes seconds - significant figures that don't
exist in our input data set.
The `datetime_to_str` method converts a `datetime` object to a
formatted string that can be used in human-readable representations and
in serialization to CSV and JSON files.
"""
return f"Approach time of {self._designation} was at " \
f"{datetime_to_str(self.time)}"
def get_neo_primary_designation(self) -> str:
return self._designation
@property
def designation(self):
"""To access to the self._designation.
:return: self._designation
"""
return self._designation
def __str__(self):
"""Return `str(self)`."""
return f"A CloseApproach time={self.time_str} " \
f"distance={self.distance} velocity={self.velocity} " \
f"neo={self.neo}"
def __repr__(self):
"""Return `repr(self)`, a computer-readable string representation of
this object."""
return (
f"CloseApproach(time={self.time_str!r}, "
f"distance={self.distance:.2f}, "f"velocity={self.velocity:.2f}, "
f"neo={self.neo!r})")
| avg_line_length: 39.432 | max_line_length: 79 | alphanum_fraction: 0.581862 | count_classes: 4,874 | score_classes: 0.988842 | count_generators: 0 | score_generators: 0 | count_decorators: 872 | score_decorators: 0.176912 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 3,224 | score_documentation: 0.654088 |
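A construction sketch for CloseApproach above, using the lower-cased keys its __init__ matches (`des`, `cd`, `dist`, `v_rel`); the values are illustrative, and cd_to_datetime/datetime_to_str come from the project's helpers module:

```python
ca = CloseApproach(des='433', cd='2020-Dec-31 12:00', dist=0.7, v_rel=5.59)
print(ca.designation)   # '433'
print(ca.time_str)      # "Approach time of 433 was at <formatted datetime>"
print(ca)               # __str__ with time, distance, velocity and neo
```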
| hexsha: d096f230d88b0cb0b44ad27a15da83bc18edf195 | size: 11,023 | ext: py | lang: Python | path: terncy/terncy.py | repo: rxwen/python-terncy @ 69be427c39118f122554a300e6e82ec24ad48bc1 | licenses: ["MIT"] | max_stars_count: 1 (2020-06-30T07:06:14.000Z – 2020-06-30T07:06:14.000Z) | max_issues_count: null | max_forks_count: 1 (2020-12-26T11:20:42.000Z – 2020-12-26T11:20:42.000Z) | content: |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import asyncio
import logging
import json
import ssl
import uuid
from terncy.version import __version__
import terncy.event as event
import ipaddress
from datetime import datetime
from enum import Enum
from zeroconf import ServiceBrowser, Zeroconf
import aiohttp
import websockets
_LOGGER = logging.getLogger(__name__)
def _next_req_id():
return uuid.uuid4().hex[0:8]
class TokenState(Enum):
INVALID = -1
REQUESTED = 1
APPROVED = 3
TERNCY_HUB_SVC_NAME = "_websocket._tcp.local."
WAIT_RESP_TIMEOUT_SEC = 5
_discovery_engine = None
_discovery_browser = None
discovered_homecenters = {}
class _TerncyZCListener:
def __init__(self):
pass
def remove_service(self, zeroconf, svc_type, name):
global discovered_homecenters
dev_id = name.replace("." + svc_type, "")
if dev_id in discovered_homecenters:
discovered_homecenters.pop(dev_id)
def update_service(self, zeroconf, svc_type, name):
global discovered_homecenters
info = zeroconf.get_service_info(svc_type, name)
dev_id = name.replace("." + svc_type, "")
txt_records = {"dev_id": dev_id}
ip = ""
if len(info.addresses) > 0:
if len(info.addresses[0]) == 4:
ip = str(ipaddress.IPv4Address(info.addresses[0]))
if len(info.addresses[0]) == 16:
ip = str(ipaddress.IPv6Address(info.addresses[0]))
txt_records["ip"] = ip
txt_records["port"] = info.port
for k in info.properties:
txt_records[k.decode("utf-8")] = info.properties[k].decode("utf-8")
discovered_homecenters[dev_id] = txt_records
def add_service(self, zeroconf, svc_type, name):
global discovered_homecenters
info = zeroconf.get_service_info(svc_type, name)
dev_id = name.replace("." + svc_type, "")
txt_records = {"dev_id": dev_id}
ip = ""
if len(info.addresses) > 0:
if len(info.addresses[0]) == 4:
ip = str(ipaddress.IPv4Address(info.addresses[0]))
if len(info.addresses[0]) == 16:
ip = str(ipaddress.IPv6Address(info.addresses[0]))
txt_records["ip"] = ip
txt_records["port"] = info.port
for k in info.properties:
txt_records[k.decode("utf-8")] = info.properties[k].decode("utf-8")
discovered_homecenters[dev_id] = txt_records
async def start_discovery():
global _discovery_engine
global _discovery_browser
if _discovery_engine is None:
zc = Zeroconf()
listener = _TerncyZCListener()
browser = ServiceBrowser(zc, TERNCY_HUB_SVC_NAME, listener)
_discovery_engine = zc
_discovery_browser = browser
async def stop_discovery():
global _discovery_engine
global _discovery_browser
if _discovery_engine is not None:
_discovery_browser.cancel()
_discovery_engine.close()
_discovery_engine = None
_discovery_browser = None
class Terncy:
def __init__(self, client_id, dev_id, ip, port=443, username="", token=""):
self.client_id = client_id
self.dev_id = dev_id
self.ip = ip
self.port = port
self.username = username
self.token = token
self.token_id = -1
self.token_state = TokenState.INVALID
self._connection = None
self._pending_requests = {}
self._event_handler = None
def is_connected(self):
return self._connection is not None
def register_event_handler(self, handler):
self._event_handler = handler
async def request_token(self, username, name):
url = f"https://{self.ip}:{self.port}/v1/tokens:request"
async with aiohttp.ClientSession() as session:
data = {
"reqId": _next_req_id(),
"intent": "requestToken",
"clientId": self.client_id,
"username": self.username,
"name": name,
"role": 3,
}
async with session.post(
url,
data=json.dumps(data),
ssl=ssl._create_unverified_context(),
) as response:
body = await response.json()
_LOGGER.debug(f"resp body: {body}")
state = TokenState.INVALID
token = ""
token_id = -1
if "state" in body:
state = body["state"]
if "id" in body:
token_id = body["id"]
if "token" in body:
token = body["token"]
return response.status, token_id, token, state
async def delete_token(self, token_id, token):
url = f"https://{self.ip}:{self.port}/v1/tokens:delete"
async with aiohttp.ClientSession() as session:
data = {
"reqId": _next_req_id(),
"intent": "deleteToken",
"clientId": self.client_id,
"id": token_id,
"token": token,
}
async with session.post(
url,
data=json.dumps(data),
ssl=ssl._create_unverified_context(),
) as response:
_LOGGER.debug(f"resp: {response}")
return response.status
async def check_token_state(self, token_id, token=""):
url = f"https://{self.ip}:{self.port}/v1/tokens:query"
async with aiohttp.ClientSession() as session:
data = {
"reqId": _next_req_id(),
"intent": "queryToken",
"clientId": self.client_id,
"token": token,
"id": token_id,
}
async with session.post(
url,
data=json.dumps(data),
ssl=ssl._create_unverified_context(),
) as response:
body = await response.json()
_LOGGER.debug(f"resp: {response}")
state = TokenState.INVALID
if "state" in body:
state = body["state"]
return response.status, state
async def start(self):
"""Connect to Terncy system and start event monitor."""
_LOGGER.info(f"Terncy v{__version__} starting connection to:")
_LOGGER.info(f"{self.dev_id} {self.ip}:{self.port}")
return await self._start_websocket()
async def stop(self):
if self._connection:
await self._connection.close()
self._connection = None
async def _start_websocket(self):
url = f"wss://{self.ip}:{self.port}/ws/json?clientId={self.client_id}&username={self.username}&token={self.token}"
try:
ssl_no_verify = ssl._create_unverified_context()
async with websockets.connect(
url, ping_timeout=None, ping_interval=None, ssl=ssl_no_verify
) as ws:
self._connection = ws
if self._event_handler:
_LOGGER.info(f"connected to {self.dev_id}")
self._event_handler(self, event.Connected())
async for msg in ws:
msgObj = json.loads(msg)
_LOGGER.debug(f"recv {self.dev_id} msg: {msgObj}")
if "rspId" in msgObj:
rsp_id = msgObj["rspId"]
if rsp_id in self._pending_requests:
req = self._pending_requests[rsp_id]
req["rsp"] = msgObj
req["event"].set()
if "intent" in msgObj and msgObj["intent"] == "event":
if self._event_handler:
ev = event.EventMessage(msgObj)
self._event_handler(self, ev)
if "intent" in msgObj and msgObj["intent"] == "ping":
await ws.send('{"intent":"pong"}')
except (
aiohttp.client_exceptions.ClientConnectionError,
websockets.exceptions.ConnectionClosedError,
ConnectionRefusedError,
OSError,
websockets.exceptions.InvalidStatusCode,
) as e:
_LOGGER.info(f"disconnect with {self.dev_id} {e}")
if self._event_handler:
self._event_handler(self, event.Disconnected())
self._connection = None
return
async def _wait_for_response(self, req_id, req, timeout):
""" return the request and its response """
evt = asyncio.Event()
response_desc = {
"req": req,
"time": datetime.now(),
"event": evt,
}
self._pending_requests[req_id] = response_desc
aw = asyncio.ensure_future(evt.wait())
done, pending = await asyncio.wait({aw}, timeout=timeout)
if aw in done:
pass
else:
_LOGGER.info(f"wait {self.dev_id} response timeout")
if req_id in self._pending_requests:
self._pending_requests.pop(req_id)
return response_desc
async def get_entities(
self, ent_type, wait_result=False, timeout=WAIT_RESP_TIMEOUT_SEC
):
if self._connection is None:
_LOGGER.info(f"no connection with {self.dev_id}")
return None
req_id = _next_req_id()
data = {
"reqId": req_id,
"intent": "sync",
"type": ent_type,
}
await self._connection.send(json.dumps(data))
if wait_result:
return await self._wait_for_response(req_id, data, timeout)
async def set_onoff(
self, ent_id, state, wait_result=False, timeout=WAIT_RESP_TIMEOUT_SEC
):
if self._connection is None:
_LOGGER.info(f"no connection with {self.dev_id}")
return None
return await self.set_attribute(ent_id, "on", state, 0, wait_result)
async def set_attribute(
self,
ent_id,
attr,
attr_val,
method,
wait_result=False,
timeout=WAIT_RESP_TIMEOUT_SEC,
):
if self._connection is None:
_LOGGER.info(f"no connection with {self.dev_id}")
return None
req_id = _next_req_id()
data = {
"reqId": req_id,
"intent": "execute",
"entities": [
{
"id": ent_id,
"attributes": [
{
"attr": attr,
"value": attr_val,
"method": method,
}
],
}
],
}
await self._connection.send(json.dumps(data))
if wait_result:
return await self._wait_for_response(req_id, data, timeout)
| avg_line_length: 34.021605 | max_line_length: 122 | alphanum_fraction: 0.547945 | count_classes: 9,849 | score_classes: 0.893495 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 7,888 | score_async_functions: 0.715595 | count_documentation: 1,317 | score_documentation: 0.119477 |
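A pairing-flow sketch for the client above; the client id, device id and IP are placeholders that would normally come from start_discovery(), and the token request must be approved on the hub before start() will authenticate:

```python
import asyncio
from terncy.terncy import Terncy

async def main():
    t = Terncy('my-client-id', 'box-00-11-22', '192.168.1.10', username='demo')
    status, token_id, token, state = await t.request_token('demo', 'My app')
    t.token_id, t.token = token_id, token
    # ...approve the request on the hub, then open the event websocket:
    await t.start()

asyncio.get_event_loop().run_until_complete(main())
```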
| hexsha: d0974c0fc40e9d8831ae2a7987183cbff51e0997 | size: 110 | ext: py | lang: Python | path: scripts/item/consume_2430769.py | repo: Snewmy/swordie @ ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | licenses: ["MIT"] | max_stars_count: 9 (2021-04-26T11:59:29.000Z – 2021-12-20T13:15:27.000Z) | max_issues_count: null | max_forks_count: 6 (2021-07-14T06:32:05.000Z – 2022-02-06T02:32:56.000Z) | content: |
if sm.getSlotsLeftToAddByInvType(2) < 8:
sm.dispose()
sm.addInventorySlotsByInvType(8, 2)
sm.consumeItem()
| avg_line_length: 27.5 | max_line_length: 40 | alphanum_fraction: 0.763636 | count_classes: 0 | score_classes: 0 | count_generators: 0 | score_generators: 0 | count_decorators: 0 | score_decorators: 0 | count_async_functions: 0 | score_async_functions: 0 | count_documentation: 0 | score_documentation: 0 |
| hexsha: d0995eaa88ebe8af4a5de82f32eaa66b6c6943ba | size: 5,310 | ext: py | lang: Python | path: ssc/visualization/vispy_voxel.py | repo: DavidGillsjo/bssc-net @ e1ffa643a2c8e3df34225f0756bad0dec9f801a2 | licenses: ["MIT"] | max_stars_count: 5 (2021-02-25T01:59:46.000Z – 2022-02-09T12:23:30.000Z) | max_issues_count: null | max_forks_count: 1 (2021-04-10T04:11:02.000Z – 2021-04-10T04:11:02.000Z) | content: |
import vispy
vispy.use(app='egl')
from moviepy.editor import VideoClip
import numpy as np
from vispy import scene, io, visuals
from vispy.color import *
import cv2
# Check the application correctly picked up egl
assert vispy.app.use_app().backend_name == 'egl', 'Not using EGL'
class AlphaAwareCM(BaseColormap):
def __init__(self, color_list):
bins = np.linspace(0,1,len(color_list)+1)
self.glsl_map = 'vec4 translucent_grays(float t) {\n'
for c_idx, (i1, i2) in enumerate(zip(bins[:-1], bins[1:])):
return_vec = 'return vec4({0[0]:.4},{0[1]:.4},{0[2]:.4},{0[3]:.4});'.format(color_list[c_idx].rgba.flat)
if c_idx == 0:
self.glsl_map += ' if (t < {:.2}) {{\n {}\n }}'.format(i2, return_vec)
elif c_idx == len(color_list):
self.glsl_map += ' else {{}\n {}\n }}'.format(return_vec)
else:
self.glsl_map += ' else if (({:.2} <= t) && (t < {:.2})) {{\n {}\n }}'.format(i1, i2, return_vec)
self.glsl_map += '\n}'
super().__init__()
def plot_voxels(gridLabels, suncg_labels, vox_min, vox_unit, save_path = None, animate = False):
nbr_classes = len(suncg_labels)
canvas = scene.SceneCanvas(keys='interactive', bgcolor='w', size = (1920,1080))
view = canvas.central_widget.add_view()
azimuth = 30
view.camera = scene.TurntableCamera(up='y', distance=4, fov=70,
azimuth=azimuth, elevation=30.)
# Sample colormap and adjust alpha
colormap = get_colormap('cubehelix')
cm_sampled = []
for i, (iclass, sample_f) in enumerate(zip(suncg_labels, np.linspace(0,1,nbr_classes))):
if iclass.lower() in ('free', 'ceiling'):
alpha = 0
elif iclass.lower() in ('floor', 'wall', 'window'):
alpha = 0.6
else:
alpha = 1.0
cm_sampled.append(Color(color=colormap[sample_f].rgb, alpha=alpha))
my_cm = AlphaAwareCM(cm_sampled)
volume = scene.visuals.Volume(gridLabels, relative_step_size = 0.1, method='mip', parent=view.scene, cmap = my_cm, clim = [0, nbr_classes-1], emulate_texture = False)
volume.transform = scene.transforms.MatrixTransform()
volume.transform.scale(3*[vox_unit])
volume.transform.translate(3*[-vox_unit*gridLabels.shape[0]/2.0])
if save_path is None:
return
def make_frame(t):
view.camera.set_state({'azimuth': azimuth+t*90})
return canvas.render()
if animate:
animation = VideoClip(make_frame, duration=3)
animation.write_gif('voxel.gif', fps=8, opt='OptimizePlus')
else:
img = canvas.render()
cv2.imwrite('voxel.png', img[::-1])
def scatter_plot_voxels(gridLabels, suncg_labels, vox_min, vox_unit, save_path = None, animate = False):
nbr_classes = len(suncg_labels)
occMask = gridLabels > 0
xyz = np.nonzero(occMask)
positions = np.vstack([xyz[0], xyz[1], xyz[2]])
gridLabelsMasked = gridLabels[occMask]
canvas = scene.SceneCanvas(keys='interactive', bgcolor='w', size = (1920,1080))
view = canvas.central_widget.add_view()
azimuth = 30
view.camera = scene.TurntableCamera(up='y', distance=4, fov=70,
azimuth=azimuth, elevation=30.)
# Sample colormap and adjust alpha
colormap = get_colormap('hsl', value=1.0, saturation=0.8, ncolors = nbr_classes)
pos_color = np.zeros((positions.shape[1], 4))
cm_sampled = []
for i, (iclass, sample_f) in enumerate(zip(suncg_labels[1:], np.linspace(0,1,nbr_classes-1))):
if iclass.lower() in ('floor', 'wall', 'window'):
alpha = 0.5
elif iclass.lower() == 'ceiling':
alpha = 0.0
else:
alpha = 1.0
base_color = colormap[sample_f].rgba.flatten()
base_color[3] = alpha
pos_color[i==gridLabelsMasked] = base_color
Scatter3D = scene.visuals.create_visual_node(visuals.MarkersVisual)
p1 = Scatter3D(parent=view.scene)
p1.set_gl_state('translucent', blend=True, depth_test=True)
p1.set_data(positions.T, face_color=pos_color, symbol='disc', size=10,
edge_width=0.5, edge_color='k')
p1.transform = scene.transforms.MatrixTransform()
p1.transform.scale(3*[vox_unit])
p1.transform.translate(3*[-vox_unit*gridLabels.shape[0]/2.0])
if save_path is None:
return
def make_frame(t):
view.camera.set_state({'azimuth': azimuth+t*90})
return canvas.render()
    if animate:
        animation = VideoClip(make_frame, duration=3)
        animation.write_gif(os.path.join(save_path, 'voxel.gif'), fps=8, opt='OptimizePlus')
    else:
        img = canvas.render()
        # canvas.render() returns RGBA, so convert to BGRA for OpenCV; keep the vertical flip
        cv2.imwrite(os.path.join(save_path, 'voxel.png'), cv2.cvtColor(img, cv2.COLOR_RGBA2BGRA)[::-1])
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Test plotting voxels')
parser.add_argument('gt_file', type=str, help='Path to gt file')
parser.add_argument('--animate', action='store_true', help='Yield GIF instead of PNG')
args = parser.parse_args()
from ssc.data.suncg_mapping import SUNCGMapping
import os
labels = SUNCGMapping()
gt_npz = np.load(args.gt_file)
scatter_plot_voxels(gt_npz['voxels'], labels.get_classes(), gt_npz['vox_min'], gt_npz['vox_unit'], save_path = os.getcwd() , animate = args.animate)
| 38.759124 | 170 | 0.634087 | 802 | 0.151036 | 0 | 0 | 0 | 0 | 0 | 0 | 727 | 0.136911 |
d09a191e2a3804cf26b16a157b643e61f06cbb1c | 69,906 | py | Python | __init__.py | state-of-the-art/BlendNet | 0a303e34081b820370c9528c807f276eefb122dc | [
"Apache-2.0"
]
| 66 | 2019-10-30T13:39:13.000Z | 2022-03-23T18:33:03.000Z | __init__.py | state-of-the-art/BlendNet | 0a303e34081b820370c9528c807f276eefb122dc | [
"Apache-2.0"
]
| 97 | 2019-10-18T16:48:50.000Z | 2022-02-13T18:58:45.000Z | __init__.py | state-of-the-art/BlendNet | 0a303e34081b820370c9528c807f276eefb122dc | [
"Apache-2.0"
]
| 12 | 2019-11-23T12:53:52.000Z | 2021-08-09T21:15:48.000Z | bl_info = {
'name': 'BlendNet - distributed cloud render',
'author': 'www.state-of-the-art.io',
'version': (0, 4, 0),
'warning': 'development version',
'blender': (2, 80, 0),
'location': 'Properties --> Render --> BlendNet Render',
'description': 'Allows to easy allocate resources in cloud and '
'run the cycles rendering with getting preview '
'and results',
'wiki_url': 'https://github.com/state-of-the-art/BlendNet/wiki',
'tracker_url': 'https://github.com/state-of-the-art/BlendNet/issues',
'category': 'Render',
}
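# Blender add-on reload support: when the add-on is re-registered, 'bpy' is
# already in locals(), so reload the submodules instead of importing them again.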
if 'bpy' in locals():
import importlib
importlib.reload(BlendNet)
importlib.reload(blend_file)
else:
from . import (
BlendNet,
)
from .BlendNet import blend_file
import os
import time
import tempfile
from datetime import datetime
import bpy
from bpy.props import (
BoolProperty,
IntProperty,
StringProperty,
EnumProperty,
PointerProperty,
CollectionProperty,
)
class BlendNetAddonPreferences(bpy.types.AddonPreferences):
bl_idname = __package__
resource_provider: EnumProperty(
name = 'Provider',
description = 'Engine to provide resources for rendering',
items = BlendNet.addon.getProvidersEnumItems,
update = lambda self, context: BlendNet.addon.selectProvider(self.resource_provider),
)
blendnet_show_panel: BoolProperty(
name = 'Show BlendNet',
description = 'Show BlendNet render panel',
default = True,
)
# Advanced
blender_dist: EnumProperty(
name = 'Blender dist',
description = 'Blender distributive to use on manager/agents. '
'By default it\'s set to the current blender version and if '
'you want to change it - you will deal with the custom URL',
items = BlendNet.addon.fillAvailableBlenderDists,
update = lambda self, context: BlendNet.addon.updateBlenderDistProp(self.blender_dist),
)
blender_dist_url: StringProperty(
name = 'Blender dist URL',
description = 'URL to download the blender distributive',
default = '',
)
blender_dist_checksum: StringProperty(
name = 'Blender dist checksum',
description = 'Checksum of the distributive to validate the binary',
default = '',
)
blender_dist_custom: BoolProperty(
name = 'Custom dist URL',
description = 'Use custom url instead the automatic one',
default = False,
update = lambda self, context: BlendNet.addon.updateBlenderDistProp(),
)
session_id: StringProperty(
name = 'Session ID',
description = 'Identifier of the session and allocated resources. '
'It is used to properly find your resources in the GCP '
'project and separate your resources from the other ones. '
'Warning: Please be careful with this option and don\'t '
'change it if you don\'t know what it\'s doing',
maxlen = 12,
update = lambda self, context: BlendNet.addon.genSID(self, 'session_id'),
)
manager_instance_type: EnumProperty(
name = 'Manager size',
description = 'Selected manager instance size',
items = BlendNet.addon.fillAvailableInstanceTypesManager,
)
manager_ca_path: StringProperty(
name = 'CA certificate',
description = 'Certificate Authority certificate pem file location',
subtype = 'FILE_PATH',
default = '',
)
manager_address: StringProperty(
name = 'Address',
description = 'If you using the existing Manager service put address here '
'(it will be automatically created otherwise)',
default = '',
)
manager_port: IntProperty(
name = 'Port',
description = 'TLS tcp port to communicate Addon with Manager service',
min = 1,
max = 65535,
default = 8443,
)
manager_user: StringProperty(
name = 'User',
description = 'HTTP Basic Auth username (will be generated if empty)',
maxlen = 32,
default = 'blendnet-manager',
)
manager_password: StringProperty(
name = 'Password',
description = 'HTTP Basic Auth password (will be generated if empty)',
subtype = 'PASSWORD',
maxlen = 128,
default = '',
update = lambda self, context: BlendNet.addon.hidePassword(self, 'manager_password'),
)
manager_agent_instance_type: EnumProperty(
name = 'Agent size',
description = 'Selected agent instance size',
items = BlendNet.addon.fillAvailableInstanceTypesAgent,
)
manager_agents_max: IntProperty(
name = 'Agents max',
description = 'Maximum number of agents in Manager\'s pool',
min = 1,
max = 65535,
default = 3,
)
agent_use_cheap_instance: BoolProperty(
name = 'Use cheap VM',
description = 'Use cheap instances to save money',
default = True,
)
agent_cheap_multiplier: EnumProperty(
name = 'Cheap multiplier',
description = 'Way to choose the price to get a cheap VM. '
'Some providers allows to choose the maximum price for the instance '
'and it could be calculated from the ondemand (max) price multiplied by this value.',
items = BlendNet.addon.getCheapMultiplierList,
)
agent_port: IntProperty(
name = 'Port',
description = 'TLS tcp port to communicate Manager with Agent service',
min = 1,
max = 65535,
default = 9443,
)
agent_user: StringProperty(
name = 'User',
description = 'HTTP Basic Auth username (will be generated if empty)',
maxlen = 32,
default = 'blendnet-agent',
)
agent_password: StringProperty(
name = 'Password',
description = 'HTTP Basic Auth password (will be generated if empty)',
subtype = 'PASSWORD',
maxlen = 128,
default = '',
update = lambda self, context: BlendNet.addon.hidePassword(self, 'agent_password'),
)
# Hidden
show_advanced: BoolProperty(
name = 'Advanced Properties',
description = 'Show/Hide the advanced properties',
default = False,
)
manager_password_hidden: StringProperty(
subtype = 'PASSWORD',
update = lambda self, context: BlendNet.addon.genPassword(self, 'manager_password_hidden'),
)
agent_password_hidden: StringProperty(
subtype = 'PASSWORD',
update = lambda self, context: BlendNet.addon.genPassword(self, 'agent_password_hidden'),
)
def draw(self, context):
layout = self.layout
# Provider
box = layout.box()
row = box.row()
split = box.split(factor=0.8)
split.prop(self, 'resource_provider')
info = BlendNet.addon.getProviderDocs(self.resource_provider).split('\n')
for line in info:
if line.startswith('Help: '):
split.operator('wm.url_open', text='How to setup', icon='HELP').url = line.split(': ', 1)[-1]
provider_settings = BlendNet.addon.getProviderSettings()
for key, data in provider_settings.items():
path = 'provider_' + self.resource_provider + '_' + key
if not path in self.__class__.__annotations__:
print('ERROR: Unable to find provider setting:', path)
continue
if path not in self or self[path] is None:
self[path] = data.get('value')
box.prop(self, path)
messages = BlendNet.addon.getProviderMessages(self.resource_provider)
for msg in messages:
box.label(text=msg, icon='ERROR')
if not BlendNet.addon.checkProviderIsSelected():
err = BlendNet.addon.getProviderDocs(self.resource_provider).split('\n')
for line in err:
box.label(text=line.strip(), icon='ERROR')
return
if self.resource_provider != 'local':
box = box.box()
box.label(text='Collected cloud info:')
provider_info = BlendNet.addon.getProviderInfo(context)
if 'ERRORS' in provider_info:
for err in provider_info['ERRORS']:
box.label(text=err, icon='ERROR')
for key, value in provider_info.items():
if key == 'ERRORS':
continue
split = box.split(factor=0.5)
split.label(text=key, icon='DOT')
split.label(text=value)
# Advanced properties panel
advanced_icon = 'TRIA_RIGHT' if not self.show_advanced else 'TRIA_DOWN'
box = layout.box()
box.prop(self, 'show_advanced', emboss=False, icon=advanced_icon)
if self.show_advanced:
if self.resource_provider != 'local':
row = box.row()
row.prop(self, 'session_id')
row = box.row(align=True)
row.prop(self, 'blender_dist_custom', text='')
if not self.blender_dist_custom:
row.prop(self, 'blender_dist')
else:
row.prop(self, 'blender_dist_url')
box.row().prop(self, 'blender_dist_checksum')
box_box = box.box()
box_box.label(text='Manager')
if self.resource_provider != 'local':
row = box_box.row()
row.prop(self, 'manager_instance_type', text='Type')
row = box_box.row()
price = BlendNet.addon.getManagerPriceBG(self.manager_instance_type, context)
if price[0] < 0.0:
row.label(text='WARNING: Unable to find price for the type "%s": %s' % (
self.manager_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated price: ~%s/Hour (%s)' % (round(price[0], 12), price[1]))
if self.resource_provider == 'local':
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_address')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_ca_path')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_port')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_user')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'manager_password')
box_box = box.box()
box_box.label(text='Agent')
if self.resource_provider != 'local':
row = box_box.row()
row.prop(self, 'agent_use_cheap_instance')
if 'Cheap instances not available' in provider_info.get('ERRORS', []):
row.enabled = False
else:
row.prop(self, 'agent_cheap_multiplier')
row = box_box.row()
row.enabled = not BlendNet.addon.isManagerCreated()
row.prop(self, 'manager_agent_instance_type', text='Agents type')
row.prop(self, 'manager_agents_max', text='Agents max')
row = box_box.row()
price = BlendNet.addon.getAgentPriceBG(self.manager_agent_instance_type, context)
if price[0] < 0.0:
row.label(text='ERROR: Unable to find price for the type "%s": %s' % (
self.manager_agent_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated combined price: ~%s/Hour (%s)' % (
round(price[0] * self.manager_agents_max, 12), price[1]
))
min_price = BlendNet.addon.getMinimalCheapPriceBG(self.manager_agent_instance_type, context)
if min_price > 0.0:
row = box_box.row()
row.label(text='Minimal combined price: ~%s/Hour' % (
round(min_price * self.manager_agents_max, 12),
))
if price[0] <= min_price:
row = box_box.row()
row.label(text='ERROR: Selected cheap price is lower than minimal one', icon='ERROR')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'agent_port')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'agent_user')
row = box_box.row()
row.use_property_split = True
row.prop(self, 'agent_password')
class BlendNetSceneSettings(bpy.types.PropertyGroup):
scene_memory_req: IntProperty(
name = 'Scene RAM to render',
description = 'Required memory to render the scene in GB',
min = 0,
max = 65535,
default = 0,
)
@classmethod
def register(cls):
bpy.types.Scene.blendnet = PointerProperty(
name = 'BlendNet Settings',
description = 'BlendNet scene settings',
type = cls
)
@classmethod
def unregister(cls):
if hasattr(bpy.types.Scene, 'blendnet'):
del bpy.types.Scene.blendnet
class BlendNetManagerTask(bpy.types.PropertyGroup):
'''Class contains the manager task information'''
name: StringProperty()
create_time: StringProperty()
start_time: StringProperty()
end_time: StringProperty()
state: StringProperty()
done: StringProperty()
received: StringProperty()
class BlendNetSessionProperties(bpy.types.PropertyGroup):
manager_tasks: CollectionProperty(
name = 'Manager tasks',
description = 'Contains all the tasks that right now is available '
'on manager',
type = BlendNetManagerTask,
)
manager_tasks_idx: IntProperty(default=0)
status: StringProperty(
name = 'BlendNet status',
description = 'BlendNet is performing some operation',
default = 'idle',
)
@classmethod
def register(cls):
bpy.types.WindowManager.blendnet = PointerProperty(
name = 'BlendNet Session Properties',
description = 'Just current status of process for internal use',
type = cls,
)
@classmethod
def unregister(cls):
if hasattr(bpy.types.WindowManager, 'blendnet'):
del bpy.types.WindowManager.blendnet
class BlendNetToggleManager(bpy.types.Operator):
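    '''Modal operator that starts or stops the Manager instance and polls on a timer until the transition completes'''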
bl_idname = 'blendnet.togglemanager'
bl_label = ''
bl_description = 'Start/Stop manager instance'
_timer = None
_last_run = 0
@classmethod
def poll(cls, context):
return context.window_manager.blendnet.status == 'idle' or BlendNet.addon.isManagerStarted()
def invoke(self, context, event):
wm = context.window_manager
BlendNet.addon.toggleManager()
if BlendNet.addon.isManagerStarted():
self.report({'INFO'}, 'BlendNet stopping Manager instance...')
wm.blendnet.status = 'Manager stopping...'
else:
self.report({'INFO'}, 'BlendNet starting Manager instance...')
wm.blendnet.status = 'Manager starting...'
if context.area:
context.area.tag_redraw()
wm.modal_handler_add(self)
self._timer = wm.event_timer_add(5.0, window=context.window)
return {'RUNNING_MODAL'}
def modal(self, context, event):
if event.type != 'TIMER' or self._last_run + 4.5 > time.time():
return {'PASS_THROUGH'}
self._last_run = time.time()
return self.execute(context)
def execute(self, context):
wm = context.window_manager
if wm.blendnet.status == 'Manager starting...':
if not BlendNet.addon.isManagerStarted():
return {'PASS_THROUGH'}
self.report({'INFO'}, 'BlendNet Manager started')
wm.blendnet.status = 'Manager connecting...'
if context.area:
context.area.tag_redraw()
BlendNet.addon.requestManagerInfo(context)
elif wm.blendnet.status == 'Manager stopping...':
if not BlendNet.addon.isManagerStopped():
return {'PASS_THROUGH'}
if wm.blendnet.status == 'Manager connecting...':
if not BlendNet.addon.requestManagerInfo(context):
return {'PASS_THROUGH'}
self.report({'INFO'}, 'BlendNet Manager connected')
if self._timer is not None:
wm.event_timer_remove(self._timer)
wm.blendnet.status = 'idle'
if context.area:
context.area.tag_redraw()
return {'FINISHED'}
class BlendNetDestroyManager(bpy.types.Operator):
bl_idname = 'blendnet.destroymanager'
bl_label = ''
bl_description = 'Destroy manager instance'
@classmethod
def poll(cls, context):
return BlendNet.addon.isManagerStopped()
def invoke(self, context, event):
BlendNet.addon.destroyManager()
        self.report({'INFO'}, 'BlendNet is destroying the Manager instance...')
return {'FINISHED'}
class BlendNetTaskPreviewOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskpreview'
bl_label = 'Open preview'
bl_description = 'Show the render for the currently selected task'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
return len(bn.manager_tasks) > bn.manager_tasks_idx
def _findRenderResultArea(self, context):
for window in context.window_manager.windows:
if window.scene != context.scene:
continue
for area in window.screen.areas:
if area.type != 'IMAGE_EDITOR':
continue
if area.spaces.active.image.type == 'RENDER_RESULT':
return area
return None
def invoke(self, context, event):
# Show the preview of the render if not open
if not self._findRenderResultArea(context):
bpy.ops.render.view_show('INVOKE_DEFAULT')
# Save the original render engine to run render on BlendNet
original_render_engine = context.scene.render.engine
context.scene.render.engine = __package__
# Start the render process
self.result = bpy.ops.render.render('INVOKE_DEFAULT')
# Restore the original scene engine
time.sleep(1.0)
if context.scene.render.engine == __package__:
context.scene.render.engine = original_render_engine
return {'FINISHED'}
class BlendNetRunTaskOperation(bpy.types.Operator):
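    '''Saves a copy of the current project, uploads its dependencies to the Manager and creates one render task (or one task per frame for animation)'''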
bl_idname = 'blendnet.runtask'
bl_label = 'Run Task'
bl_description = 'Run Manager task using BlendNet resources'
is_animation: BoolProperty(
name = 'Animation',
description = 'Runs animation rendering instead of just a still image rendering',
default = False
)
_timer = None
_project_file: None # temp blend project file to ensure it will not be changed
_frame: 0 # current/start frame depends on animation
_frame_to: 0 # end frame for animation
_frame_orig: 0 # to restore the current frame after animation processing
_task_name: None # store task name to retry later
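    # NOTE: the ':'-annotated attributes above create no class attribute, so
    # hasattr(self, '_frame') in execute() stays False until init() has run.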
@classmethod
def poll(cls, context):
return BlendNet.addon.isManagerActive()
def _findRenderResultArea(self, context):
for window in context.window_manager.windows:
if window.scene != context.scene:
continue
for area in window.screen.areas:
if area.type != 'IMAGE_EDITOR':
continue
if area.spaces.active.image.type == 'RENDER_RESULT':
return area
def init(self, context):
'''Initializes the execution'''
if not bpy.data.filepath:
self.report({'ERROR'}, 'Unable to render not saved project. Please save it somewhere.')
return {'CANCELLED'}
# Fix and verify the blendfile dependencies
bads = blend_file.getDependencies(bpy.path.abspath('//'), os.path.abspath(''))[1]
if bads:
self.report({'ERROR'}, 'Found some bad dependencies - please fix them before run: %s' % (bads,))
return {'CANCELLED'}
# Saving project to the same directory
try:
self._project_file = bpy.data.filepath + '_blendnet.blend'
bpy.ops.wm.save_as_mainfile(
filepath = self._project_file,
check_existing = False,
compress = True,
copy = True,
)
except Exception as e:
self.report({'ERROR'}, 'Unable to save the "_blendnet.blend" project file: %s' % (e,))
return {'CANCELLED'}
if self.is_animation:
self._frame = context.scene.frame_start
self._frame_to = context.scene.frame_end
self._frame_orig = context.scene.frame_current
else:
self._frame = context.scene.frame_current
self._task_name = None
context.window_manager.modal_handler_add(self)
self._timer = context.window_manager.event_timer_add(0.1, window=context.window)
return {'RUNNING_MODAL'}
def invoke(self, context, event):
return self.init(context)
def modal(self, context, event):
if event.type != 'TIMER':
return {'PASS_THROUGH'}
# Waiting for manager
if not BlendNet.addon.isManagerActive():
return {'PASS_THROUGH'}
return self.execute(context)
def execute(self, context):
scene = context.scene
wait = False
if not hasattr(self, '_frame'):
wait = True # The execute is running directly, so run in fg
if 'CANCELLED' in self.init(context):
self.report({'ERROR'}, 'Unable to init task preparation')
return {'CANCELLED'}
scene.frame_current = self._frame
fname = bpy.path.basename(bpy.data.filepath)
if not self._task_name:
# If the operation is not completed - reuse the same task name
d = datetime.utcnow().strftime('%y%m%d%H%M')
self._task_name = '%s%s-%d-%s' % (
BlendNet.addon.getTaskProjectPrefix(),
d, scene.frame_current,
BlendNet.addon.genRandomString(3)
)
print('DEBUG: Uploading task "%s" to the manager' % self._task_name)
# Prepare list of files need to be uploaded
deps, bads = blend_file.getDependencies(bpy.path.abspath('//'), os.path.abspath(''))
if bads:
self.report({'ERROR'}, 'Found some bad dependencies - please fix them before run: %s' % (bads,))
return {'CANCELLED'}
deps_map = dict([ (rel, bpy.path.abspath(rel)) for rel in deps ])
deps_map['//'+fname] = self._project_file
# Run the dependencies upload background process
BlendNet.addon.managerTaskUploadFiles(self._task_name, deps_map)
# Slow down the check process
if self._timer is not None:
context.window_manager.event_timer_remove(self._timer)
self._timer = context.window_manager.event_timer_add(3.0, window=context.window)
status = BlendNet.addon.managerTaskUploadFilesStatus()
if wait:
for retry in range(1, 10):
status = BlendNet.addon.managerTaskUploadFilesStatus()
if not status:
break
time.sleep(1.0)
if status:
self.report({'INFO'}, 'Uploading process for task %s: %s' % (self._task_name, status))
return {'PASS_THROUGH'}
# Configuring the task
print('INFO: Configuring task "%s"' % self._task_name)
self.report({'INFO'}, 'Configuring task "%s"' % (self._task_name,))
samples = None
if hasattr(scene.cycles, 'progressive'):
# For blender < 3.0.0
if scene.cycles.progressive == 'PATH':
samples = scene.cycles.samples
elif scene.cycles.progressive == 'BRANCHED_PATH':
samples = scene.cycles.aa_samples
else:
samples = scene.cycles.samples
if hasattr(scene.cycles, 'use_square_samples'):
# For blender < 3.0.0
# Addon need to pass the actual samples number to the manager
if scene.cycles.use_square_samples:
samples *= samples
# Where the compose result will be stored on the Addon side
compose_filepath = scene.render.frame_path()
if scene.render.filepath.startswith('//'):
# It's relative to blend project path
compose_filepath = bpy.path.relpath(compose_filepath)
cfg = {
'samples': samples,
'frame': scene.frame_current,
'project': fname,
'use_compositing_nodes': scene.render.use_compositing,
'compose_filepath': compose_filepath,
'project_path': bpy.path.abspath('//'), # To resolve the project parent paths like `//../..`
'cwd_path': os.path.abspath(''), # Current working directory to resolve relative paths like `../dir/file.txt`
}
if not BlendNet.addon.managerTaskConfig(self._task_name, cfg):
self.report({'WARNING'}, 'Unable to config the task "%s", let\'s retry...' % (self._task_name,))
return {'PASS_THROUGH'}
# Running the task
self.report({'INFO'}, 'Running task "%s"' % self._task_name)
if not BlendNet.addon.managerTaskRun(self._task_name):
self.report({'WARNING'}, 'Unable to start the task "%s", let\'s retry...' % (self._task_name,))
return {'PASS_THROUGH'}
self.report({'INFO'}, 'Task "%s" marked as ready to start' % (self._task_name,))
# Ok, task is started - we can clean the name
self._task_name = None
if self.is_animation:
if self._frame < self._frame_to:
# Not all the frames are processed
self._frame += 1
return {'PASS_THROUGH'}
# Restore the original current frame
scene.frame_current = self._frame_orig
# Removing no more required temp blend file
os.remove(self._project_file)
if self._timer is not None:
context.window_manager.event_timer_remove(self._timer)
return {'FINISHED'}
class TASKS_UL_list(bpy.types.UIList):
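    '''UIList that shows the Manager tasks with their state and completion progress'''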
def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
self.use_filter_sort_alpha = True
if self.layout_type in {'DEFAULT', 'COMPACT'}:
split = layout.split(factor=0.7)
split.label(text=item.name)
split.label(text=('%s:%s' % (item.state[0], item.done)) if item.done and item.state != 'COMPLETED' else item.state)
elif self.layout_type in {'GRID'}:
pass
class BlendNetGetNodeLogOperation(bpy.types.Operator):
bl_idname = 'blendnet.getnodelog'
bl_label = 'Get Node Log'
bl_description = 'Show the node (instance) log data'
node_id: StringProperty(
name = 'Node ID',
description = 'ID of the node/instance to get the log',
default = ''
)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
data = BlendNet.addon.getNodeLog(self.node_id)
if not data:
            self.report({'WARNING'}, 'No log data retrieved for ' + self.node_id)
return {'CANCELLED'}
if data == 'NOT IMPLEMENTED':
self.report({'WARNING'}, 'Not implemented for the current provider')
return {'CANCELLED'}
prefix = self.node_id
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='''Don't forget to unlink the file if you '''
'''don't want it to stay in blend file.''')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
        wm.popup_menu(drawPopup, title='Log for ' + prefix, icon='INFO')
return {'FINISHED'}
class BlendNetGetAddonLogOperation(bpy.types.Operator):
bl_idname = 'blendnet.getaddonlog'
bl_label = 'Get BlendNet Addon Log'
bl_description = 'Show the running BlendNet addon log information'
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
out = BlendNet.addon.getAddonLog()
prefix = 'addon'
if not out:
self.report({'ERROR'}, 'No log data found for ' + prefix)
return {'CANCELLED'}
data = []
line = ''
for t, l in out.items():
if not l.endswith('\n'):
line += l
continue
time_str = datetime.fromtimestamp(round(float(t), 3)).strftime('%y.%m.%d %H:%M:%S.%f')
data.append(time_str + '\t' + line + l)
line = ''
if line:
data.append('{not completed line}\t' + line)
data = ''.join(data)
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Log for ' + prefix, icon='INFO')
return {'FINISHED'}
class BlendNetGetServiceLogOperation(bpy.types.Operator):
bl_idname = 'blendnet.getservicelog'
bl_label = 'Get Service Log'
bl_description = 'Show the service (daemon) log data'
agent_name: StringProperty(
name = 'Name of Agent',
description = 'Name of Agent (or Manager by default) to get the log from',
default = ''
)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
out = {}
if self.agent_name:
out = BlendNet.addon.agentGetLog(self.agent_name)
else:
out = BlendNet.addon.managerGetLog()
prefix = self.agent_name if self.agent_name else BlendNet.addon.getResources(context).get('manager', {}).get('name')
if not out:
            self.report({'ERROR'}, 'No log data retrieved for ' + prefix)
return {'CANCELLED'}
data = []
line = ''
for t, l in out.items():
if not l.endswith('\n'):
line += l
continue
time_str = datetime.fromtimestamp(round(float(t), 3)).strftime('%y.%m.%d %H:%M:%S.%f')
data.append(time_str + '\t' + line + l)
line = ''
if line:
data.append('{not completed line}\t' + line)
data = ''.join(data)
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
        wm.popup_menu(drawPopup, title='Log for ' + prefix, icon='INFO')
return {'FINISHED'}
class BlendNetTaskInfoOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskinfo'
bl_label = 'Task info'
bl_description = 'Show the current task info panel'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
return len(bn.manager_tasks) > bn.manager_tasks_idx
def invoke(self, context, event):
wm = context.window_manager
def drawPopup(self, context):
layout = self.layout
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
data = BlendNet.addon.managerTaskStatus(task_name)
if not data:
return
keys = BlendNet.addon.naturalSort(data.keys())
for key in keys:
if key == 'result':
layout.label(text='%s:' % (key,))
for k in data[key]:
layout.label(text=' %s: %s' % (k, data[key][k]))
elif key == 'state_error_info':
layout.label(text='%s:' % (key,), icon='ERROR')
for it in data[key]:
if isinstance(it, dict):
for k, v in it.items():
layout.label(text=' %s: %s' % (k, v))
else:
layout.label(text=' ' + str(it))
else:
layout.label(text='%s: %s' % (key, data[key]))
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
wm.popup_menu(drawPopup, title='Task info for "%s"' % task_name, icon='INFO')
return {'FINISHED'}
class BlendNetTaskMessagesOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskmessages'
bl_label = 'Show task messages'
bl_description = 'Show the task execution messages'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state not in {'CREATED', 'PENDING'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
out = BlendNet.addon.managerTaskMessages(task_name)
if not out:
self.report({'ERROR'}, 'No task messages found for "%s"' % (task_name,))
return {'CANCELLED'}
data = []
keys = BlendNet.addon.naturalSort(out.keys())
for key in keys:
data.append(key)
if not out[key]:
continue
for line in out[key]:
data.append(' ' + line)
data = '\n'.join(data)
prefix = task_name + 'messages'
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Task messages for "%s"' % (task_name,), icon='TEXT')
return {'FINISHED'}
class BlendNetTaskDetailsOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskdetails'
bl_label = 'Show task details'
bl_description = 'Show the task execution details'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state not in {'CREATED', 'PENDING'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
out = BlendNet.addon.managerTaskDetails(task_name)
if not out:
self.report({'ERROR'}, 'No task details found for "%s"' % (task_name,))
return {'CANCELLED'}
data = []
keys = BlendNet.addon.naturalSort(out.keys())
for key in keys:
data.append(key)
if not out[key]:
continue
for line in out[key]:
data.append(' ' + str(line))
data = '\n'.join(data)
prefix = task_name + 'details'
def drawPopup(self, context):
layout = self.layout
if BlendNet.addon.showLogWindow(prefix, data):
layout.label(text='Don\'t forget to unlink the file if you don\'t want it to stay in blend file.')
else:
layout.label(text='Unable to show the log window', icon='ERROR')
wm.popup_menu(drawPopup, title='Task details for "%s"' % (task_name,), icon='TEXT')
return {'FINISHED'}
class BlendNetTaskRunOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskrun'
bl_label = 'Task run'
bl_description = 'Start the stopped or created task'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state in {'CREATED', 'STOPPED'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
BlendNet.addon.managerTaskRun(task_name)
return {'FINISHED'}
class BlendNetTaskDownloadOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskdownload'
bl_label = 'Download task result'
bl_description = 'Download the completed task result'
result: StringProperty()
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
# Allow to download results even for error state
return task_state in {'COMPLETED', 'ERROR'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
# If the result is downloaded manually - use the current project output directory
out_dir = os.path.dirname(bpy.context.scene.render.frame_path())
dir_path = os.path.join(out_dir, self.result)
result = BlendNet.addon.managerDownloadTaskResult(task_name, self.result, dir_path)
if result is None:
self.report({'WARNING'}, 'Unable to download the final result for %s, please retry later ' % (task_name,))
return {'CANCELLED'}
if not result:
self.report({'INFO'}, 'Downloading the final result for %s... ' % (task_name,))
return {'FINISHED'}
self.report({'INFO'}, 'The file is already downloaded and seems the same for %s... ' % (task_name,))
return {'CANCELLED'}
class BlendNetTaskStopOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskstop'
bl_label = 'Task stop'
bl_description = 'Stop the pending, running or error task'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
task_state = bn.manager_tasks[bn.manager_tasks_idx].state
return task_state in {'PENDING', 'RUNNING', 'ERROR'}
def invoke(self, context, event):
wm = context.window_manager
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
BlendNet.addon.managerTaskStop(task_name)
return {'FINISHED'}
class BlendNetTasksStopStartedOperation(bpy.types.Operator):
bl_idname = 'blendnet.tasksstopstarted'
bl_label = 'Stop all started tasks'
bl_description = 'Stop all the pending or running tasks'
bl_options = {'REGISTER', 'INTERNAL'}
tasks: CollectionProperty(type=BlendNetManagerTask)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
self.tasks.clear()
for task in wm.blendnet.manager_tasks:
if task.state in {'PENDING', 'RUNNING'}:
self.tasks.add().name = task.name
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Stopping %s tasks' % len(self.tasks))
for task in self.tasks:
print('INFO: Stopping task "%s"' % task.name)
BlendNet.addon.managerTaskStop(task.name)
self.tasks.clear()
return {'FINISHED'}
class BlendNetTaskRemoveOperation(bpy.types.Operator):
bl_idname = 'blendnet.taskremove'
bl_label = 'Remove selected task'
bl_description = 'Remove the task from the tasks list'
bl_options = {'REGISTER', 'INTERNAL'}
task_name: StringProperty()
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
if len(bn.manager_tasks) <= bn.manager_tasks_idx:
return False
return bn.manager_tasks[bn.manager_tasks_idx].state in {'CREATED', 'STOPPED', 'COMPLETED', 'ERROR'}
def invoke(self, context, event):
wm = context.window_manager
self.task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Removing task "%s"' % self.task_name)
BlendNet.addon.managerTaskRemove(self.task_name)
return {'FINISHED'}
class BlendNetAgentRemoveOperation(bpy.types.Operator):
bl_idname = 'blendnet.agentremove'
bl_label = 'Remove the agent'
bl_description = 'Remove the agent from the agents pool or terminate in case of cloud provider'
bl_options = {'REGISTER', 'INTERNAL'}
agent_name: StringProperty()
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Removing agent "%s"' % self.agent_name)
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider == 'local':
if not BlendNet.addon.managerAgentRemove(self.agent_name):
self.report({'WARNING'}, 'Unable to remove agent "%s"' % (self.agent_name,))
return {'CANCELLED'}
self.report({'INFO'}, 'Removed agent "%s"' % (self.agent_name,))
else:
BlendNet.addon.destroyAgent(self.agent_name)
            self.report({'INFO'}, 'BlendNet is destroying Agent instance ' + self.agent_name)
return {'FINISHED'}
class BlendNetAgentCreateOperation(bpy.types.Operator):
bl_idname = 'blendnet.agentcreate'
bl_label = 'Agent create'
bl_description = 'Register new agent in the manager'
agent_name: StringProperty(
name = 'Name',
description = 'Name of Agent to create',
default = ''
)
agent_address: StringProperty(
name = 'Address',
description = 'IP or domain name of the agent',
default = ''
)
agent_port: IntProperty(
name = 'Port',
description = 'TLS tcp port to communicate Manager with Agent service',
min = 1,
max = 65535,
default = 9443,
)
agent_user: StringProperty(
name = 'User',
description = 'HTTP Basic Auth username',
maxlen = 32,
default = '',
)
agent_password: StringProperty(
name = 'Password',
description = 'HTTP Basic Auth password',
subtype = 'PASSWORD',
maxlen = 128,
default = '',
)
@classmethod
def poll(cls, context):
return BlendNet.addon.isManagerActive()
def invoke(self, context, event):
wm = context.window_manager
prefs = bpy.context.preferences.addons[__package__].preferences
self.agent_port = prefs.agent_port
self.agent_user = prefs.agent_user
self.agent_password = prefs.agent_password_hidden
return wm.invoke_props_dialog(self)
def execute(self, context):
if not self.agent_name:
self.report({'ERROR'}, 'No agent name is specified')
return {'PASS_THROUGH'}
if not self.agent_address:
self.report({'ERROR'}, 'No agent address is specified')
return {'PASS_THROUGH'}
cfg = {
'address': self.agent_address,
'port': self.agent_port,
'auth_user': self.agent_user,
'auth_password': self.agent_password,
}
if not BlendNet.addon.managerAgentCreate(self.agent_name, cfg):
self.report({'WARNING'}, 'Unable to create agent "%s"' % (self.agent_name,))
return {'PASS_THROUGH'}
self.report({'INFO'}, 'Created agent "%s" (%s:%s)' % (
self.agent_name, self.agent_address, self.agent_port
))
return {'FINISHED'}
class BlendNetTasksRemoveEndedOperation(bpy.types.Operator):
bl_idname = 'blendnet.tasksremoveended'
bl_label = 'Remove all ended tasks'
bl_description = 'Remove all the stopped or completed tasks'
bl_options = {'REGISTER', 'INTERNAL'}
tasks: CollectionProperty(type=BlendNetManagerTask)
@classmethod
def poll(cls, context):
return True
def invoke(self, context, event):
wm = context.window_manager
self.tasks.clear()
for task in wm.blendnet.manager_tasks:
if task.state in {'STOPPED', 'COMPLETED'}:
self.tasks.add().name = task.name
return wm.invoke_confirm(self, event)
def execute(self, context):
self.report({'INFO'}, 'Removing %s tasks' % len(self.tasks))
for task in self.tasks:
print('INFO: Removing task "%s"' % task.name)
BlendNet.addon.managerTaskRemove(task.name)
self.tasks.clear()
return {'FINISHED'}
class BlendNetTaskMenu(bpy.types.Menu):
bl_idname = 'RENDER_MT_blendnet_task_menu'
bl_label = 'Task Menu'
bl_description = 'Allow to operate on tasks in the list'
@classmethod
def poll(cls, context):
bn = context.window_manager.blendnet
return len(bn.manager_tasks) > bn.manager_tasks_idx
def draw(self, context):
layout = self.layout
wm = context.window_manager
if not wm.blendnet.manager_tasks:
layout.label(text='No tasks in the list')
return
if len(wm.blendnet.manager_tasks) <= wm.blendnet.manager_tasks_idx:
# No such item in the list
return
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
layout.label(text='Task "%s":' % task_name)
layout.operator('blendnet.taskinfo', icon='INFO')
layout.operator('blendnet.taskmessages', icon='TEXT')
layout.operator('blendnet.taskdetails', icon='TEXT')
layout.operator('blendnet.taskdownload', text='Download render', icon='DOWNARROW_HLT').result = 'render'
layout.operator('blendnet.taskdownload', text='Download compose', icon='DOWNARROW_HLT').result = 'compose'
layout.operator('blendnet.taskrun', icon='PLAY')
layout.operator('blendnet.taskremove', icon='TRASH')
layout.operator('blendnet.taskstop', icon='PAUSE')
layout.label(text='All tasks actions:')
layout.operator('blendnet.tasksstopstarted', text='Stop all started tasks', icon='PAUSE')
layout.operator('blendnet.tasksremoveended', text='Remove all ended tasks', icon='TRASH')
class BlendNetRenderPanel(bpy.types.Panel):
bl_idname = 'RENDER_PT_blendnet_render'
bl_label = 'BlendNet'
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'render'
bl_options = {'HIDE_HEADER'}
@classmethod
def poll(cls, context):
# Allow to see the tasks if selected blendnet and support cycles
return context.scene.render.engine in ('CYCLES', __package__)
def draw(self, context):
layout = self.layout
wm = context.window_manager
bn = context.scene.blendnet
prefs = context.preferences.addons[__package__].preferences
box = layout.box()
row = box.split(factor=0.5)
split = row.split(factor=0.1)
split.prop(prefs, 'blendnet_show_panel', icon_only=True)
split.label(text='BlendNet Render (%s)' % (prefs.resource_provider,))
split = row.split(factor=0.9)
split.label(text=context.window_manager.blendnet.status)
split.operator('blendnet.getaddonlog', text='', icon='TEXT')
if not prefs.blendnet_show_panel:
return
row = box.row()
row.use_property_split = True
row.use_property_decorate = False # No prop animation
row.prop(bn, 'scene_memory_req', text='Render RAM (GB)')
if not BlendNet.addon.checkProviderIsSelected():
box.label(text='ERROR: Provider init failed, check addon settings', icon='ERROR')
return
if not BlendNet.addon.checkAgentMemIsEnough():
box.label(text='WARN: Agent does not have enough memory to render the scene', icon='ERROR')
if not prefs.agent_use_cheap_instance:
box.label(text='WARN: No cheap VMs available, check addon settings', icon='ERROR')
if context.scene.render.engine != __package__:
row = box.row(align=True)
if BlendNet.addon.isManagerStarted():
row.operator('blendnet.runtask', text='Run Image Task', icon='RENDER_STILL').is_animation = False
row.operator('blendnet.runtask', text='Run Animation Tasks', icon='RENDER_ANIMATION').is_animation = True
elif prefs.resource_provider != 'local':
row.operator('blendnet.togglemanager', text='Run Manager instance', icon='ADD')
elif prefs.resource_provider == 'local':
split = row.split(factor=0.3)
split.label(text='Using Manager')
split.label(text='%s:%s' % (prefs.manager_address, prefs.manager_port))
if BlendNet.addon.isManagerActive():
box.template_list('TASKS_UL_list', '', wm.blendnet, 'manager_tasks', wm.blendnet, 'manager_tasks_idx', rows=1)
split = box.split(factor=0.8)
split.operator('blendnet.taskpreview', text='Task Preview', icon='RENDER_RESULT')
split.menu('RENDER_MT_blendnet_task_menu', text='Actions')
class BlendNetManagerPanel(bpy.types.Panel):
bl_idname = 'RENDER_PT_blendnet_manager'
bl_parent_id = 'RENDER_PT_blendnet_render'
bl_label = ' '
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'render'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return context.preferences.addons[__package__].preferences.blendnet_show_panel and BlendNet.addon.checkProviderIsSelected()
def draw_header(self, context):
layout = self.layout
layout.label(text='Manager')
status = BlendNet.addon.getManagerStatus()
layout.label(text=status[0], icon=status[1])
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider != 'local':
layout.operator('blendnet.togglemanager', icon='ADD' if not BlendNet.addon.isManagerStarted() else 'X')
layout.operator('blendnet.destroymanager', icon='TRASH')
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No prop animation
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider != 'local':
row = layout.row()
row.enabled = not BlendNet.addon.isManagerCreated()
row.prop(prefs, 'manager_instance_type', text='Type')
price = BlendNet.addon.getManagerPriceBG(prefs.manager_instance_type, context)
row = layout.row()
if price[0] < 0.0:
row.label(text='WARNING: Unable to find price for the type "%s": %s' % (
prefs.manager_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated price: ~%s/Hour (%s)' % (round(price[0], 8), price[1]))
if prefs.resource_provider == 'local':
split = layout.split(factor=0.3)
split.label(text='Address')
split.label(text='%s:%s' % (prefs.manager_address, prefs.manager_port))
row = layout.row()
manager_info = BlendNet.addon.getResources(context).get('manager')
col = row.column()
col.enabled = BlendNet.addon.isManagerActive()
col.operator('blendnet.getservicelog', text='Service Log', icon='TEXT').agent_name = ''
col = row.column()
col.enabled = BlendNet.addon.isManagerStarted()
op = col.operator('blendnet.getnodelog', text='Node Log', icon='TEXT')
op.node_id = manager_info.get('id', '') if manager_info else ''
if manager_info:
layout.label(text='Manager instance:')
box = layout.box()
for key, value in manager_info.items():
split = box.split(factor=0.3)
split.label(text=key)
split.label(text=str(value))
if BlendNet.addon.isManagerActive():
info = BlendNet.addon.requestManagerInfo(context)
if info:
layout.label(text='Manager info:')
box = layout.box()
blender_version = info.get('blender', {}).get('version_string')
if blender_version:
split = box.split(factor=0.3)
split.label(text='blender')
split.label(text=blender_version)
for key, value in info.get('platform', {}).items():
split = box.split(factor=0.3)
split.label(text=key)
split.label(text=str(value))
class BlendNetAgentsPanel(bpy.types.Panel):
bl_idname = 'RENDER_PT_blendnet_agents'
bl_parent_id = 'RENDER_PT_blendnet_render'
bl_label = ' '
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'render'
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return context.preferences.addons[__package__].preferences.blendnet_show_panel and BlendNet.addon.checkProviderIsSelected()
def draw_header(self, context):
layout = self.layout
layout.label(text='Agents (%d)' % BlendNet.addon.getStartedAgentsNumber(context))
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider == 'local':
layout.operator('blendnet.agentcreate', icon='ADD', text='')
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False # No prop animation
prefs = bpy.context.preferences.addons[__package__].preferences
if prefs.resource_provider != 'local':
row = layout.row()
row.prop(prefs, 'manager_agent_instance_type', text='Agents type')
row.enabled = not BlendNet.addon.isManagerStarted()
row = layout.row()
row.prop(prefs, 'manager_agents_max', text='Agents max')
row.enabled = not BlendNet.addon.isManagerStarted()
row = layout.row()
price = BlendNet.addon.getAgentPriceBG(prefs.manager_agent_instance_type, context)
if price[0] < 0.0:
row.label(text='ERROR: Unable to find price for the type "%s": %s' % (
prefs.manager_agent_instance_type, price[1]
), icon='ERROR')
else:
row.label(text='Calculated combined price: ~%s/Hour (%s)' % (
round(price[0] * prefs.manager_agents_max, 8), price[1]
))
min_price = BlendNet.addon.getMinimalCheapPriceBG(prefs.manager_agent_instance_type, context)
if min_price > 0.0:
row = layout.row()
row.label(text='Minimal combined price: ~%s/Hour' % (
round(min_price * prefs.manager_agents_max, 8),
))
if price[0] <= min_price:
row = layout.row()
row.label(text='ERROR: Selected cheap price is lower than minimal one', icon='ERROR')
agents = BlendNet.addon.getResources(context).get('agents', {})
if agents:
box = layout.box()
for inst_name in sorted(agents.keys()):
info = agents[inst_name]
split = box.split(factor=0.8)
split.label(text=info.get('name'))
row = split.row()
row.enabled = BlendNet.addon.isManagerActive()
# The Agent status
if info.get('error'):
row.label(icon='ERROR') # You need to check logs
if info.get('active'):
row.label(icon='CHECKMARK') # Agent is active
elif info.get('started'):
row.label(icon='REC') # Node is started, but Agent is initializing
elif info.get('stopped'):
row.label(icon='PAUSE') # Node is stopped
else:
row.label(icon='X') # Node is terminated or unknown state
row.enabled = bool(info.get('started') or info.get('stopped')) or prefs.resource_provider == 'local'
if info.get('active'):
row.operator('blendnet.getservicelog', text='', icon='TEXT').agent_name = info.get('name', '')
else:
col = row.column()
col.operator('blendnet.getnodelog', text='', icon='TEXT').node_id = info.get('id', '')
col.enabled = bool(info.get('started'))
row.operator('blendnet.agentremove', icon='TRASH', text='').agent_name = info.get('name', '')
class BlendNetRenderEngine(bpy.types.RenderEngine):
'''Continuous render engine allows to switch between the tasks'''
bl_idname = __package__
bl_label = "BlendNet (don't use as a primary engine)"
bl_use_postprocess = True
bl_use_preview = False
def __init__(self):
self._prev_status = None
self._prev_message = None
print('DEBUG: Init BlendNet render')
def __del__(self):
print('DEBUG: Delete BlendNet render')
def updateStats(self, status = None, message = None):
'''To update the status only if something is changed and print into console'''
status = status or self._prev_status or ''
message = message or self._prev_message or ''
self.update_stats(status, message)
if self._prev_status != status or self._prev_message != message:
print('INFO: Render status: %s, %s' % (status, message))
self._prev_status = status
self._prev_message = message
def secToTime(self, sec):
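        '''Format a duration in seconds as a compact string, e.g. 3661 -> "1h1m1s"'''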
h = sec // 3600
m = (sec % 3600) // 60
out = str((sec % 3600) % 60)+'s'
if h or m:
out = str(m)+'m'+out
if h:
out = str(h)+'h'+out
return out
def render(self, depsgraph):
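        '''Poll the Manager for the currently selected task and stream its preview/final image into the render result until finished or cancelled'''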
scene = depsgraph.scene
wm = bpy.context.window_manager
scale = scene.render.resolution_percentage / 100.0
self.size_x = int(scene.render.resolution_x * scale)
self.size_y = int(scene.render.resolution_y * scale)
rendering = True
prev_status = {}
prev_name = ''
loaded_final_render = False
temp_dir = tempfile.TemporaryDirectory(prefix='blendnet-preview_')
result = self.begin_result(0, 0, self.size_x, self.size_y)
while rendering:
time.sleep(1.0)
if self.test_break():
# TODO: render cancelled
self.updateStats(None, 'Cancelling...')
rendering = False
if len(wm.blendnet.manager_tasks) < wm.blendnet.manager_tasks_idx+1:
self.updateStats('Please select the task in BlendNet manager tasks list')
continue
task_name = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].name
if task_name != prev_name:
self.update_result(result)
prev_name = task_name
loaded_final_render = False
status = BlendNet.addon.managerTaskStatus(task_name)
if not status:
continue
self.updateStats(None, '%s: %s' % (task_name, status.get('state')))
if status.get('state') == 'RUNNING':
remaining = None
if status.get('remaining'):
remaining = self.secToTime(status.get('remaining'))
self.updateStats('Rendered samples: %s/%s | Remaining: %s' % (
status.get('samples_done'), status.get('samples'),
remaining,
))
update_render = None
if status.get('state') == 'COMPLETED':
if not loaded_final_render:
total_time = self.secToTime((status.get('end_time') or 0) - (status.get('start_time_actual') or 0))
out_file = wm.blendnet.manager_tasks[wm.blendnet.manager_tasks_idx].received
if out_file == 'skipped':
# File was skipped by the downloader, so download it to temp dir
out_file = BlendNet.addon.managerDownloadTaskResult(task_name, 'compose', temp_dir.name)
if out_file and os.path.isfile(out_file):
self.updateStats('Got the final result: %s | Task render time: %s' % (out_file, total_time))
update_render = out_file
loaded_final_render = True
else:
# File is going to be downloaded by BlendNet.addon.updateManagerTasks() soon
self.updateStats('%s | Task render time: %s' % (out_file, total_time))
elif status.get('result', {}).get('preview') != prev_status.get('result', {}).get('preview'):
out_file = BlendNet.addon.managerDownloadTaskResult(task_name, 'preview', temp_dir.name)
if out_file and os.path.isfile(out_file):
update_render = out_file
else:
# It's downloading on background, so not store it right now
status['result']['preview'] = prev_status.get('result', {}).get('preview')
if update_render:
if os.path.isfile(update_render):
try:
result.layers[0].load_from_file(update_render)
print('DEBUG: Loaded preview layer:', update_render)
except Exception as e:
print('DEBUG: Unable to load the preview layer:', e)
result.load_from_file(update_render)
print('DEBUG: Loaded render result file:', update_render)
else:
print('ERROR: Unable to load not existing result file "%s"' % (update_render,))
self.update_result(result)
prev_status = status
            self.update_progress((status.get('samples_done') or 0) / (status.get('samples') or 1))
self.end_result(result)
def loadProvidersSettings():
'''Get the available providers settings to set and load them during registration of the class'''
all_settings = BlendNet.addon.getProvidersSettings()
for provider, provider_settings in all_settings.items():
for key, data in provider_settings.items():
path = 'provider_' + provider + '_' + key
print('DEBUG: registering provider config:', path)
if data.get('type') in ('string', 'path'):
BlendNetAddonPreferences.__annotations__[path] = StringProperty(
name = data.get('name'),
description = data.get('description'),
subtype = 'FILE_PATH' if data['type'] == 'path' else 'NONE',
update = BlendNet.addon.updateProviderSettings,
)
elif data.get('type') == 'choice':
BlendNetAddonPreferences.__annotations__[path] = EnumProperty(
name = data.get('name'),
description = data.get('description'),
items = data.get('values'),
update = BlendNet.addon.updateProviderSettings,
)
# Additional field to store string value (otherwise it's hard on init when
# value of enum is integer and has no items to choose from)
BlendNetAddonPreferences.__annotations__[path+'_value'] = StringProperty(
name = data.get('name'),
description = data.get('description'),
)
else:
print('ERROR: Unknown provider "%s" setting "%s" type: %s' % (provider, key, data.get('type')))
def initPreferences():
'''Will init the preferences with defaults'''
prefs = bpy.context.preferences.addons[__package__].preferences
# Set defaults for preferences
# Update resource_provider anyway to set the addon var
prefs.resource_provider = prefs.resource_provider or BlendNet.addon.getAddonDefaultProvider()
# Since default for property will be regenerated every restart
# we generate new session id if the current one is empty
if prefs.session_id == '':
prefs.session_id = ''
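    # Same trick for the hidden passwords below: re-assigning the empty value
    # fires the property update callback, which generates a random value.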
if prefs.manager_password_hidden == '':
prefs.manager_password_hidden = ''
if prefs.agent_password_hidden == '':
prefs.agent_password_hidden = ''
BlendNet.addon.fillAvailableBlenderDists()
# Getting provider info to make sure all the settings are ok
# for current provider configuration
BlendNet.addon.getProviderInfo()
def register():
BlendNet.addon.initAddonLog()
BlendNet.providers.loadProviders()
loadProvidersSettings()
bpy.utils.register_class(BlendNetAddonPreferences)
initPreferences()
bpy.utils.register_class(BlendNetSceneSettings)
bpy.utils.register_class(BlendNetManagerTask)
bpy.utils.register_class(TASKS_UL_list)
bpy.utils.register_class(BlendNetSessionProperties)
bpy.utils.register_class(BlendNetRenderEngine)
bpy.utils.register_class(BlendNetRunTaskOperation)
bpy.utils.register_class(BlendNetTaskPreviewOperation)
bpy.utils.register_class(BlendNetTaskInfoOperation)
bpy.utils.register_class(BlendNetTaskMessagesOperation)
bpy.utils.register_class(BlendNetTaskDetailsOperation)
bpy.utils.register_class(BlendNetTaskDownloadOperation)
bpy.utils.register_class(BlendNetTaskRunOperation)
bpy.utils.register_class(BlendNetTaskStopOperation)
bpy.utils.register_class(BlendNetTasksStopStartedOperation)
bpy.utils.register_class(BlendNetTaskRemoveOperation)
bpy.utils.register_class(BlendNetTasksRemoveEndedOperation)
bpy.utils.register_class(BlendNetAgentRemoveOperation)
bpy.utils.register_class(BlendNetAgentCreateOperation)
bpy.utils.register_class(BlendNetTaskMenu)
bpy.utils.register_class(BlendNetGetServiceLogOperation)
bpy.utils.register_class(BlendNetGetAddonLogOperation)
bpy.utils.register_class(BlendNetGetNodeLogOperation)
bpy.utils.register_class(BlendNetRenderPanel)
bpy.utils.register_class(BlendNetToggleManager)
bpy.utils.register_class(BlendNetDestroyManager)
bpy.utils.register_class(BlendNetManagerPanel)
bpy.utils.register_class(BlendNetAgentsPanel)
def unregister():
bpy.utils.unregister_class(BlendNetAgentsPanel)
bpy.utils.unregister_class(BlendNetManagerPanel)
bpy.utils.unregister_class(BlendNetToggleManager)
bpy.utils.unregister_class(BlendNetDestroyManager)
bpy.utils.unregister_class(BlendNetRenderPanel)
bpy.utils.unregister_class(BlendNetGetNodeLogOperation)
bpy.utils.unregister_class(BlendNetGetAddonLogOperation)
bpy.utils.unregister_class(BlendNetGetServiceLogOperation)
bpy.utils.unregister_class(BlendNetTaskMenu)
bpy.utils.unregister_class(BlendNetTaskInfoOperation)
bpy.utils.unregister_class(BlendNetAgentCreateOperation)
bpy.utils.unregister_class(BlendNetAgentRemoveOperation)
bpy.utils.unregister_class(BlendNetTasksRemoveEndedOperation)
bpy.utils.unregister_class(BlendNetTaskRemoveOperation)
bpy.utils.unregister_class(BlendNetTasksStopStartedOperation)
bpy.utils.unregister_class(BlendNetTaskStopOperation)
bpy.utils.unregister_class(BlendNetTaskRunOperation)
bpy.utils.unregister_class(BlendNetTaskDownloadOperation)
bpy.utils.unregister_class(BlendNetTaskDetailsOperation)
bpy.utils.unregister_class(BlendNetTaskMessagesOperation)
bpy.utils.unregister_class(BlendNetTaskPreviewOperation)
bpy.utils.unregister_class(BlendNetRunTaskOperation)
bpy.utils.unregister_class(BlendNetRenderEngine)
bpy.utils.unregister_class(BlendNetSessionProperties)
bpy.utils.unregister_class(TASKS_UL_list)
bpy.utils.unregister_class(BlendNetManagerTask)
bpy.utils.unregister_class(BlendNetSceneSettings)
bpy.utils.unregister_class(BlendNetAddonPreferences)
if __name__ == '__main__':
register()
# ==== codonlib/codonlib.py | repo: tmsincomb/codonlib | license: MIT ====
"""Main module."""
from collections import defaultdict
from functools import cache
from itertools import product
from operator import itemgetter
from typing import List
import numpy as np
from Bio.Data.CodonTable import unambiguous_dna_by_id
class CodonDegeneracy:
def __init__(self, table_id: int = 1):
self.table_id = table_id
self.codontable_atlas = unambiguous_dna_by_id[self.table_id]
self.codon2aa = {}
self.aa2codons = defaultdict(list)
self.codon_table: np.char.array = None
self.nt_table: np.char.array = None
self.aa_table: np.char.array = None
self.__codon_aa_mappings()
def __codon_aa_mappings(self):
for codon, aa in self.codontable_atlas.forward_table.items():
self.codon2aa[codon] = aa
self.aa2codons[aa].append(codon)
for codon in self.codontable_atlas.stop_codons:
self.codon2aa[codon] = "*"
self.aa2codons["*"].append(codon)
def __create_tables(self):
codon_list = []
aa_list = []
for i, col_nt in enumerate(["T", "C", "A", "G"]):
for j, wobble_nt in enumerate(["T", "C", "A", "G"]):
for k, row_nt in enumerate(["T", "C", "A", "G"]):
codon = col_nt + row_nt + wobble_nt
aa = self.codon2aa[codon]
codon_list.append(codon)
aa_list.append(aa)
self.codon_table = np.char.array(codon_list, dtype=str).reshape((16, 4))
self.nt_table = self.codon_table.view("U1").reshape((16, 4, -1))
self.aa_table = np.char.array(aa_list).reshape((16, 4))
    @cache
    def __combinations(self, nt):
        # Build every codon from the nucleotide choices at each position and
        # map them to the amino acids they encode.
        codon_list = [x + y + z for x in nt[0] for y in nt[1] for z in nt[2]]
        return set(itemgetter(*codon_list)(self.codon2aa))
@cache
def __get_aa_possibilities(self, codons: list) -> set:
"""
Get the possible combinations from a given list of possibilities.
Parameters
----------
nt_combo : list
List of codons.
Examples
--------
>>>__locknkey(['CCG', 'AAG'])
{'T', 'K', 'P', 'Q'}
Returns
-------
set
amino acid symbol set
"""
nt1, nt2, nt3 = frozenset(), frozenset(), frozenset()
for codon in codons:
nt1 |= frozenset(codon[0])
nt2 |= frozenset(codon[1])
nt3 |= frozenset(codon[2])
return self.__combinations((nt1, nt2, nt3))
def off_targets(self, aa_list: List[str]) -> set:
"""
Get the off-target amino acids for a given list of amino acids.
Parameters
----------
aa_list : list
List of amino acid codons.
Returns
-------
set
Set of off-target amino acids.
"""
on_target_aa = set(aa_list)
off_target_aa: set = set()
off_target_best = [0] * 42
all_codons_per_aa = [self.aa2codons[aa] for aa in aa_list]
for one_codon_selected_per_aa in product(*all_codons_per_aa):
aa_possibilities = self.__get_aa_possibilities(one_codon_selected_per_aa)
off_target_aa = aa_possibilities - on_target_aa
if len(off_target_aa) < len(off_target_best):
off_target_best = off_target_aa
        # TODO: return the best combo together with all equivalent combos, so
        # the "real" set of codons to return is known; in practice we want all
        # of the combos, not just one per amino acid.
return off_target_best
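# Example usage: a hedged sketch, not part of the original module. It assumes
# the standard genetic code (table_id=1) and shows how ``off_targets`` searches
# codon choices for a set of amino acids, minimizing off-target translations.
if __name__ == "__main__":
    degeneracy = CodonDegeneracy(table_id=1)
    # Smallest set of off-target amino acids reachable when encoding P and K.
    print(degeneracy.off_targets(["P", "K"]))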
# ==== M7 - python script.py | repo: kfmahre/movies_neural_net | license: MIT ====
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 15 01:37:59 2019
@author: kfmah
"""
stuff = list()
stuff.append('python')
stuff.append('chuck')
stuff.sort()
print (stuff[0])
print (stuff.__getitem__(0))
print (list.__getitem__(stuff,0))
# ==== python/ql/src/Security/CWE-022/examples/tarslip_bad.py | repo: vadi2/codeql | license: MIT ====
import tarfile
with tarfile.open('archive.zip') as tar:
    # BAD: This could write any file on the filesystem.
for entry in tar:
tar.extract(entry, "/tmp/unpack/")
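# A possible mitigation, sketched here for contrast (not part of the upstream
# example): resolve each entry's destination and refuse anything that would
# land outside the unpack directory.
import os.path

def safe_extract(tar, destination):
    real_destination = os.path.realpath(destination)
    for entry in tar:
        target = os.path.realpath(os.path.join(destination, entry.name))
        # GOOD: only extract entries that stay inside the destination.
        if target.startswith(real_destination + os.sep):
            tar.extract(entry, destination)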
# ==== thermidor/classes/clusterer_socket.py | repo: rcorrero/thermidor | license: BSD-3-Clause ====
# Author: Richard Correro
from sklearn.base import ClusterMixin
from .transformer_socket import TransformerSocket
class ClustererSocket(TransformerSocket, ClusterMixin):
'''Class which allows for treating clusterers as
model parameters.
Parameters
----------
estimator : Sci-kit learn estimator object
If estimator is None or 'passthrough' then transform returns X.
'''
def predict(self, X, sample_weight=None):
'''Predict the closest cluster each sample in X belongs to.
Parameters
----------
X : ndarray, shape (n_samples, n_features)
Input data.
sample_weight : array-like, shape (n_samples,), optional
The weights for each observation in X. If None, all
observations are assigned equal weight (default: None)
Returns
-------
labels : ndarray, shape (n_samples,)
cluster labels
'''
return self.estimator.predict(X, sample_weight)
def fit_predict(self, X, y=None):
'''Performs clustering on X and returns cluster labels.
Parameters
----------
X : ndarray, shape (n_samples, n_features)
Input data.
y : Ignored
not used, present for API consistency by convention.
Returns
-------
labels : ndarray, shape (n_samples,)
cluster labels
'''
return self.estimator.fit_predict(X, y)
def score(self, X, y=None, sample_weight=None):
        '''Return the estimator's score, if applicable.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
New data.
y : Ignored
not used, present here for API consistency by convention.
sample_weight : array-like, shape (n_samples,), optional
The weights for each observation in X. If None, all observations
are assigned equal weight (default: None)
Returns
-------
score : float
Opposite of the value of X on the K-means objective.
'''
return self.estimator.score(X, y, sample_weight)
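# Example usage: a hedged sketch, not part of the original module. It assumes
# scikit-learn is installed and that ``TransformerSocket`` accepts the wrapped
# estimator as its constructor argument and stores it on ``self.estimator``.
if __name__ == "__main__":
    import numpy as np
    from sklearn.cluster import KMeans

    X = np.random.rand(100, 2)
    socket = ClustererSocket(KMeans(n_clusters=3))
    print(socket.fit_predict(X)[:10])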
# ==== Telstra_Messaging/configuration.py | repo: yashints/MessagingAPI-SDK-python | license: Apache-2.0 ====
# coding: utf-8
"""
Telstra Messaging API
# Introduction <table><tbody><tr><td class = 'into_api' style='border:none;padding:0 0 0 0'><p>Send and receive SMS and MMS messages globally using Telstra's enterprise grade Messaging API. It also allows your application to track the delivery status of both sent and received messages. Get your dedicated Australian number, and start sending and receiving messages today.</p></td><td class = 'into_api_logo' style='width: 20%;border:none'><img class = 'api_logo' style='margin: -26px 0 0 0' src = 'https://test-telstra-retail-tdev.devportal.apigee.io/sites/default/files/messagingapi-icon.png'></td></tr></tbody></table> # Features The Telstra Messaging API provides the features below. | Feature | Description | | --- | --- | | `Dedicated Number` | Provision a mobile number for your account to be used as `from` address in the API | | `Send Messages` | Sending SMS or MMS messages | | `Receive Messages` | Telstra will deliver messages sent to a dedicated number or to the `notifyURL` defined by you | | `Broadcast Messages` | Invoke a single API call to send a message to a list of numbers provided in `to` | | `Delivery Status` | Query the delivery status of your messages | | `Callbacks` | Provide a notification URL and Telstra will notify your app when a message status changes | | `Alphanumeric Identifier` | Differentiate yourself by providing an alphanumeric string in `from`. This feature is only available on paid plans | | `Concatenation` | Send messages up to 1900 characters long and Telstra will automaticaly segment and reassemble them | | `Reply Request` | Create a chat session by associating `messageId` and `to` number to track responses received from a mobile number. We will store this association for 8 days | | `Character set` | Accepts all Unicode characters as part of UTF-8 | | `Bounce-back response` | See if your SMS hits an unreachable or unallocated number (Australia Only) | | `Queuing` | Messaging API will automatically queue and deliver each message at a compliant rate. | | `Emoji Encoding` | The API supports the encoding of the full range of emojis. Emojis in the reply messages will be in their UTF-8 format. | ## Delivery Notification or Callbacks The API provides several methods for notifying when a message has been delivered to the destination. 1. When you send a message there is an opportunity to specify a `notifyURL`. Once the message has been delivered the API will make a call to this URL to advise of the message status. 2. If you do not specify a URL you can always call the `GET /status` API to get the status of the message. # Getting Access to the API 1. Register at [https://dev.telstra.com](https://dev.telstra.com). 2. After registration, login to [https://dev.telstra.com](https://dev.telstra.com) and navigate to the **My apps** page. 3. Create your application by clicking the **Add new app** button 4. Select **API Free Trial** Product when configuring your application. This Product includes the Telstra Messaging API as well as other free trial APIs. Your application will be approved automatically. 5. There is a maximum of 1000 free messages per developer. Additional messages and features can be purchased from [https://dev.telstra.com](https://dev.telstra.com). 6. Note your `Client key` and `Client secret` as these will be needed to provision a number for your application and for authentication. Now head over to **Getting Started** where you can find a postman collection as well as some links to sample apps and SDKs to get you started. Happy Messaging! 
# Frequently Asked Questions **Q: Is creating a subscription via the Provisioning call a required step?** A. Yes. You will only be able to start sending messages if you have a provisioned dedicated number. Use Provisioning to create a dedicated number subscription, or renew your dedicated number if it has expired. **Q: When trying to send an SMS I receive a `400 Bad Request` response. How can I fix this?** A. You need to make sure you have a provisioned dedicated number before you can send an SMS. If you do not have a provisioned dedicated number and you try to send a message via the API, you will get the error below in the response: <pre><code class=\"language-sh\">{ \"status\":\"400\", \"code\":\"DELIVERY-IMPOSSIBLE\", \"message\":\"Invalid \\'from\\' address specified\" }</code></pre> Use Provisioning to create a dedicated number subscription, or renew your dedicated number if it has expired. **Q: How long does my dedicated number stay active for?** A. When you provision a dedicated number, by default it will be active for 30 days. You can use the `activeDays` parameter during the provisioning call to increment or decrement the number of days your dedicated number will remain active. Note that Free Trial apps will have 30 days as the maximum `activeDays` they can add to their provisioned number. If the Provisioning call is made several times within that 30-Day period, it will return the `expiryDate` in the Unix format and will not add any activeDays until after that `expiryDate`. **Q: Can I send a broadcast message using the Telstra Messaging API?** A. Yes. Recipient numbers can be in the form of an array of strings if a broadcast message needs to be sent, allowing you to send to multiple mobile numbers in one API call. A sample request body for this will be: `{\"to\":[\"+61412345678\",\"+61487654321\"],\"body\":\"Test Message\"}` **Q: Can I send SMS and MMS to all countries?** A. You can send SMS and MMS to all countries EXCEPT to countries which are subject to global sanctions namely: Burma, Côte d'Ivoire, Cuba, Iran, North Korea, Syria. **Q: Can I use `Alphanumeric Identifier` from my paid plan via credit card?** A. `Alphanumeric Identifier` is only available on Telstra Account paid plans, not through credit card paid plans. **Q: What is the maximum sized MMS that I can send?** A. This will depend on the carrier that will receive the MMS. For Telstra it's up to 2MB, Optus up to 1.5MB and Vodafone only allows up to 500kB. You will need to check with international carriers for thier MMS size limits. **Q: How is the size of an MMS calculated?** A. Images are scaled up to approximately 4/3 when base64 encoded. Additionally, there is approximately 200 bytes of overhead on each MMS. Assuming the maximum MMS that can be sent on Telstra’s network is 2MB, then the maximum image size that can be sent will be approximately 1.378MB (1.378 x 1.34 + 200, without SOAP encapsulation). **Q: How is an MMS classified as Small or Large?** A. MMSes with size below 600kB are classed as Small whereas those that are bigger than 600kB are classed as Large. They will be charged accordingly. **Q: Are SMILs supported by the Messaging API?** A. While there will be no error if you send an MMS with a SMIL presentation, the actual layout or sequence defined in the SMIL may not display as expected because most of the new smartphone devices ignore the SMIL presentation layer. SMIL was used in feature phones which had limited capability and SMIL allowed a *powerpoint type* presentation to be provided. 
Smartphones now have the capability to display video which is the better option for presentations. It is recommended that MMS messages should just drop the SMIL. **Q: How do I assign a delivery notification or callback URL?** A. You can assign a delivery notification or callback URL by adding the `notifyURL` parameter in the body of the request when you send a message. Once the message has been delivered, a notification will then be posted to this callback URL. **Q: What is the difference between the `notifyURL` parameter in the Provisoning call versus the `notifyURL` parameter in the Send Message call?** A. The `notifyURL` in the Provisoning call will be the URL where replies to the provisioned number will be posted. On the other hand, the `notifyURL` in the Send Message call will be the URL where the delivery notification will be posted, e.g. when an SMS has already been delivered to the recipient. # Getting Started Below are the steps to get started with the Telstra Messaging API. 1. Generate an OAuth2 token using your `Client key` and `Client secret`. 2. Use the Provisioning call to create a subscription and receive a dedicated number. 3. Send a message to a specific mobile number. ## Run in Postman <a href=\"https://app.getpostman.com/run-collection/ded00578f69a9deba256#?env%5BMessaging%20API%20Environments%5D=W3siZW5hYmxlZCI6dHJ1ZSwia2V5IjoiY2xpZW50X2lkIiwidmFsdWUiOiIiLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoiY2xpZW50X3NlY3JldCIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6ImFjY2Vzc190b2tlbiIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6Imhvc3QiLCJ2YWx1ZSI6InRhcGkudGVsc3RyYS5jb20iLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoiQXV0aG9yaXphdGlvbiIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifSx7ImVuYWJsZWQiOnRydWUsImtleSI6Im9hdXRoX2hvc3QiLCJ2YWx1ZSI6InNhcGkudGVsc3RyYS5jb20iLCJ0eXBlIjoidGV4dCJ9LHsiZW5hYmxlZCI6dHJ1ZSwia2V5IjoibWVzc2FnZV9pZCIsInZhbHVlIjoiIiwidHlwZSI6InRleHQifV0=\"><img src=\"https://run.pstmn.io/button.svg\" alt=\"Run in Postman\"/></a> ## Sample Apps - [Perl Sample App](https://github.com/telstra/MessagingAPI-perl-sample-app) - [Happy Chat App](https://github.com/telstra/messaging-sample-code-happy-chat) - [PHP Sample App](https://github.com/developersteve/telstra-messaging-php) ## SDK Repos - [Messaging API - PHP SDK](https://github.com/telstra/MessagingAPI-SDK-php) - [Messaging API - Python SDK](https://github.com/telstra/MessagingAPI-SDK-python) - [Messaging API - Ruby SDK](https://github.com/telstra/MessagingAPI-SDK-ruby) - [Messaging API - NodeJS SDK](https://github.com/telstra/MessagingAPI-SDK-node) - [Messaging API - .Net2 SDK](https://github.com/telstra/MessagingAPI-SDK-dotnet) - [Messaging API - Java SDK](https://github.com/telstra/MessagingAPI-SDK-Java) ## Blog Posts For more information on the Messaging API, you can read these blog posts: - [Callbacks Part 1](https://dev.telstra.com/content/understanding-messaging-api-callbacks-part-1) - [Callbacks Part 2](https://dev.telstra.com/content/understanding-messaging-api-callbacks-part-2) # noqa: E501
OpenAPI spec version: 2.2.9
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
def __init__(cls, name, bases, dct):
super(TypeWithDefault, cls).__init__(name, bases, dct)
cls._default = None
def __call__(cls):
if cls._default is None:
cls._default = type.__call__(cls)
return copy.copy(cls._default)
def set_default(cls, default):
cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self):
"""Constructor"""
# Default Base url
self.host = "https://tapi.telstra.com/v2"
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# access token for OAuth
self.access_token = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("Telstra_Messaging")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API
# from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
# Set this to True/False to enable/disable SSL hostname verification.
self.assert_hostname = None
# urllib3 connection pool's maximum number of connections saved
# per pool. urllib3 uses 1 connection as default value, but this is
# not the best value when you are making a lot of possibly parallel
# requests to the same host, which is often the case here.
# cpu_count * 5 is used as default value to increase performance.
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
# Proxy URL
self.proxy = None
# Safe chars for path_param
self.safe_chars_for_path_param = ''
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if (self.api_key.get(identifier) and
self.api_key_prefix.get(identifier)):
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501
elif self.api_key.get(identifier):
return self.api_key[identifier]
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
'auth':
{
'type': 'oauth2',
'in': 'header',
'key': 'Authorization',
'value': 'Bearer ' + self.access_token
},
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 2.2.9\n"\
"SDK Package Version: 1.0.6".\
format(env=sys.platform, pyversion=sys.version)
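# Example usage: a hedged sketch, not part of the generated module. It shows
# the ``TypeWithDefault`` behaviour: values set on the default instance become
# the defaults for every later ``Configuration()`` call.
if __name__ == "__main__":
    conf = Configuration()
    conf.access_token = "YOUR_OAUTH_TOKEN"  # hypothetical token
    Configuration.set_default(conf)
    print(Configuration().auth_settings()["auth"]["value"])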
# ==== gym_minigrid/envs/mygridworld.py | repo: nathan-miller23/gym-minigrid | license: Apache-2.0 ====
from gym_minigrid.minigrid import *
from gym_minigrid.register import register
class MyEnv(MyMiniGridEnv):
def __init__(self, size=9, max_steps=100, start_pos=(1, 1), good_goal_pos=None, bad_goal_pos=None, reward='sparse', good_goal_reward=10, bad_goal_reward=-10):
self.start_pos = start_pos
self.good_goal_pos = good_goal_pos
self.bad_goal_pos = bad_goal_pos
self.reward = reward
self.good_goal_reward = good_goal_reward
self.bad_goal_reward = bad_goal_reward
super(MyEnv, self).__init__(grid_size=size, max_steps=max_steps)
def _gen_grid(self, width, height):
self.grid = Grid(width, height)
self.grid.wall_rect(0, 0, width, height)
self.put_obj(GoodGoal(), *self.good_goal_pos)
self.put_obj(BadGoal(), *self.bad_goal_pos)
self.agent_pos = self.start_pos
self.agent_dir = 0
self.mission = "Be the best agent I can be"
def _reward(self):
curr_cell = self.grid.get(*self.agent_pos)
if curr_cell.goal_type == 'good':
return self.good_goal_reward
elif curr_cell.goal_type == 'bad':
return self.bad_goal_reward
else:
raise ValueError("Called `self._reward()` at incorrect time!")
def dist_to_goal(self, pos):
x, y = pos
goal_x, goal_y = self.good_goal_pos
return abs(goal_x - x) + abs(goal_y - y)
def _dense_reward(self, s, s_prime):
if self.reward == 'sparse':
return 0
return self.dist_to_goal(s) - self.dist_to_goal(s_prime)
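# Example usage: a hedged sketch, not part of the original module. It assumes
# ``MyMiniGridEnv`` exposes the usual gym-minigrid ``reset``/``step`` API and
# an ``actions`` enum.
if __name__ == "__main__":
    env = MyEnv(size=9, good_goal_pos=(7, 7), bad_goal_pos=(1, 7))
    obs = env.reset()
    obs, reward, done, info = env.step(env.actions.forward)
    print(reward, done)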
# ==== dai10shou/code10-1.py | repo: naoshige314/workshop01 | license: MIT ====
# number of vertices
n=7
# adjacency list representation
G=[[] for _ in range(n)]
G[0]=[1,2]
G[1]=[0,3]
G[2]=[0,4,5]
#etc.
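# A short traversal sketch, not part of the original snippet. It assumes the
# remaining adjacency entries G[3]..G[6] would be filled in the same way:
# depth-first search from vertex 0.
def dfs(v, seen):
    seen[v] = True
    for nxt in G[v]:
        if not seen[nxt]:
            dfs(nxt, seen)

seen = [False] * n
dfs(0, seen)
print(seen)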
# ==== tawsocks/tcp_relay.py | repo: comeacrossyun/skyun | license: MIT ====
#!/usr/bin/env python
# coding=utf-8
# @Time : 2019-06-04
# @Author : hongshu
import sys
import asyncio
from tawsocks import common
class TcpRelayHandler(object):
def __init__(self, is_client, config, loop):
self.is_client = is_client
self.config = config
self.loop = loop
async def start(self):
await self._listening()
async def _listening(self):
if self.is_client:
await asyncio.start_server(self._shake_hand, '0.0.0.0', self.config.client_port, loop=self.loop)
else:
await asyncio.start_server(self._establish_connection, '0.0.0.0', self.config.server_port, loop=self.loop)
async def _shake_hand(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
data = await common.read_data(reader, False, self.config.password)
if len(data) < 2 or len(data) != 2 + data[1] or data[0] != 0x05:
self._shake_hand_fail(writer)
writer.close()
return
        # Check whether the client accepts the "no authentication" method
if 0x00 not in data[2:]:
self._shake_hand_fail(writer)
writer.close()
return
self._shake_hand_success(writer)
await self._establish_connection(reader, writer)
def _shake_hand_success(self, writer):
common.write_data(writer, b'\x05\x00', False, self.config.password)
def _shake_hand_fail(self, writer):
common.write_data(writer, b'\x05\xff', False, self.config.password)
async def _establish_connection(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
if self.is_client:
data = await common.read_data(reader, False, self.config.password)
try:
remote_reader, remote_writer = \
await asyncio.open_connection(self.config.server_host, self.config.server_port, loop=self.loop)
except:
                print('cannot connect to proxy server')
self._establish_connection_fail(writer, 0x04)
writer.close()
return
common.write_data(remote_writer, data, True, self.config.password)
data = await common.read_data(remote_reader, True, self.config.password)
if data[1] == 0x00:
self._establish_connection_success(writer)
else:
self._establish_connection_fail(writer, data[1])
writer.close()
return
await self._transfer_data(reader, writer, remote_reader, remote_writer)
else:
data = await common.read_data(reader, True, self.config.password)
if data[0] != 0x05 or data[2] != 0x00:
self._establish_connection_fail(writer, 0x02)
writer.close()
return
            # Only TCP and UDP are supported
if data[1] == 0x01: # TCP
pass
elif data[1] == 0x03: # UDP
self._establish_connection_success(writer)
return
else:
self._establish_connection_fail(writer, 0x07)
writer.close()
return
if data[3] == 0x01: # IPv4
remote_host = '%d.%d.%d.%d' % (int(data[4]), int(data[5]), int(data[6]), int(data[7]))
            elif data[3] == 0x03:  # domain name
remote_host = str(data[5: -2], encoding='utf-8')
elif data[3] == 0x04: # IPv6
self._establish_connection_fail(writer, 0x08)
writer.close()
return
else:
self._establish_connection_fail(writer, 0x02)
writer.close()
return
remote_port = int.from_bytes(bytes=data[-2:], byteorder='big')
print("remote host: %s:%s" % (remote_host, remote_port))
try:
remote_reader, remote_writer = await asyncio.open_connection(remote_host, remote_port, loop=self.loop)
except:
                print('failed to connect to %s' % remote_host, file=sys.stderr)
self._establish_connection_fail(writer, 0x04)
writer.close()
return
self._establish_connection_success(writer)
await self._transfer_data(reader, writer, remote_reader, remote_writer)
def _establish_connection_success(self, writer):
if self.is_client:
data = bytes([0x05, 0x00, 0x00, 0x01, 0, 0, 0, 0])
data += common.convert_port_to_bytes(self.config.client_port)
common.write_data(writer, data, False, self.config.password)
else:
data = bytes([0x05, 0x00])
common.write_data(writer, data, True, self.config.password)
def _establish_connection_fail(self, writer, error_code):
if self.is_client:
data = bytes([0x05, error_code, 0x00, 0x01, 0, 0, 0, 0])
data += common.convert_port_to_bytes(self.config.client_port)
common.write_data(writer, data, False, self.config.password)
else:
data = bytes([0x05, error_code])
common.write_data(writer, data, True, self.config.password)
async def _transfer_data(self, reader, writer, remote_reader, remote_writer):
if self.is_client:
await asyncio.gather(
common.transfer_data_with_encrypt(reader, remote_writer, self.config.password),
common.transfer_data_with_decrypt(remote_reader, writer, self.config.password),
loop=self.loop
)
else:
await asyncio.gather(
common.transfer_data_with_decrypt(reader, remote_writer, self.config.password),
common.transfer_data_with_encrypt(remote_reader, writer, self.config.password),
loop=self.loop
)
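# Example usage: a hedged sketch, not part of the original module. The config
# object here is hypothetical; the handler only needs the attributes that are
# referenced above (client/server ports, server host, and password).
if __name__ == "__main__":
    import types

    config = types.SimpleNamespace(
        client_port=1080,
        server_host="127.0.0.1",
        server_port=8388,
        password="secret",
    )
    loop = asyncio.get_event_loop()
    handler = TcpRelayHandler(is_client=True, config=config, loop=loop)
    loop.run_until_complete(handler.start())
    loop.run_forever()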
# ==== exchange/utils/http_util.py | repo: inasie/PyExchange | license: MIT ====
# -*- coding: utf-8 -*-
import requests
import json
import logging
class HttpUtil:
"""
http util
"""
def get(self, url, params=None):
'''
get request
:param str url: url
        :param dict params: query parameters
:return: json object or json array
'''
resp = requests.get(url, params=params)
if resp.status_code != 200:
logging.error('get(%s) failed(%d)' % (url, resp.status_code))
if resp.text is not None:
logging.error('resp: %s' % resp.text)
return None
return json.loads(resp.text)
def get_raw(self, url):
'''
get request
:param str url: url
:return: response text
'''
resp = requests.get(url)
if resp.status_code != 200:
logging.error('get(%s) failed(%d)' % (url, resp.status_code))
if resp.text is not None:
logging.error('resp: %s' % resp.text)
return None
return resp.text
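# Example usage: a hedged sketch, not part of the original module; the URL is
# illustrative only.
if __name__ == "__main__":
    util = HttpUtil()
    result = util.get("https://api.example.com/ticker", params={"market": "BTC"})
    print(result)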
# ==== src/glitchygames/movement/vertical.py | repo: terrysimons/ghettogames | license: BSD-3-Clause ====
"""
Vertical:
Adds movement functions along the vertical (Y) axis to a game object
"""
class Vertical:
def __init__(self, speed):
self.speed = speed
self.current_speed = self.speed.y
def _change_speed(self, value):
self.current_speed = value
def up(self):
self._change_speed(-self.speed.y)
def down(self):
self._change_speed(self.speed.y)
def stop(self):
self._change_speed(0)
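# Example usage: a hedged sketch, not part of the original module. The
# ``speed`` argument only needs a ``y`` attribute, so a SimpleNamespace works.
if __name__ == "__main__":
    from types import SimpleNamespace

    movement = Vertical(speed=SimpleNamespace(y=4))
    movement.up()
    print(movement.current_speed)  # -4
    movement.stop()
    print(movement.current_speed)  # 0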
# ==== pip_services3_datadog/clients/DataDogLogClient.py | repo: pip-services3-python/pip-services3-datadog-python | license: MIT ====
# -*- coding: utf-8 -*-
import datetime
from typing import Optional, List, Any
from pip_services3_commons.config import ConfigParams
from pip_services3_commons.convert import StringConverter
from pip_services3_commons.errors import ConfigException
from pip_services3_commons.refer import IReferences
from pip_services3_components.auth import CredentialResolver
from pip_services3_rpc.clients import RestClient
from pip_services3_datadog.clients.DataDogLogMessage import DataDogLogMessage
class DataDogLogClient(RestClient):
__default_config: ConfigParams = ConfigParams.from_tuples(
"connection.protocol", "https",
"connection.host", "http-intake.logs.datadoghq.com",
"connection.port", 443,
"credential.internal_network", "true"
)
def __init__(self, config: ConfigParams = None):
super().__init__()
self.__credential_resolver = CredentialResolver()
if config:
self.configure(config)
self._base_route = 'v1'
def configure(self, config: ConfigParams):
config = self.__default_config.override(config)
super().configure(config)
self.__credential_resolver.configure(config)
def set_references(self, references: IReferences):
super().set_references(references)
self.__credential_resolver.set_references(references)
def open(self, correlation_id: Optional[str]):
credential = self.__credential_resolver.lookup(correlation_id)
if credential is None or credential.get_access_key() is None:
raise ConfigException(
correlation_id,
"NO_ACCESS_KEY",
"Missing access key in credentials"
)
self._headers = self._headers or {}
self._headers['DD-API-KEY'] = credential.get_access_key()
super().open(correlation_id)
    def __convert_tags(self, tags: Dict[str, str]) -> Optional[str]:
if tags is None:
return
builder: str = ''
for key in tags:
if builder != '':
builder += ','
builder += key + ':' + tags[key]
return builder
def __convert_message(self, message: DataDogLogMessage) -> Any:
result = {
"timestamp": StringConverter.to_string(message.time or datetime.datetime),
"status": message.status or "INFO",
"ddsource": message.source or 'pip-services',
# "source": message.source or 'pip-services',
"service": message.service,
"message": message.message,
}
if message.tags:
result['ddtags'] = self.__convert_tags(message.tags)
if message.host:
result['host'] = message.host
if message.logger_name:
result['logger.name'] = message.logger_name
if message.thread_name:
result['logger.thread_name'] = message.thread_name
if message.error_message:
result['error.message'] = message.error_message
if message.error_kind:
result['error.kind'] = message.error_kind
if message.error_stack:
result['error.stack'] = message.error_stack
return result
def __convert_messages(self, messages: List[DataDogLogMessage]) -> List[Any]:
return list(map(lambda m: self.__convert_message(m), messages))
def send_logs(self, correlation_id: Optional[str], messages: List[DataDogLogMessage]) -> Any:
data = self.__convert_messages(messages)
# Commented instrumentation because otherwise it will never stop sending logs...
# timing = self._instrument(correlation_id, 'datadog.send_logs')
try:
return self._call("post", "input", None, None, data)
finally:
# timing.end_timing()
pass
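# Example usage: a hedged sketch, not part of the original module. It assumes
# ``DataDogLogMessage`` can be populated attribute-by-attribute and that a
# valid DataDog API key is supplied through the credential parameters.
if __name__ == "__main__":
    client = DataDogLogClient(ConfigParams.from_tuples(
        "credential.access_key", "YOUR_DATADOG_API_KEY"  # hypothetical key
    ))
    client.open(None)
    message = DataDogLogMessage()
    message.service = "demo"
    message.message = "hello from the sketch"
    client.send_logs(None, [message])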
# ==== gui/composition_worker.py | repo: ivanovwaltz/wavelet_sound_microscope | license: MIT ====
import logging
import os
from functools import partial
from PIL.Image import Image
from PyQt5.QtCore import QObject, pyqtSignal, QThread
from PyQt5.QtWidgets import QProgressDialog
from .threading import QThreadedWorkerDebug as QThreadedWorker
from analyze.composition import Composition, Spectrogram
from analyze.media.sound import Sound, SoundResampled
from utils import ProgressProxy
SAMPLERATE = 1024 * 16
log = logging.getLogger(__name__)
class ProgressProxyToProgressDialog(ProgressProxy):
def __init__(self, progress_dialog, *args, **kwargs):
self.progress_dialog = progress_dialog
super().__init__(*args, **kwargs)
def start(self):
self.progress_dialog.reset()
self.progress_dialog.setRange(0, self.length)
def make_step(self):
super().make_step()
if self.progress_dialog.wasCanceled():
self.cancel()
def render_progress(self):
self.progress_dialog.setValue(self.pos)
def done(self):
log.debug('ProgressProxyToProgressDialog.done')
if getattr(self, 'canceled', False):
raise CompositionCanceled
def cancel(self):
self.canceled = True
raise StopIteration
class CompositionCanceled(Exception):
pass
class QCompositionWorker(QThreadedWorker):
def __init__(self):
super().__init__()
self.busy = False
self.process.connect(self._process)
process = pyqtSignal(Sound, QProgressDialog)
process_ok = pyqtSignal(Spectrogram)
process_error = pyqtSignal(str)
message = pyqtSignal(str)
def set_progress_value(self, val):
self._message('Progress value: {}'.format(val))
    def _process(self, sound, progress_dialog):
log.debug('Before Image processed')
# FIXME Implement jobs queue. Just cancel previous here
if self.busy:
            self.process_error.emit('Busy')
return
self.busy = True
self._message('Resample sound')
sound_resampled = SoundResampled(sound, SAMPLERATE)
progressbar = partial(ProgressProxyToProgressDialog, progress_dialog)
self._message('Prepare composition')
try:
with Composition(
sound_resampled, scale_resolution=1/155, omega0=70
) as composition:
self._message('Analyse')
spectrogram = composition.get_spectrogram(progressbar)
except CompositionCanceled:
log.debug('Composition canceled')
self.process_error.emit('Composition canceled')
return
else:
log.debug('Image processed')
self.process_ok.emit(spectrogram)
finally:
self.busy = False
def _message(self, msg):
self.message.emit(msg)
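# Example wiring: a hedged sketch, not part of the original module. A caller
# would connect the worker's signals before emitting ``process``; the Sound
# and QProgressDialog instances are assumed to come from the host application.
#
#     worker = QCompositionWorker()
#     worker.process_ok.connect(on_spectrogram_ready)  # hypothetical slot
#     worker.process_error.connect(on_error)           # hypothetical slot
#     worker.process.emit(sound, progress_dialog)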
# ==== metalpipe/node.py | repo: zacernst/nanostream | license: MIT ====
"""
Node module
===========
The ``node`` module contains the ``MetalNode`` class, which is the foundation
for MetalPipe.
"""
import time
import datetime
import uuid
import importlib
import logging
import os
import threading
import pprint
import sys
import copy
import random
import functools
import csv
import MySQLdb
import re
import io
import yaml
import types
import inspect
import prettytable
import requests
import graphviz
from timed_dict.timed_dict import TimedDict
from metalpipe.message.batch import BatchStart, BatchEnd
from metalpipe.message.message import MetalPipeMessage
from metalpipe.node_queue.queue import MetalPipeQueue
from metalpipe.message.canary import Canary
from metalpipe.utils.set_attributes import set_kwarg_attributes
from metalpipe.utils.data_structures import Row, MySQLTypeSystem
from metalpipe.utils import data_structures as ds
# from metalpipe.metalpipe_recorder import RedisFixturizer
from metalpipe.utils.helpers import (
load_function,
replace_by_path,
remap_dictionary,
set_value,
get_value,
to_bool,
aggregate_values,
)
DEFAULT_MAX_QUEUE_SIZE = int(os.environ.get("DEFAULT_MAX_QUEUE_SIZE", 128))
MONITOR_INTERVAL = 1
STATS_COUNTER_MODULO = 4
LOGJAM_THRESHOLD = 0.25
SHORT_DELAY = 0.1
PROMETHEUS = False
def no_op(*args, **kwargs):
"""
No-op function to serve as default ``get_runtime_attrs``.
"""
return None
class bcolors:
"""
This class holds the values for the various colors that are used in the
tables that monitor the status of the nodes.
"""
HEADER = "\033[95m"
OKBLUE = "\033[94m"
OKGREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
ENDC = "\033[0m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
class NothingToSeeHere:
"""
Vacuous class used as a no-op message type.
"""
pass
class Terminated:
"""
    Class optionally sent when a node is done processing messages (i.e., when
    its upstream nodes have finished).
"""
def __init__(self, node):
self.node = node
class MetalNode:
"""
The foundational class of `MetalPipe`. This class is inherited by all
nodes in a computation graph.
Order of operations:
1. Child class ``__init__`` function
2. ``MetalNode`` ``__init__`` function
3. ``preflight_function`` (Specified in initialization params)
4. ``setup``
    5. ``start``
These methods have the following intended uses:
1. ``__init__`` Sets attribute values and calls the ``MetalNode`` ``__init__``
method.
2. ``get_runtime_attrs`` Sets any attribute values that are to be determined
at runtime, e.g. by checking environment variables or reading values
from a database. The ``get_runtime_attrs`` should return a dictionary
of attributes -> values, or else ``None``.
3. ``setup`` Sets the state of the ``MetalNode`` and/or creates any attributes
that require information available only at runtime.
Args:
send_batch_markers: If ``True``, then a ``BatchStart`` marker will
be sent when a new input is received, and a ``BatchEnd`` will be sent
after the input has been processed. The intention is that a number of
items will be emitted for each input received. For example, we might
emit a table row-by-row for each input.
get_runtime_attrs: A function that returns a dictionary-like object.
The keys and values will be saved to this ``MetalNode`` object's
attributes. The function is executed one time, upon starting the node.
get_runtime_attrs_args: A tuple of arguments to be passed to the
``get_runtime_attrs`` function upon starting the node.
get_runtime_attrs_kwargs: A dictionary of kwargs passed to the
``get_runtime_attrs`` function.
runtime_attrs_destinations: If set, this is a dictionary mapping
the keys returned from the ``get_runtime_attrs`` function to the
names of the attributes to which the values will be saved.
throttle: For each input received, a delay of ``throttle`` seconds
will be added.
keep_alive: If ``True``, keep the node's thread alive after
everything has been processed.
name: The name of the node. Defaults to a randomly generated hash.
Note that this hash is not consistent from one run to the next.
input_mapping: When the node receives a dictionary-like object,
this dictionary will cause the keys of the dictionary to be remapped
to new keys.
retain_input: If ``True``, then combine the dictionary-like input
with the output. If keys clash, the output value will be kept.
input_message_keypath: Read the value in this keypath as the content
of the incoming message.
"""
def __init__(
self,
*args,
batch=False,
get_runtime_attrs=no_op,
get_runtime_attrs_args=None,
get_runtime_attrs_kwargs=None,
runtime_attrs_destinations=None,
input_mapping=None,
retain_input=True,
throttle=0,
keep_alive=True,
max_errors=0,
max_messages_received=None,
name=None,
input_message_keypath=None,
key=None,
messages_received_counter=0,
prefer_existing_value=False,
messages_sent_counter=0,
post_process_function=None,
post_process_keypath=None,
summary="",
fixturize=False,
post_process_function_kwargs=None,
output_key=None,
break_test=None,
send_termination_message=False,
**kwargs
):
self.name = name or uuid.uuid4().hex
self.input_mapping = input_mapping or {}
self.input_queue_list = []
self.output_queue_list = []
self.input_node_list = []
self.queue_event = threading.Event()
self.input_message_keypath = input_message_keypath or []
self.output_node_list = []
self.max_messages_received = max_messages_received
self.global_dict = None # We'll add a dictionary upon startup
self.terminate = False
self.thread_dict = {}
self.kill_thread = False
self.prefer_existing_value = prefer_existing_value
self.accumulator = {}
self.output_key = output_key
self.fixturize = fixturize
self.keep_alive = keep_alive
self.retain_input = (
retain_input # Keep the input dictionary and send it downstream
)
if break_test is not None:
self.break_test = load_function(break_test)
else:
self.break_test = None
self.throttle = throttle
self.get_runtime_attrs = get_runtime_attrs
self.get_runtime_attrs_args = get_runtime_attrs_args or tuple()
self.cleanup_called = False
self.get_runtime_attrs_kwargs = get_runtime_attrs_kwargs or {}
self.runtime_attrs_destinations = runtime_attrs_destinations or {}
self.key = key
self.messages_received_counter = messages_received_counter
self.messages_sent_counter = messages_sent_counter
self.instantiated_at = datetime.datetime.now()
self.started_at = None
self.stopped_at = None
self.error_counter = 0
self.status = "stopped" # running, error, success
self.max_errors = max_errors
self.post_process_function_name = (
post_process_function # Function to be run on result
)
self.post_process_function_kwargs = post_process_function_kwargs or {}
self.summary = summary
self.prometheus_objects = None
self.logjam_score = {"polled": 0.0, "logjam": 0.0}
self.send_termination_message = send_termination_message
# Get post process function if one is named
if self.post_process_function_name is not None:
components = self.post_process_function_name.split("__")
if len(components) == 1:
module = None
function_name = components[0]
self.post_process_function = globals()[function_name]
else:
module = ".".join(components[:-1])
function_name = components[-1]
module = importlib.import_module(module)
self.post_process_function = getattr(module, function_name)
else:
self.post_process_function = None
self.post_process_keypath = (
post_process_keypath.split(".")
if post_process_keypath is not None
else None
)
if self.fixturize:
            # NOTE: ``RedisFixturizer`` requires the import that is currently
            # commented out at the top of this module.
            self.fixturizer = RedisFixturizer()
else:
self.fixturizer = None
def setup(self):
"""
For classes that require initialization at runtime, which can't be done
when the class's ``__init__`` function is called. The ``MetalNode`` base
class's setup function is just a logging call.
It should be unusual to have to make use of ``setup`` because in practice,
initialization can be done in the ``__init__`` function.
"""
logging.debug(
"No ``setup`` method for {class_name}.".format(
class_name=self.__class__.__name__
)
)
pass
def __gt__(self, other):
"""
Convenience method so that we can link two nodes by ``node1 > node2``.
This just calls ``add_edge``.
"""
self.add_edge(other)
return other
@property
def is_source(self):
"""
Tests whether the node is a source or not, i.e. whether there are no
inputs to the node.
Returns:
(bool): ``True`` if the node has no inputs, ``False`` otherwise.
"""
return len(self.input_queue_list) == 0
@property
def is_sink(self):
"""
Tests whether the node is a sink or not, i.e. whether there are no
outputs from the node.
Returns:
(bool): ``True`` if the node has no output nodes, ``False`` otherwise.
"""
return len(self.output_queue_list) == 0
def add_edge(self, target, **kwargs):
"""
Create an edge connecting `self` to `target`.
This method instantiates the ``MetalPipeQueue`` object that connects the
        nodes. Connecting the nodes together consists of (1) adding the queue
        to the target's ``input_queue_list`` and the source's
        ``output_queue_list``, and (2) setting the queue's ``source_node`` and
        ``target_node`` attributes.
Args:
target (``MetalNode``): The node to which ``self`` will be connected.
"""
max_queue_size = kwargs.get("max_queue_size", DEFAULT_MAX_QUEUE_SIZE)
edge_queue = MetalPipeQueue(max_queue_size)
self.output_node_list.append(target)
target.input_node_list.append(self)
edge_queue.source_node = self
edge_queue.target_node = target
target.input_queue_list.append(edge_queue)
self.output_queue_list.append(edge_queue)
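    # For example (a sketch; the node classes named here are illustrative):
    #
    #     reader = SomeSourceNode(name="reader")
    #     writer = SomeSinkNode(name="writer")
    #     reader.add_edge(writer)    # or, equivalently: reader > writer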
def _get_message_content(self, one_item):
# Get the content of a specific keypath, if one has
# been defined in the ``MetalNode`` initialization.
message_content = (
get_value(one_item.message_content, self.input_message_keypath)
if len(self.input_message_keypath) > 0
else one_item.message_content
)
if (
isinstance(message_content, (dict,))
and len(message_content) == 1
and "__value__" in message_content
):
message_content = message_content["__value__"]
return message_content
def wait_for_pipeline_finish(self):
while not self.pipeline_finished:
time.sleep(SHORT_DELAY)
def start(self):
"""
Starts the node. This is called by ``MetalNode.global_start()``.
The node's main loop is contained in this method. The main loop does
the following:
1. records the timestamp to the node's ``started_at`` attribute.
#. calls ``get_runtime_attrs`` (TODO: check if we can deprecate this)
#. calls the ``setup`` method for the class (which is a no-op by default)
#. if the node is a source, then successively yield all the results of
the node's ``generator`` method, then exit.
#. if the node is not a source, then loop over the input queues, getting
the next message. Note that when the message is pulled from the queue,
the ``MetalPipeQueue`` yields it as a dictionary.
#. gets either the content of the entire message if the node has no ``key``
attribute, or the value of ``message[self.key]``.
#. remaps the message content if a ``remapping`` dictionary has been
given in the node's configuration
#. calls the node's ``process_item`` method, yielding back the results.
(Note that a single input message may cause the node to yield zero,
one, or more than one output message.)
#. places the results into each of the node's output queues.
"""
self.started_at = datetime.datetime.now()
logging.debug("Starting node: {node}".format(node=self.__class__.__name__))
# ``get_runtime_attrs`` returns a dict-like object whose keys and
# values are stored as attributes of the ``MetalNode`` object.
if self.get_runtime_attrs is not None:
pre_flight_results = (
self.get_runtime_attrs(
*self.get_runtime_attrs_args, **self.get_runtime_attrs_kwargs
)
or {}
)
if self.runtime_attrs_destinations is not None:
for key, value in pre_flight_results.items():
setattr(self, self.runtime_attrs_destinations[key], value)
elif self.runtime_attrs_destinations is None:
for key, value in pre_flight_results.items():
setattr(self, key, value)
else:
raise Exception(
"There is a ``get_runtime_attrs``, but the "
"``runtime_attrs_destinations`` is neither None nor a "
"dict-like object."
)
# We have to separate the pre-flight function, the setup of the
# class, and any necessary startup functions (such as connecting
# to a database).
self.setup() # Setup function?
if self.is_source and not isinstance(self, (DynamicClassMediator,)):
for output in self.generator():
if self.fixturizer:
self.fixturizer.record_source_node(self, output)
yield output, None
for output in self._cleanup():
yield output, None
else:
logging.debug(
"About to enter loop for reading input queue in {node}.".format(
node=str(self)
)
)
# insert conditions for having no more messages to read...
upstream_nodes_finished = all(
input_node.cleanup_called for input_node in self.input_node_list
)
input_queues_empty = self.is_source or self.input_queues_empty()
while not (upstream_nodes_finished and input_queues_empty):
for input_queue in self.input_queue_list:
one_item = input_queue.get()
####
if self.terminate:
# self.finished = True
break
if one_item is None:
continue
# Keep track of where the message came from, useful for
# managing streaming joins, e.g.
message_source = input_queue.source_node
self.messages_received_counter += 1
if (
self.max_messages_received is not None
and self.messages_received_counter > self.max_messages_received
):
self.finished = True
break
# The ``throttle`` keyword introduces a delay in seconds
time.sleep(self.throttle)
# Retrieve the ``message_content``
message_content = self._get_message_content(one_item)
# If we receive ``None`` or a ``NothingToSeeHere``, continue.
if message_content is None or isinstance(
message_content, (NothingToSeeHere,)
):
continue
# Record the message and its source in the node's attributes
self.message = message_content
self.message_source = message_source
# Otherwise, process the message as usual, by calling
# the ``MetalNode`` object's ``process_item`` method.
for output in self._process_item():
# Put redis recording here
if self.fixturizer:
self.fixturizer.record_worker_node(self, one_item, output)
yield output, one_item # yield previous message
### Do the self.break_test() if it's been defined
### Execute the function and break
### if it returns True
if self.break_test is not None and not self.finished:
self.log_info("running break_test.")
break_test_result = self.break_test(
output_message=output, input_message=self.__message__,
)
self.log_info("NODE BREAK TEST: " + str(break_test_result))
# self.finished = break_test_result
# Check input node(s) here to see if they're all ``.finished``
upstream_nodes_finished = all(
input_node.cleanup_called for input_node in self.input_node_list
)
input_queues_empty = self.is_source or self.input_queues_empty()
self.log_info("checking whether cleanup is a generator. " + str(self.name))
for i in self._cleanup():
yield i, None
@property
def upstream_nodes_finished(self):
return all(input_node.cleanup_called for input_node in self.input_node_list)
@property
def finished(self):
"""
A node is considered "finished" if:
1. All of its immediate parents are "finished" (including if the node
is a generator and has no parents);
2. All of its input queues are empty;
3. It is not processing any messages;
4. Its ``cleanup`` method (if any) has been called.
Alternatively, a node is forced to be in a "finished" state if the
pipeline is being terminated. This causes each node's ``terminate``
attribute to be set to ``True``.
"""
input_queues_empty = self.is_source or self.input_queues_empty()
return (
self.upstream_nodes_finished and input_queues_empty and self.cleanup_called
) or self.terminate
def input_queues_empty(self):
"""
Tests whether there are any messages on any of the node's input
queues.
Returns:
bool: ``True`` if input queues are all empty.
"""
return all(queue.empty for queue in self.input_queue_list)
def cleanup(self):
"""
If there is any cleanup (closing files, shutting down database connections),
necessary when the node is stopped, then the node's class should provide
a ``cleanup`` method. By default, the method is just a logging statement.
"""
self.log_info("in null cleanup")
yield NothingToSeeHere()
def _cleanup(self):
self.log_info("Cleanup called after shutdown.")
for i in self.cleanup():
yield i
# Send termination message here
if self.send_termination_message:
yield Terminated(self)
for q in self.output_queue_list:
while not q.empty:
pass
self.log_info("setting cleanup_called to True")
self.cleanup_called = True
def log_info(self, message=""):
logging.info(
"{node_name}: {message}".format(node_name=self.name, message=message)
)
def terminate_pipeline(self, error=False):
"""
This method can be called on any node in a pipeline, and it will cause
all of the nodes to terminate if they haven't stopped already.
Args:
error (bool): Not yet implemented.
"""
self.log_info("terminate_pipeline called..." + str(self.name))
for node in self.all_connected():
node.terminate = True
for q in node.output_queue_list:
q.drain()
# if not node.finished:
# node.stopped_at = datetime.datetime.now()
# print('setting node.terminate')
# node.terminate = True
def process_item(self, *args, **kwargs):
"""
Default no-op for nodes.
"""
pass
def generator(self):
"""
If there is no ``generator`` method, then call the node's ``process_item``
method instead, assuming that there is code to accommodate this case.
"""
for i in self.process_item():
yield i
@property
def __message__(self):
"""
If the node has an ``output_key`` defined, return the corresponding
value in the message dictionary. If it does not, return the entire
message dictionary.
Nodes should access the content of their incoming message via this
property.
"""
if self.key is None:
out = self.message
elif isinstance(self.key, (str,)):
out = self.message[self.key]
elif isinstance(self.key, (list,)):
out = get_value(self.message, self.key)
else:
raise Exception("Bad type for input key.")
return out
def _process_item(self, *args, **kwargs):
"""
This method wraps the node's ``process_item`` method. It provides a place
to insert code for logging, error handling, etc.
There's lots of experimental code here, particularly the code for
Prometheus monitoring.
"""
# Swap out the message if ``key`` is specified
# If we're using prometheus, then increment a counter
if self.prometheus_objects is not None:
self.prometheus_objects["incoming_message_summary"].observe(random.random())
message_arrival_time = time.time()
try:
for out in self.process_item(*args, **kwargs):
if (
not isinstance(out, (dict, NothingToSeeHere))
and self.output_key is None
):
logging.debug("Exception raised due to no key" + str(self.name))
                    raise Exception(
                        "Either message must be a dictionary or `output_key` "
                        "must be specified. {name}".format(name=self.name)
                    )
# Apply post_process_function if it's defined
if self.post_process_function is not None:
set_value(
out,
self.post_process_keypath,
self.post_process_function(
get_value(out, self.post_process_keypath),
**self.post_process_function_kwargs
),
)
if self.prometheus_objects is not None:
self.prometheus_objects["outgoing_message_summary"].set(
time.time() - message_arrival_time
)
yield out
except Exception as err:
self.error_counter += 1
logging.error(
"message: "
+ str(err.args)
+ str(self.__class__.__name__)
+ str(self.name)
)
if self.error_counter > self.max_errors:
self.terminate_pipeline(error=True)
self.status = "error" #
else:
                logging.warning("Recoverable error in " + str(self.name) + "; continuing.")
def stream(self):
"""
Called in each ``MetalNode`` thread.
"""
self.status = "running"
if getattr(self, "_import_pydatalog", False):
from pyDatalog import pyDatalog, Logic
Logic(self.logic_engine)
try:
for output, previous_message in self.start():
logging.debug("In MetalNode.stream.stream() --> " + str(output))
for output_queue in self.output_queue_list:
self.messages_sent_counter += 1
output_queue.put(
output,
block=True,
timeout=None,
queue_event=self.queue_event,
previous_message=previous_message,
)
# if 1 or not isinstance(output, (NothingToSeeHere,)) and output is not None:
except Exception as error:
self.status = "error"
self.stopped_at = datetime.datetime.now()
raise error
self.status = "success"
self.stopped_at = datetime.datetime.now()
@property
def time_running(self):
"""
Return the number of wall-clock seconds elapsed since the node was
started.
"""
if self.status == "stopped":
return None
elif self.status == "running":
return datetime.datetime.now() - self.started_at
elif self.stopped_at is None:
return datetime.datetime.now() - self.started_at
else:
return self.stopped_at - self.started_at
def all_connected(self, seen=None):
"""
Returns all the nodes connected (directly or indirectly) to ``self``.
This allows us to loop over all the nodes in a pipeline even if we
have a handle on only one. This is used by ``global_start``, for
example.
Args:
seen (set): A set of all the nodes that have been identified as
connected to ``self``.
Returns:
(set of ``MetalNode``): All the nodes connected to ``self``. This
includes ``self``.
"""
seen = seen or set()
if isinstance(self, (DynamicClassMediator,)):
for node_name, node_dict in self.node_dict.items():
node_obj = node_dict["obj"]
seen = seen | node_obj.all_connected(seen=seen)
else:
if self not in seen:
seen.add(self)
for node in self.input_node_list + self.output_node_list:
if node in seen:
continue
seen.add(node)
seen = seen | node.all_connected(seen=seen)
return seen
def broadcast(self, broadcast_message):
"""
Puts the message into all the input queues for all connected nodes.
"""
for node in self.all_connected():
for input_queue in node.input_queue_list:
input_queue.put(broadcast_message)
@property
def logjam(self):
"""
Returns the logjam score, which measures the degree to which the
node is holding up progress in downstream nodes.
We're defining a logjam as a node whose input queue is full, but
whose output queue(s) is not. More specifically, we poll each node
in the ``monitor_thread``, and increment a counter if the node is
a logjam at that time. This property returns the percentage of
samples in which the node is a logjam. Our intention is that if
this score exceeds a threshold, the user is alerted, or the load
is rebalanced somehow (not yet implemented).
Returns:
(float): Logjam score
"""
if self.logjam_score["polled"] == 0:
return 0.0
else:
return self.logjam_score["logjam"] / self.logjam_score["polled"]
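    # Worked example (hypothetical numbers): if the monitor thread has polled a
    # node 200 times and found it jammed 50 times, ``logjam_score`` is
    # {"polled": 200, "logjam": 50} and ``logjam`` returns 50 / 200 == 0.25,
    # i.e. the node was a logjam in 25% of samples.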
def global_start(
self, prometheus=False, pipeline_name=None, max_time=None, fixturize=False,
):
"""
Starts every node connected to ``self``. Mainly, it:
1. calls ``start()`` on each node
#. sets some global variables
#. optionally starts some experimental code for monitoring
"""
def prometheus_init():
"""
Experimental code for enabling Prometheus monitoring.
"""
from prometheus_client import (
start_http_server,
Summary,
Gauge,
Histogram,
Counter,
)
for node in self.all_connected():
node.prometheus_objects = {}
summary = Summary(
node.name + "_incoming", "Summary of incoming messages"
)
node.prometheus_objects["incoming_message_summary"] = summary
node.prometheus_objects["outgoing_message_summary"] = Gauge(
node.name + "_outgoing", "Summary of outgoing messages"
)
start_http_server(8000)
if PROMETHEUS:
prometheus_init()
# thread_dict = self.thread_dict
global_dict = {}
run_id = uuid.uuid4().hex
for node in self.all_connected():
# Set the pipeline name on the attribute of each node
node.pipeline_name = pipeline_name or uuid.uuid4().hex
# Set a unique run_id
node.run_id = run_id
node.fixturize = fixturize
node.global_dict = global_dict # Establishing shared globals
logging.debug("global_start:" + str(self))
# Create thread event here?
thread = threading.Thread(
target=MetalNode.stream, args=(node,), daemon=False
)
thread.start()
node.thread_dict = self.thread_dict
self.thread_dict[node.name] = thread
node.status = "running"
monitor_thread = threading.Thread(
target=MetalNode.thread_monitor,
args=(self,),
kwargs={"max_time": max_time},
daemon=True,
)
monitor_thread.start()
@property
def input_queue_size(self):
"""
Return the total number of items in all of the queues that are inputs
to this node.
"""
return sum([input_queue.queue.qsize() for input_queue in self.input_queue_list])
    def kill_pipeline(self):
        # ``finished`` is a read-only property; signal shutdown through the
        # ``terminate`` flag, which forces every node to report finished.
        for node in self.all_connected():
            node.terminate = True
def draw_pipeline(self):
"""
Draw the pipeline structure using graphviz.
"""
dot = graphviz.Digraph()
for node in self.all_connected():
dot.node(node.name, node.name, shape="box")
for node in self.all_connected():
for target_node in node.output_node_list:
dot.edge(node.name, target_node.name)
dot.render("pipeline_drawing.gv", view=True)
@property
def pipeline_finished(self):
finished = all(node.cleanup_called for node in self.all_connected())
self.log_info("finished. " + str(self.name))
return finished
def thread_monitor(self, max_time=None):
"""
This function loops over all of the threads in the pipeline, checking
that they are either ``finished`` or ``running``. If any have had an
abnormal exit, terminate the entire pipeline.
"""
counter = 0
error = False
time_started = time.time()
while not self.pipeline_finished:
logging.debug("MONITOR THREAD")
time.sleep(MONITOR_INTERVAL)
counter += 1
            if max_time is not None:
                if time.time() - time_started >= max_time:
                    print("finished because of max_time")
                    # ``pipeline_finished`` and ``finished`` are read-only
                    # properties; force shutdown via each node's ``terminate`` flag.
                    for node in self.all_connected():
                        node.terminate = True
                    continue
# Check whether all the workers have ``.finished``
# self.pipeline_finished = all(
# node.finished for node in self.all_connected())
if counter % STATS_COUNTER_MODULO == 0:
table = prettytable.PrettyTable(
["Node", "Class", "Received", "Sent", "Queued", "Status", "Time",]
)
for node in sorted(list(self.all_connected()), key=lambda x: x.name):
if node.status == "running":
status_color = bcolors.WARNING
elif node.status == "stopped":
status_color = ""
elif node.status == "error":
status_color = bcolors.FAIL
error = True
elif node.status == "success":
status_color = bcolors.OKGREEN
else:
                        assert False, "Unknown node status: " + str(node.status)
if node.logjam >= LOGJAM_THRESHOLD:
logjam_color = bcolors.FAIL
else:
logjam_color = ""
table.add_row(
[
logjam_color + node.name + bcolors.ENDC,
node.__class__.__name__,
node.messages_received_counter,
node.messages_sent_counter,
node.input_queue_size,
status_color + node.status + bcolors.ENDC,
node.time_running,
]
)
self.log_info("\n" + str(table))
if error:
logging.error("Terminating due to error.")
self.terminate_pipeline(error=True)
# self.pipeline_finished = True
break
# Check for blocked nodes
for node in self.all_connected():
input_queue_full = [
input_queue.approximately_full()
for input_queue in node.input_queue_list
]
output_queue_full = [
output_queue.approximately_full()
for output_queue in node.output_queue_list
]
logjam = (
not node.is_source
and all(input_queue_full)
and not any(output_queue_full)
)
node.logjam_score["polled"] += 1
logging.debug("LOGJAM SCORE: {logjam}".format(logjam=str(node.logjam)))
if logjam:
node.logjam_score["logjam"] += 1
logging.debug(
"LOGJAM {logjam} {name}".format(logjam=logjam, name=node.name)
)
self.log_info("Pipeline finished.")
self.log_info("Sending terminate signal to nodes.")
self.log_info("Messages that are being processed will complete.")
# HERE
if error:
self.log_info("Abnormal exit")
sys.exit(1)
else:
self.log_info("Normal exit.")
sys.exit(0)
class CounterOfThings(MetalNode):
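    # NOTE: ``bar__init__`` looks like a deliberately disabled ``__init__``
    # (renamed so the default MetalNode initializer runs instead).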
def bar__init__(self, *args, start=0, end=None, **kwargs):
self.start = start
self.end = end
super(CounterOfThings, self).__init__(*args, **kwargs)
def generator(self):
"""
Just start counting integers
"""
counter = 1
while 1:
yield counter
counter += 1
if counter > 10:
assert False
class FunctionOfMessage(MetalNode):
def __init__(self, function_name, *args, **kwargs):
self.function_name = function_name
components = self.function_name.split("__")
if len(components) == 1:
module = None
function_name = components[0]
            function = globals()[function_name]
else:
module = ".".join(components[:-1])
function_name = components[-1]
module = importlib.import_module(module)
function = getattr(module, function_name)
self.function = function
super(FunctionOfMessage, self).__init__(*args, **kwargs)
def process_item(self):
yield self.function(self.__message__)
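# Usage sketch (hypothetical): ``function_name`` uses ``__`` as a module
# separator, so "json__dumps" resolves to ``json.dumps`` via importlib, while a
# bare name is looked up in this module's globals().
#
#     node = FunctionOfMessage("json__dumps")  # applies json.dumps to each message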
class MockNode(MetalNode):
"""
This is only intended for doing unit tests, etc.
"""
def __init__(self, **kwargs):
self.message_holder = None
self.message_counter = 0
self.message_list = []
super(MockNode, self).__init__(**kwargs)
def process_item(self):
self.message_holder = self.__message__
self.message_list.append(self.__message__)
self.message_counter += 1
yield NothingToSeeHere()
class InsertData(MetalNode):
def __init__(
self, overwrite=True, overwrite_if_null=True, value_dict=None, **kwargs
):
self.overwrite = overwrite
self.overwrite_if_null = overwrite_if_null
self.value_dict = value_dict or {}
super(InsertData, self).__init__(**kwargs)
def process_item(self):
logging.debug("INSERT DATA: " + str(self.__message__))
for key, value in self.value_dict.items():
if (
(key not in self.__message__)
or self.overwrite
                or (self.__message__.get(key) is None and self.overwrite_if_null)
):
self.__message__[key] = value
yield self.__message__
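# Usage sketch (hypothetical values): with ``overwrite=False``, existing keys
# are preserved and the defaults below only fill in missing keys (or null ones,
# via ``overwrite_if_null``).
#
#     defaults = InsertData(value_dict={"source": "api", "retries": 3},
#                           overwrite=False, overwrite_if_null=True)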
class RandomSample(MetalNode):
"""
Lets through only a random sample of incoming messages. Might be useful
for testing, or when only approximate results are necessary.
"""
    def __init__(self, sample=0.1, **kwargs):
        self.sample = sample
        super(RandomSample, self).__init__(**kwargs)
def process_item(self):
        yield self.message if random.random() <= self.sample else NothingToSeeHere()
class SubstituteRegex(MetalNode):
def __init__(self, match_regex=None, substitute_string=None, *args, **kwargs):
self.match_regex = match_regex
self.substitute_string = substitute_string
self.regex_obj = re.compile(self.match_regex)
super(SubstituteRegex, self).__init__(*args, **kwargs)
def process_item(self):
out = self.regex_obj.sub(self.substitute_string, self.message[self.key])
yield out
class CSVToDictionaryList(MetalNode):
def __init__(self, **kwargs):
super(CSVToDictionaryList, self).__init__(**kwargs)
def process_item(self):
csv_file_obj = io.StringIO(self.__message__)
csv_reader = csv.DictReader(csv_file_obj)
output = [row for row in csv_reader]
yield output
class SequenceEmitter(MetalNode):
"""
Emits ``sequence`` ``max_sequences`` times, or forever if
``max_sequences`` is ``None``.
"""
def __init__(self, sequence, *args, max_sequences=1, **kwargs):
self.sequence = sequence
self.max_sequences = max_sequences
super(SequenceEmitter, self).__init__(*args, **kwargs)
def generator(self):
"""
Emit the sequence ``max_sequences`` times.
"""
type_dict = {
"int": int,
"integer": int,
"str": str,
"string": str,
"float": float,
"bool": to_bool,
}
counter = 0
while counter < self.max_sequences:
for item in self.sequence:
if isinstance(item, (dict,)) and "value" in item and "type" in item:
item = type_dict[item["type"].lower()](item["value"])
item = {self.output_key: item}
yield item
counter += 1
def process_item(self):
"""
Emit the sequence ``max_sequences`` times.
"""
type_dict = {
"int": int,
"integer": int,
"str": str,
"string": str,
"float": float,
"bool": to_bool,
}
counter = 0
while counter < self.max_sequences:
for item in self.sequence:
if isinstance(item, (dict,)) and "value" in item and "type" in item:
item = type_dict[item["type"].lower()](item["value"])
item = {self.output_key: item}
yield item
counter += 1
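# Usage sketch (hypothetical): sequence items may be plain values or typed
# dicts cast via ``type_dict``, so {"value": "42", "type": "int"} is emitted as
# 42 under the node's ``output_key``.
#
#     emitter = SequenceEmitter(
#         [{"value": "42", "type": "int"}, "done"],
#         max_sequences=2, output_key="item")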
class GetEnvironmentVariables(MetalNode):
"""
This node reads environment variables and stores them in the message.
The required keyword argument for this node is ``environment_variables``,
which is a list of -- you guessed it! -- environment variables. By
default, they will be read and stored in the outgoing message under
keys with the same names as the environment variables. E.g. ``FOO_VAR``
will be stored in the message ``{"FOO_BAR": whatever}``.
Optionally, you can provide a dictionary to the ``mappings`` keyword
argument, which maps environment variable names to new names. E.g.
if ``mappings = {"FOO_VAR": "bar_var"}``, then the value of ``FOO_VAR``
will be stored in the message ``{"bar_var": whatever}``.
If the environment variable is not defined, then its value will be
set to ``None``.
Args:
mappings (dict): An optional dictionary mapping environment variable
names to new names.
environment_variables (list): A list of environment variable names.
"""
def __init__(self, mappings=None, environment_variables=None, **kwargs):
self.environment_mappings = mappings or {}
self.environment_variables = environment_variables or []
super(GetEnvironmentVariables, self).__init__(**kwargs)
def generator(self):
environment = {
self.environment_mappings.get(
environment_variable, environment_variable
): os.environ.get(environment_variable, None)
for environment_variable in self.environment_variables
}
yield environment
def process_item(self):
environment = {
self.environment_mappings.get(
environment_variable, environment_variable
): os.environ.get(environment_variable, None)
for environment_variable in self.environment_variables
}
yield environment
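# Usage sketch (hypothetical variables): read FOO_VAR and PATH, renaming
# FOO_VAR to "foo" in the outgoing message; unset variables come through as
# None.
#
#     env_node = GetEnvironmentVariables(
#         environment_variables=["FOO_VAR", "PATH"],
#         mappings={"FOO_VAR": "foo"})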
class SimpleTransforms(MetalNode):
def __init__(
self,
missing_keypath_action="ignore",
starting_path=None,
transform_mapping=None,
target_value=None,
keypath=None,
**kwargs
):
self.missing_keypath_action = missing_keypath_action
self.transform_mapping = transform_mapping or []
self.functions_dict = {}
self.starting_path = starting_path
for transform in self.transform_mapping:
# Not doing the transforms; only loading the right functions here
function_name = transform.get("target_function", None)
full_function_name = function_name
if function_name is not None:
components = function_name.split("__")
if len(components) == 1:
module = None
function_name = components[0]
                    function = globals()[function_name]
else:
module = ".".join(components[:-1])
function_name = components[-1]
module = importlib.import_module(module)
function = getattr(module, function_name)
self.functions_dict[full_function_name] = function
super(SimpleTransforms, self).__init__(**kwargs)
def process_item(self):
logging.debug("TRANSFORM " + str(self.name))
logging.debug(self.name + " " + str(self.message))
for transform in self.transform_mapping:
path = transform["path"]
target_value = transform.get("target_value", None)
function_name = transform.get("target_function", None)
starting_path = transform.get("starting_path", None)
if function_name is not None:
function = self.functions_dict[function_name]
else:
function = None
function_kwargs = transform.get("function_kwargs", None)
function_args = transform.get("function_args", None)
logging.debug(self.name + " calling replace_by_path:")
replace_by_path(
self.message,
tuple(path),
target_value=target_value,
function=function,
function_args=function_args,
starting_path=starting_path,
function_kwargs=function_kwargs,
)
logging.debug("after SimpleTransform: " + self.name + str(self.message))
yield self.message
class Serializer(MetalNode):
"""
Takes an iterable thing as input, and successively yields its items.
"""
def __init__(self, values=False, *args, **kwargs):
self.values = values
super(Serializer, self).__init__(**kwargs)
def process_item(self):
if self.__message__ is None:
yield None
elif self.values:
for item in self.__message__.values():
yield item
else:
for item in self.__message__:
logging.debug(self.name + " " + str(item))
yield item
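# Usage sketch: given the message {"a": 1, "b": 2}, ``Serializer(values=True)``
# yields 1 and then 2; with the default ``values=False`` it iterates the
# message itself, yielding the keys "a" and then "b".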
class AggregateValues(MetalNode):
"""
Does that.
"""
def __init__(self, values=False, tail_path=None, **kwargs):
self.tail_path = tail_path
self.values = values
super(AggregateValues, self).__init__(**kwargs)
def process_item(self):
values = aggregate_values(self.__message__, self.tail_path, values=self.values)
logging.debug("aggregate_values " + self.name + " " + str(values))
yield values
class Filter(MetalNode):
"""
Applies tests to each message and filters out messages that don't pass
Built-in tests:
key_exists
value_is_true
value_is_not_none
Example:
{'test': 'key_exists',
'key': mykey}
"""
def __init__(self, test=None, test_keypath=None, value=True, *args, **kwargs):
self.test = test
self.value = value
self.test_keypath = test_keypath or []
super(Filter, self).__init__(*args, **kwargs)
@staticmethod
def _key_exists(message, key):
return key in message
@staticmethod
def _value_is_not_none(message, key):
logging.debug(
"value_is_not_none: {message} {key}".format(message=str(message), key=key)
)
return get_value(message, key) is not None
@staticmethod
def _value_is_true(message, key):
return to_bool(message.get(key, False))
def process_item(self):
if self.test in ["key_exists", "value_is_not_none", "value_is_true"]:
result = (
getattr(self, "_" + self.test)(self.__message__, self.test_keypath)
== self.value
)
else:
raise Exception("Unknown test: {test_name}".format(test_name=test))
if result:
logging.debug("Sending message through")
yield self.message
else:
logging.debug("Blocking message: " + str(self.__message__))
yield NothingToSeeHere()
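# Usage sketch (hypothetical keypath): pass a message through only when
# "user_id" is present and non-null; everything else is replaced by
# ``NothingToSeeHere`` and silently dropped downstream.
#
#     gate = Filter(test="value_is_not_none", test_keypath="user_id", value=True)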
class StreamMySQLTable(MetalNode):
def __init__(
self,
*args,
host="localhost",
user=None,
table=None,
password=None,
database=None,
port=3306,
to_row_obj=False,
send_batch_markers=False,
**kwargs
):
self.host = host
self.user = user
self.to_row_obj = to_row_obj
self.password = password
self.database = database
self.port = port
self.table = table
self.send_batch_markers = send_batch_markers
super(StreamMySQLTable, self).__init__(**kwargs)
def setup(self):
        self.db = MySQLdb.connect(
            host=self.host, passwd=self.password, db=self.database,
            user=self.user, port=self.port,
        )
self.cursor = MySQLdb.cursors.DictCursor(self.db)
self.table_schema_query = (
"""SELECT column_name, column_type """
"""FROM information_schema.columns """
"""WHERE table_name='{table}';""".format(table=self.table)
)
print(self.table_schema_query)
# self.table_schema = self.get_schema()
# Need a mapping from header to MYSQL TYPE
# for mapping in self.table_schema:
# column = mapping["column_name"]
# type_string = mapping["column_type"]
# this_type = ds.MySQLTypeSystem.type_mapping(type_string)
# Unfinished experimental code
# Start here:
# store the type_mapping
# use it to cast the data into the MySQLTypeSchema
# ensure that the generator is emitting MySQLTypeSchema objects
# def get_schema(self):
# self.cursor.execute(self.table_schema_query)
# table_schema = self.cursor.fetchall()
# return table_schema
def generator(self):
if self.send_batch_markers:
yield BatchStart(schema=self.table_schema)
self.cursor.execute("""SELECT * FROM {table};""".format(table=self.table))
result = self.cursor.fetchone()
while result is not None:
yield result
result = self.cursor.fetchone()
if self.send_batch_markers:
yield BatchEnd()
class PrinterOfThings(MetalNode):
@set_kwarg_attributes()
def __init__(self, disable=False, pretty=False, prepend="printer: ", **kwargs):
self.disable = disable
self.pretty = pretty
super(PrinterOfThings, self).__init__(**kwargs)
logging.debug("Initialized printer...")
def process_item(self):
if not self.disable:
print(self.prepend)
if self.pretty:
pprint.pprint(self.__message__, indent=2)
else:
print(str(self.__message__))
print("\n")
print("------------")
yield self.message
class ConstantEmitter(MetalNode):
"""
Send a thing every n seconds
"""
def __init__(self, thing=None, max_loops=5, delay=0.5, **kwargs):
self.thing = thing
self.delay = delay
self.max_loops = max_loops
super(ConstantEmitter, self).__init__(**kwargs)
def generator(self):
counter = 0
while counter < self.max_loops:
if random.random() < -0.1:
assert False
time.sleep(self.delay)
yield self.thing
counter += 1
class TimeWindowAccumulator(MetalNode):
"""
Every N seconds, put the latest M seconds data on the queue.
"""
@set_kwarg_attributes()
def __init__(self, time_window=None, send_interval=None, **kwargs):
pass
class LocalFileReader(MetalNode):
@set_kwarg_attributes()
def __init__(
self,
directory=".",
send_batch_markers=True,
serialize=False,
read_mode="r",
filename=None,
**kwargs
):
super(LocalFileReader, self).__init__(**kwargs)
def process_item(self):
filename = "/".join([self.directory, self.filename or self.__message__])
with open(filename, self.read_mode) as file_obj:
if self.serialize:
for line in file_obj:
output = line
yield output
else:
output = file_obj.read()
yield output
class CSVReader(MetalNode):
@set_kwarg_attributes()
def __init__(self, **kwargs):
super(CSVReader, self).__init__(**kwargs)
def process_item(self):
file_obj = io.StringIO(self.__message__)
reader = csv.DictReader(file_obj)
for row in reader:
yield row
class LocalDirectoryWatchdog(MetalNode):
def __init__(self, directory=".", check_interval=3, **kwargs):
self.directory = directory
self.latest_arrival = time.time()
self.check_interval = check_interval
super(LocalDirectoryWatchdog, self).__init__(**kwargs)
def generator(self):
while self.keep_alive:
logging.debug("sleeping...")
time.sleep(self.check_interval)
time_in_interval = None
for filename in os.listdir(self.directory):
last_modified_time = os.path.getmtime(
"/".join([self.directory, filename])
)
if last_modified_time > self.latest_arrival:
yield "/".join([self.directory, filename])
if (
time_in_interval is None
or last_modified_time > time_in_interval
):
time_in_interval = last_modified_time
logging.debug("time_in_interval: " + str(time_in_interval))
if time_in_interval is not None:
self.latest_arrival = time_in_interval
class StreamingJoin(MetalNode):
"""
Joins two streams on a key, using exact match only. MVP.
"""
def __init__(self, window=30, streams=None, *args, **kwargs):
self.window = window
self.streams = streams
self.stream_paths = streams
self.buffers = {
stream_name: TimedDict(timeout=self.window)
for stream_name in self.stream_paths.keys()
}
super(StreamingJoin, self).__init__(*args, **kwargs)
def process_item(self):
"""
"""
value_to_match = get_value(
self.message, self.stream_paths[self.message_source.name]
)
# Check for matches in all other streams.
# If complete set of matches, yield the merged result
# If not, add it to the `TimedDict`.
yield ("hi")
class DynamicClassMediator(MetalNode):
def __init__(self, *args, **kwargs):
super(DynamicClassMediator, self).__init__(**kwargs)
for node_name, node_dict in self.node_dict.items():
cls_obj = node_dict["cls_obj"]
node_obj = cls_obj(**kwargs)
node_dict["obj"] = node_obj
for edge in self.raw_config["edges"]:
source_node_obj = self.node_dict[edge["from"]]["obj"]
target_node_obj = self.node_dict[edge["to"]]["obj"]
source_node_obj > target_node_obj
def bind_methods():
for attr_name in dir(DynamicClassMediator):
if attr_name.startswith("_"):
continue
attr_obj = getattr(DynamicClassMediator, attr_name)
if not isinstance(attr_obj, types.FunctionType):
continue
setattr(self, attr_name, types.MethodType(attr_obj, self))
bind_methods()
source = self.get_source()
self.input_queue_list = source.input_queue_list
sink = self.get_sink()
self.output_queue_list = sink.output_queue_list
self.output_node_list = sink.output_node_list
self.input_node_list = source.input_node_list
def get_sink(self):
sinks = self.sink_list()
if len(sinks) > 1:
raise Exception("`DynamicClassMediator` may have no more than one sink.")
elif len(sinks) == 0:
return None
return sinks[0]
def get_source(self):
sources = self.source_list()
if len(sources) > 1:
raise Exception("`DynamicClassMediator` may have no more than one source.")
elif len(sources) == 0:
return None
return sources[0]
def sink_list(self):
sink_nodes = []
for node_name, node_dict in self.node_dict.items():
node_obj = node_dict["obj"]
if len(node_obj.output_queue_list) == 0:
sink_nodes.append(node_obj)
return sink_nodes
def source_list(self):
source_nodes = [
node_dict["obj"]
for node_dict in self.node_dict.values()
if node_dict["obj"].is_source
]
return source_nodes
def hi(self):
return "hi"
def get_node_dict(node_config):
node_dict = {}
for node_config in node_config["nodes"]:
node_class = globals()[node_config["class"]]
node_name = node_config["name"]
node_dict[node_name] = {}
node_dict[node_name]["class"] = node_class
frozen_arguments = node_config.get("frozen_arguments", {})
node_dict[node_name]["frozen_arguments"] = frozen_arguments
node_obj = node_class(**frozen_arguments)
node_dict[node_name]["remapping"] = node_config.get("arg_mapping", {})
return node_dict
def kwarg_remapper(f, **kwarg_mapping):
reverse_mapping = {value: key for key, value in kwarg_mapping.items()}
logging.debug("kwarg_mapping:" + str(kwarg_mapping))
parameters = [i for i, _ in list(inspect.signature(f).parameters.items())]
for kwarg in parameters:
if kwarg not in kwarg_mapping:
reverse_mapping[kwarg] = kwarg
def remapped_function(*args, **kwargs):
remapped_kwargs = {}
for key, value in kwargs.items():
if key in reverse_mapping:
remapped_kwargs[reverse_mapping[key]] = value
logging.debug("renamed function with kwargs: " + str(remapped_kwargs))
return f(*args, **remapped_kwargs)
return remapped_function
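# Usage sketch (hypothetical function): remap the keyword ``path`` of an
# existing callable to ``filename`` without touching the original.
#
#     def read(path=None):
#         return open(path).read()
#
#     read_by_filename = kwarg_remapper(read, path="filename")
#     read_by_filename(filename="/tmp/example.txt")  # calls read(path=...)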
def template_class(
class_name, parent_class, kwargs_remapping, frozen_arguments_mapping
):
kwargs_remapping = kwargs_remapping or {}
frozen_init = functools.partial(parent_class.__init__, **frozen_arguments_mapping)
if isinstance(parent_class, (str,)):
parent_class = globals()[parent_class]
cls = type(class_name, (parent_class,), {})
setattr(cls, "__init__", kwarg_remapper(frozen_init, **kwargs_remapping))
return cls
def class_factory(raw_config):
new_class = type(raw_config["name"], (DynamicClassMediator,), {})
new_class.node_dict = get_node_dict(raw_config)
new_class.class_name = raw_config["name"]
new_class.edge_list_dict = raw_config.get("edges", [])
new_class.raw_config = raw_config
for node_name, node_config in new_class.node_dict.items():
_class = node_config["class"]
cls = template_class(
node_name,
_class,
node_config["remapping"],
node_config["frozen_arguments"],
)
setattr(cls, "raw_config", raw_config)
node_config["cls_obj"] = cls
# Inject?
globals()[new_class.__name__] = new_class
return new_class
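# Configuration sketch (hypothetical node names): ``class_factory`` turns a raw
# config like the one below into a new ``DynamicClassMediator`` subclass whose
# inner nodes are wired together from the "edges" list.
#
#     raw_config = {
#         "name": "CsvPrinter",
#         "nodes": [
#             {"class": "CSVReader", "name": "reader"},
#             {"class": "PrinterOfThings", "name": "printer",
#              "frozen_arguments": {"prepend": "row: "}},
#         ],
#         "edges": [{"from": "reader", "to": "printer"}],
#     }
#     CsvPrinter = class_factory(raw_config)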
class Remapper(MetalNode):
def __init__(self, mapping=None, **kwargs):
self.remapping_dict = mapping or {}
super(Remapper, self).__init__(**kwargs)
def process_item(self):
logging.debug("Remapper {node}:".format(node=self.name) + str(self.__message__))
out = remap_dictionary(self.__message__, self.remapping_dict)
yield out
class BlackHole(MetalNode):
"""
Accepts any incoming message and promptly ignores it. Returns ``NothingToSeeHere``.
"""
def __init__(self, **kwargs):
super(BlackHole, self).__init__(**kwargs)
def process_item(self):
logging.debug(
"BlackHole {node}:".format(node=self.name) + str(self.__message__)
)
yield NothingToSeeHere()
class Blocker(BlackHole):
"""
Class that ignores all messages, but sends a message when all of its upstream
nodes have finished.
"""
def __init__(self, **kwargs):
kwargs.update({"send_termination_message": True})
super(Blocker, self).__init__(**kwargs)
class BatchMessages(MetalNode):
def __init__(
self, batch_size=None, batch_list=None, counter=0, timeout=5, **kwargs
):
self.batch_size = batch_size
self.timeout = timeout
self.counter = 0
self.batch_list = batch_list or []
super(BatchMessages, self).__init__(**kwargs)
def process_item(self):
self.counter += 1
self.batch_list.append(self.__message__)
logging.debug(self.name + " " + str(self.__message__))
out = NothingToSeeHere()
if self.counter % self.batch_size == 0:
out = self.batch_list
logging.debug("BatchMessages: " + str(out))
self.batch_list = []
yield out
def cleanup(self):
self.log_info(self.name + " in cleanup, sending remainder of batch...")
yield self.batch_list
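# Usage sketch (hypothetical size): collect incoming messages into lists of 10;
# between batches the node emits ``NothingToSeeHere``, and any remainder is
# flushed by ``cleanup`` when the pipeline shuts down.
#
#     batcher = BatchMessages(batch_size=10)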
if __name__ == "__main__":
pass
| 35.185333 | 97 | 0.587194 | 57,875 | 0.935112 | 19,232 | 0.31074 | 5,159 | 0.083356 | 0 | 0 | 18,294 | 0.295584 |
d0ac701e358934362e43d3495ffc036dc298f1b4 | 184 | py | Python | Curso_de_Python_ Curso_em_Video/PythonTeste/condicoesEx001.py | DanilooSilva/Cursos_de_Python | 8f167a4c6e16f01601e23b6f107578aa1454472d | [
"MIT"
]
| null | null | null | Curso_de_Python_ Curso_em_Video/PythonTeste/condicoesEx001.py | DanilooSilva/Cursos_de_Python | 8f167a4c6e16f01601e23b6f107578aa1454472d | [
"MIT"
]
| null | null | null | Curso_de_Python_ Curso_em_Video/PythonTeste/condicoesEx001.py | DanilooSilva/Cursos_de_Python | 8f167a4c6e16f01601e23b6f107578aa1454472d | [
"MIT"
]
| null | null | null | import random
numeroPc = random.randint(1, 5)
numeroUsuario = int(input('Digite um número: '))
print('Parabéns vc acertou!' if numeroPc == numeroUsuario else 'O Computador venceu')
| 23 | 85 | 0.73913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 65 | 0.349462 |
d0ae7da86f116336cc253188f9639ca495e26131 | 5,739 | py | Python | src/get_device_id.py | icemanzz/SPS_NM_Scripts | f7b9e05e441d35715ee98bf1e2a73765a3a8d7c9 | [
"Apache-2.0"
]
| null | null | null | src/get_device_id.py | icemanzz/SPS_NM_Scripts | f7b9e05e441d35715ee98bf1e2a73765a3a8d7c9 | [
"Apache-2.0"
]
| null | null | null | src/get_device_id.py | icemanzz/SPS_NM_Scripts | f7b9e05e441d35715ee98bf1e2a73765a3a8d7c9 | [
"Apache-2.0"
]
| null | null | null | import pyipmi
import pyipmi.interfaces
import os
import re
import datetime
import os.path
import time
import math
import numpy
import mmap
import array
import getopt
import sys
#Inmport path
sys.path.append('../src')
from aardvark_initial import *
#Inmport path
sys.path.append('../')
from os_parameters_define import *
from utility_function import *
from nm_ipmi_raw_to_str import *
from error_messages_define import *
from nm_functions import *
from config import *
def aardvark_ipmi_init(target_addr, channel):
## Test pyIPMI
opts, args = getopt.getopt(sys.argv[1:], 't:hvVI:H:U:P:o:b:')
interface_name = 'aardvark'
name = 'pullups'
value = 'off'
aardvark_pullups = False
aardvark_serial = None
aardvark_target_power = False
target_address = target_addr
target_routing = [(target_addr ,channel)]
###
interface = pyipmi.interfaces.create_interface(interface_name, serial_number=aardvark_serial)
ipmi = pyipmi.create_connection(interface)
ipmi.target = pyipmi.Target(target_address)
#ipmi.target.set_routing(target_routing)
return ipmi
def get_device_id_py(ipmi):
netfn, get_did_raw = get_did_raw_to_str_py()
# Send Get DID to ME
rsp = send_ipmb_aardvark(ipmi , netfn , get_did_raw )
# Check if rsp data correct
sts = ipmi_resp_analyst_py( ord(rsp[0]), App )
if(sts != SUCCESSFUL ):
        return ERROR, None, None, None, None, None
# Analyst get did resp data format
# Get Major Version = resp byte4 bits[6:0]
DEBUG('Get Major Version:')
marjor_version = get_bits_data_py( ord(rsp[3]) , 0 , 7)
DEBUG('get_me_device_id : Marjor_version = %d' %marjor_version)
# Get Device Available bit : byte4 bit[7] = 1 = device in boot loader mode. = 0 = normal operation mode.
available_bit = get_bits_data_py( ord(rsp[3]) , 7 , 1)
DEBUG('get_me_device_id : Available Bit = %d' % available_bit)
# Get Minor version (byte5 bit[7:4]) and Milestone version (byte5 bits[3:0])number
milestone_version = get_bits_data_py( ord(rsp[4]) , 0 , 4)
DEBUG('get_me_device_id : milestone_version = %d' %milestone_version)
minor_version = get_bits_data_py( ord(rsp[4]) , 4 , 4)
DEBUG('get_me_device_id : minor_version = %d' %minor_version)
# Get Build version number byte14=A.B and byte15 bit[7:4] =C, build version = 100A+10B+C
build_version_AB = get_bits_data_py( ord(rsp[13]) , 0 , 8)
DEBUG('get_me_device_id : build_version_AB = %d' %build_version_AB)
build_version_C = get_bits_data_py( ord(rsp[14]) , 4 , 4)
DEBUG('get_me_device_id : build_version_C Bit = %d' %build_version_C)
sps_version = format(marjor_version, '02x') + '.' + format(minor_version, '02x') + '.' + format(milestone_version, '02x') + '.' + format(build_version_AB,'02x')+format(build_version_C, 'x')
DEBUG('Current SPS FW version = ' + sps_version )
# Get byte11 bit[7:0] platform SKU
platform = get_bits_data_py( ord(rsp[10]) , 0 , 8)
DEBUG('get_me_device_id : platform = %d' %platform)
# Get byte13 bit[3:0] DCMI version, bytes13 bit[7:4] = NM version
nm = get_bits_data_py( ord(rsp[12]) , 0 , 4)
DEBUG('get_me_device_id : nm = %d' %nm)
dcmi = get_bits_data_py( ord(rsp[12]) , 4 , 4)
DEBUG('get_me_device_id : dcmi = %d' %dcmi)
# Get byte16 bit[1:0] image flag
image = get_bits_data_py( ord(rsp[15]) , 0 , 2)
DEBUG('get_me_device_id : image_sts = %d' %image)
sts = PASS
return sts, sps_version, platform , dcmi , nm , image
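# Example (hypothetical byte values): with major=4, minor=1, milestone=4 and
# build bytes giving AB=0x45 and C=0x3, the assembled string is "04.01.04.453",
# i.e. the usual "major.minor.milestone.build" SPS firmware layout.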
def usage():
    # Description of how to use this script
    print' Usage $ sudo python get_device_id.py [delay_time (sec)] [loop_number]'
    print' ex: sudo python get_device_id.py 1 100'
    return
## Define Delay Time check function
def delay_check(parameter):
delay_time = int(parameter , 10)
if(delay_time > 0 ):
sts = PASS
else:
delay_time = ERROR
sts = ERROR
return sts, delay_time
## Define Delay Time check function
def loop_check(parameter):
if(parameter == 'loop'):
loop_number = 'loop'
sts = PASS
else:
loop_number = int(parameter , 10)
if(loop_number > 0 ):
sts = PASS
else:
loop_number = ERROR
sts = ERROR
return sts, loop_number
## Define Input parameters lenth check
def parameter_check(parameter):
if(len(parameter) != 3 ):
usage()
sts =ERROR
return ERROR
else:
return PASS
## _Main_ ##
# Initial aardvark
ipmi = aardvark_ipmi_init(target_me_addr, target_me_bridge_channel)
# Check delay time parameter
sts = parameter_check(sys.argv)
if(sts == PASS):
print 'Check Delay Time parameter setting'
sts, delay_time = delay_check(str(sys.argv[1]))
print ( "delay time = %d " %(delay_time) )
sts, loop_number = loop_check(str(sys.argv[2]))
print ("loop_number = " , loop_number)
else:
sts = ERROR
if(sts == PASS):
print 'Start to Send Get Device ID..'
while loop_number :
sts, sps_version, platform, dcmi, nm, image = get_device_id_py(ipmi)
# Add delay time 5 secs to make sure me go back to stable mode
time.sleep(delay_time)
# Show Result
print('SPS Version = '+ sps_version)
print('platform = %d' %platform )
print('dcmi =%d' %dcmi)
print('nm = %d' %nm)
print('image = %d' %(image))
if( loop_number == 'loop' ):
loop_number = True
else:
loop_number = loop_number -1
if(sts == ERROR ):
loop_number = False
break
else:
print' Done! '
| 31.021622 | 194 | 0.638439 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,774 | 0.309113 |
d0af8ccc38db80b7705a16b0b92de3ffc09909b1 | 321 | py | Python | submissions/arc068/b.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
]
| 1 | 2021-05-10T01:16:28.000Z | 2021-05-10T01:16:28.000Z | submissions/arc068/b.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
]
| 3 | 2021-05-11T06:14:15.000Z | 2021-06-19T08:18:36.000Z | submissions/arc068/b.py | m-star18/atcoder | 08e475810516602fa088f87daf1eba590b4e07cc | [
"Unlicense"
]
| null | null | null | import sys
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)
from collections import Counter
n, *a = map(int, read().split())
counter = Counter(a).values()
ans = len(counter)
if (sum(counter) - ans) % 2 == 1:
ans -= 1
print(ans)
| 21.4 | 38 | 0.697819 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
d0af98534b90208cb7e4f06f1ab2ae7e3d283c93 | 9,730 | py | Python | server/plato/test/test_domains.py | zhlooking/plato | 9daf0dfd8b376603453eadf2d981c71d3adb2632 | [
"MIT"
]
| null | null | null | server/plato/test/test_domains.py | zhlooking/plato | 9daf0dfd8b376603453eadf2d981c71d3adb2632 | [
"MIT"
]
| null | null | null | server/plato/test/test_domains.py | zhlooking/plato | 9daf0dfd8b376603453eadf2d981c71d3adb2632 | [
"MIT"
]
| null | null | null | import json
from plato import db
from plato.model.user import User
from plato.test.base import BaseTestCase
from plato.test.utils import add_user, add_domain
class TestDomainService(BaseTestCase):
def test_add_domain(self):
'''Ensure a new domain can be added to database'''
add_user('test', '[email protected]', 'test')
with self.client:
resp_login = self.client.post(
'/auth/login',
data=json.dumps(dict(
email='[email protected]',
password='test'
)),
content_type='application/json'
)
response = self.client.post(
'/domains',
data=json.dumps(dict(
domain='www.baidu.com',
ip='http://111.13.100.91/',
master=1
)),
content_type='application/json',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 201)
self.assertIn('www.baidu.com was added!', data['message'])
self.assertIn('success', data['status'])
def test_add_duplicate_domain(self):
        '''Ensure error is thrown if the domain already exists'''
add_user('test', '[email protected]', 'test')
with self.client:
resp_login = self.client.post(
'/auth/login',
data=json.dumps(dict(
email='[email protected]',
password='test'
)),
content_type='application/json'
)
self.client.post(
'/domains',
data=json.dumps(dict(
domain='www.baidu.com',
ip='http://111.13.100.91/',
master=1
)),
content_type='application/json',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
response = self.client.post(
'/domains',
data=json.dumps(dict(
domain='www.baidu.com',
ip='http://111.13.100.91/',
master=1
)),
content_type='application/json',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('Sorry, that domain already exists.', data['message'])
self.assertIn('fail', data['status'])
def test_add_domain_invalid_ip(self):
add_user('test', '[email protected]', 'test')
with self.client:
resp_login = self.client.post(
'/auth/login',
data=json.dumps(dict(
email='[email protected]',
password='test'
)),
content_type='application/json'
)
response = self.client.post(
'/domains',
data=json.dumps(dict(
domain='www.baidu.com',
master=1
)),
content_type='application/json',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('Invalid payload', data['message'])
self.assertIn('fail', data['status'])
def test_add_domain_invalid_payload(self):
add_user('test', '[email protected]', 'test')
with self.client:
resp_login = self.client.post(
'/auth/login',
data=json.dumps(dict(
email='[email protected]',
password='test'
)),
content_type='application/json'
)
response = self.client.post(
'/domains',
data=json.dumps(dict(
ip='http://111.13.100.91/',
master=1
)),
content_type='application/json',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('Invalid payload', data['message'])
self.assertIn('fail', data['status'])
def test_add_domain_invalid_master(self):
add_user('test', '[email protected]', 'test')
with self.client:
resp_login = self.client.post(
'/auth/login',
data=json.dumps(dict(
email='[email protected]',
password='test'
)),
content_type='application/json'
)
response = self.client.post(
'/domains',
data=json.dumps(dict(
domain='www.baidu.com',
ip='http://111.13.100.91/',
)),
content_type='application/json',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 400)
self.assertIn('Invalid payload', data['message'])
self.assertIn('fail', data['status'])
def test_single_domain(self):
'''Ensure get single user behaves correctly'''
domain = add_domain('www.baidu.com', 'http://10.0.0.122', 1)
with self.client:
response = self.client.get(f'/domain/{domain.id}')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertIn('www.baidu.com', data['data']['domain'])
self.assertIn('http://10.0.0.122', data['data']['ip'])
self.assertIn('success', data['status'])
def test_single_domain_no_id(self):
'''Ensure error is thrown if an id is not provided'''
with self.client:
response = self.client.get('domain/test_id')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('Domain does not exist.', data['message'])
self.assertIn('fail', data['status'])
def test_single_domain_incorrect_id(self):
'''Ensure error is thrown if the id is not correct'''
with self.client:
response = self.client.get('domain/666')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 404)
self.assertIn('Domain does not exist.', data['message'])
self.assertIn('fail', data['status'])
def test_add_user_not_admin(self):
add_user('test', '[email protected]', 'test')
user = User.query.filter_by(email='[email protected]').first()
user.admin = True
db.session.commit()
with self.client:
# user login
resp_login = self.client.post(
'/auth/login',
data=json.dumps(dict(
email='[email protected]',
password='test'
)),
content_type='application/json'
)
response = self.client.post(
'/domains',
data=json.dumps(dict(
domain='www.baidu.com',
ip='http://111.13.100.91/',
master=1
)),
content_type='application/json',
headers=dict(
Authorization='Bearer ' + json.loads(
resp_login.data.decode()
)['auth_token']
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'error')
self.assertTrue(
data['message'] == 'You have no permission to do that.')
self.assertEqual(response.status_code, 403)
def test_all_domains(self):
add_domain('www.baidu.com', 'http://111.13.100.91/', 1)
add_domain('www.google.com', 'http://111.13.100.92/', 1)
with self.client:
response = self.client.get('/domains')
data = json.loads(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertEqual(len(data['data']['domains']), 2)
self.assertIn('www.google.com', data['data']['domains'][0]['domain'])
self.assertIn('www.baidu.com', data['data']['domains'][1]['domain'])
self.assertIn('http://111.13.100.92/', data['data']['domains'][0]['ip'])
self.assertIn('http://111.13.100.91/', data['data']['domains'][1]['ip'])
| 39.076305 | 84 | 0.481809 | 9,568 | 0.98335 | 0 | 0 | 0 | 0 | 0 | 0 | 2,140 | 0.219938 |
d0b20375cd75fe0eef53b990d01615a34d6461be | 442 | py | Python | create.py | chen-robert/hackcmu21 | 0728af0aa4f61b1969d0b73f7e8688fee90c1cb9 | [
"MIT"
]
| null | null | null | create.py | chen-robert/hackcmu21 | 0728af0aa4f61b1969d0b73f7e8688fee90c1cb9 | [
"MIT"
]
| null | null | null | create.py | chen-robert/hackcmu21 | 0728af0aa4f61b1969d0b73f7e8688fee90c1cb9 | [
"MIT"
]
| null | null | null | import sqlite3
import datetime
conn = sqlite3.connect('database.db')
print("Opened database successfully")
# NOTE: ID is DEPRECATED
conn.execute('CREATE TABLE simulated (id TEXT, lat NUMERIC, lon NUMERIC, alt NUMERIC, time TIMESTAMP DEFAULT CURRENT_TIMESTAMP)')
conn.execute('CREATE TABLE locations (id TEXT, lat NUMERIC, lon NUMERIC, alt NUMERIC, time TIMESTAMP DEFAULT CURRENT_TIMESTAMP)')
print("Table created successfully")
conn.close() | 40.181818 | 129 | 0.791855 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 325 | 0.735294 |
d0b20f9be0257673e00f2b9f9aa968fab5295bbd | 3,091 | py | Python | tests/components/test_ts_component.py | T4rk1n/dazzler | 69c49422dc19c910445ab265b1d3481041de8f43 | [
"MIT"
]
| 15 | 2019-12-19T11:57:30.000Z | 2021-11-15T23:34:41.000Z | tests/components/test_ts_component.py | T4rk1n/dazzler | 69c49422dc19c910445ab265b1d3481041de8f43 | [
"MIT"
]
| 196 | 2019-09-21T15:10:14.000Z | 2022-03-31T11:07:48.000Z | tests/components/test_ts_component.py | T4rk1n/dazzler | 69c49422dc19c910445ab265b1d3481041de8f43 | [
"MIT"
]
| 7 | 2019-10-30T19:38:15.000Z | 2021-12-01T04:54:16.000Z | # A bit of duplication of the component system tests to ensure
# typescript components are transpiled properly to Python.
# Types are tested in test_mypy.
import json
import re
import pytest
from . import ts_components as tsc
@pytest.mark.parametrize(
'component',
[tsc.TypedComponent, tsc.TypedClassComponent]
)
def test_tsc_required(component):
with pytest.raises(TypeError) as context:
component()
assert context.value.args[0] == "__init__() missing 1 required positional argument: 'required_str'" # noqa: E501
@pytest.mark.parametrize(
'component, doc',
[
(tsc.TypedComponent, 'Typed Component Docstring'),
(tsc.TypedClassComponent, 'Typed class component')
]
)
def test_tsc_docstring(component, doc):
assert component.__doc__.strip() == doc
@pytest.mark.parametrize(
'component',
[tsc.TypedComponent, tsc.TypedClassComponent]
)
def test_tsc_aspect_docstring(component):
assert ':param str_with_comment: Docstring'\
in component.__init__.__doc__
@pytest.mark.parametrize('prop_name, prop_default, component', [
('default_str', "'default'", tsc.TypedComponent),
('default_required_str', "'default required'", tsc.TypedComponent),
('default_num', 3, tsc.TypedComponent),
('default_str', "'default'", tsc.TypedClassComponent),
('default_required_str', "'default required'", tsc.TypedClassComponent),
('default_num', 3, tsc.TypedClassComponent),
])
def test_tsc_default_props_docstring(prop_name, prop_default, component):
pattern = r':param {}:.*\(default={}\)'.format(prop_name, prop_default)
assert re.search(pattern, str(component.__init__.__doc__))
def test_tsc_enum_docstring():
assert ":param enumeration: (Possible values: 'foo', 'bar')" \
in tsc.TypedComponent.__init__.__doc__
assert ":param defined_enum: (Possible values: 'foo', 'bar')" \
in tsc.TypedComponent.__init__.__doc__
@pytest.mark.async_test
async def test_tsc_render(start_page, browser):
from tests.components.pages.ts import page
await start_page(page)
# assert the children with added classname + base class name css path.
await browser.wait_for_text_to_equal(
'.dazzler-ts-typed-component.other .children .dazzler-core-container',
'foobar'
)
# assert style can be changed, is added to the type by extension.
await browser.wait_for_style_to_equal(
'.dazzler-ts-typed-component.other',
'border', '1px solid rgb(0, 0, 255)'
)
content = await browser.wait_for_element_by_css_selector(
'.dazzler-ts-typed-component.other .json-output'
)
data = json.loads(content.text)
assert data['num'] == 2
assert data['text'] == 'foobar'
assert data['arr'] == [1, 2, 'mixed']
assert data['arr_str'] == ['foo', 'bar']
assert data['default_str'] == 'default'
assert data['required_str'] == 'override'
assert data['obj'] == {'anything': 'possible'}
await browser.wait_for_text_to_equal(
'.dazzler-ts-typed-class-component .children', 'clazz'
)
| 31.865979 | 117 | 0.693303 | 0 | 0 | 0 | 0 | 2,580 | 0.834681 | 1,119 | 0.362019 | 1,187 | 0.384018 |
d0b22aa7904b846e9743534781f5c71318798017 | 9,371 | py | Python | python test generator/main.py | ElDonad/Tixel-Dungeon | ad622e570a06bf7722cdf15dcc33547ba14aada4 | [
"MIT"
]
| null | null | null | python test generator/main.py | ElDonad/Tixel-Dungeon | ad622e570a06bf7722cdf15dcc33547ba14aada4 | [
"MIT"
]
| null | null | null | python test generator/main.py | ElDonad/Tixel-Dungeon | ad622e570a06bf7722cdf15dcc33547ba14aada4 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
import random
import numpy as np
import colorama
from colorama import Fore, Back
import copy
colorama.init()
LEFT = 'lft'
RIGHT = 'rgt'
UP = 'up'
DOWN = 'dwn'
HORIZONTAL = 'horizontal'
VERTICAL = 'vertical'
class Room:
def __init__(self, x=0, y=0, h=0, w=0):
self.x = x
self.y = y
self.h = h
self.w = w
class Path:
def __init__(self, orientation = ''):
self.straights = []
self.orientation = orientation
class Straight:
def __init__(self, x=0, y=0, length = 0):
self.x = x
self.y = y
self.length = length
VERTICAL = -1
HORIZONTAL = -2
        def getEnd(self):
            if self.orientation == VERTICAL:
                return (self.x, self.y + self.length)
            elif self.orientation == HORIZONTAL:
                return (self.x + self.length, self.y)
print("beginning...")
level = []
for x in range(50):
level.append([])
for y in range(50):
level[x].append(".")
rooms = []
corridors = []
def generateLevel():
levelX = 50
levelY = 50
hasKeyRoom = False
hasItemRoom = False
deltaFromCenter = 3
roomNumber = random.randint(6,9)
print(roomNumber)
for roomIndex in range(roomNumber):
placed = False
placeIt = 0
while placed == False:
if placeIt > 1500:
print("placement error")
return False
newRoom = Room()
newRoom.h = random.randint(3,6)
newRoom.w = random.randint(3,6)
newRoom.x = random.randint(max(0, levelX / 2 - deltaFromCenter), min(levelX - newRoom.w - 1, levelX / 2 + deltaFromCenter))
newRoom.y = random.randint(max(0, levelY / 2 - deltaFromCenter), min(levelY - newRoom.h - 1, levelY / 2 + deltaFromCenter))
if not collide(newRoom):
rooms.append(newRoom)
if len(rooms) > 1:
corridors.append(generatePath(rooms[-1], rooms[-2]))
placed = True
deltaFromCenter += 5
print("finished one room !")
def collide(sqr1):
if isinstance(sqr1, Room):
for sqr2 in rooms:
if (sqr1.x + sqr1.w + 1>= sqr2.x - 1 and
sqr1.x - 1<= sqr2.x + sqr2.w + 1 and
sqr1.y + sqr1.h + 1>= sqr2.y - 1 and
sqr1.y - 1<= sqr2.y + sqr2.h + 1
):
return True
return False
elif isinstance(sqr1, Path):
for straight in sqr1.straights:
fakeSqr = Room()
fakeSqr.x = straight.x
fakeSqr.y = straight.y
            if straight.orientation == HORIZONTAL:
fakeSqr.w = straight.length
fakeSqr.h = 1
            elif straight.orientation == VERTICAL:
fakeSqr.w = 1
fakeSqr.h = straight.length
if collide(fakeSqr):
return True
return False
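# Note: collide() pads each existing room by one tile on every side, so a new
# room (or corridor straight) must leave at least a one-tile gap to be accepted.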
def generatePath(room1, room2):
startPoint = (0,0)
endPoint = (0,0)
def straightInDirection(direction, start = None, length=1, straight = Path.Straight()):
if start == None:
start = (straight.x, straight.y)
x, y = start
straight.x = x
straight.y = y
if direction == LEFT:
straight.orientation = HORIZONTAL
straight.length = -length
elif direction == RIGHT:
straight.orientation = HORIZONTAL
straight.length = length
elif direction == UP:
straight.orientation = VERTICAL
straight.length = -length
elif direction == DOWN:
straight.orientation = VERTICAL
straight.length = length
print("New path : ", start, length, direction)
return straight
initialDir = None
initialiOrientation = None
if abs(room1.x - room2.x) > abs(room1.y - room2.y):
initialiOrientation = HORIZONTAL
else:
initialiOrientation = VERTICAL
if initialiOrientation == HORIZONTAL:
if room1.x > room2.x:
initialDir = LEFT
else:
initialDir = RIGHT
elif initialiOrientation == VERTICAL:
if room1.y > room2.y:
initialDir = UP
else:
initialDir = DOWN
    # Start of generation: pick a starting point:
if initialDir == LEFT:
startPoint = (room1.x, random.randint(room1.y, room1.y + room1.h))
elif initialDir == RIGHT:
startPoint = (room1.x + room1.w, random.randint(room1.y, room1.y + room1.h))
elif initialDir == UP:
startPoint = (random.randint(room1.x, room1.x + room1.w), room1.y)
elif initialDir == DOWN:
startPoint = (random.randint(room1.x, room1.x + room1.w), room1.y + room1.h)
print(startPoint)
#input()
    # Pick the end point the same way: it must lie on the face of the room opposite the start point's face.
if initialDir == LEFT:
endPoint = (room2.x + room2.w, random.randint(room2.y, room2.y + room2.h))
elif initialDir == RIGHT:
endPoint = (room2.x, random.randint(room2.y, room2.y + room2.h))
elif initialDir == UP:
endPoint = (random.randint(room2.x, room2.x + room2.w), room2.y + room2.h)
elif initialDir == DOWN:
endPoint = (random.randint(room2.x, room2.x + room2.w), room2.y)
print(endPoint)
#input()
path = Path()
currentPos = startPoint
headingDir = initialDir
deltaDir = None
priorityOrientation = None
if initialDir == LEFT or initialDir == RIGHT:
deltaDir = abs(startPoint[0] - endPoint[0])
priorityOrientation = VERTICAL
elif initialDir == UP or initialDir == DOWN:
deltaDir = abs(startPoint[1] - endPoint[1])
priorityOrientation = HORIZONTAL
path.straights.append(copy.deepcopy(straightInDirection(initialDir, length = random.randint(1,max(1,int(deltaDir / 3))), start = currentPos)))
currentPos = path.straights[-1].getEnd()
print(currentPos)
#input()
while currentPos != endPoint:
xDelta = abs(endPoint[0] - currentPos[0])
xOffset = endPoint[0] - currentPos[0]
yDelta = abs(endPoint[1] - currentPos[1])
yOffset = endPoint[1] - currentPos[1]
print("loop enter", xOffset, yOffset, priorityOrientation)
#input()
if yOffset != 0 and (priorityOrientation == VERTICAL or xOffset == 0):
print("yOffset")
newDirection = None
if yOffset < 0:
newDirection = UP
elif yOffset > 0:
newDirection = DOWN
path.straights.append(copy.deepcopy(straightInDirection(newDirection, length=yDelta, start=currentPos)))
currentPos = path.straights[-1].getEnd()
elif xOffset != 0 and (priorityOrientation == HORIZONTAL or yOffset == 0):
print("xOffset")
newDirection = None
if xOffset < 0:
newDirection = LEFT
elif xOffset > 0:
newDirection = RIGHT
print(newDirection)
path.straights.append(copy.deepcopy(straightInDirection(newDirection, length=xDelta, start=currentPos)))
currentPos = path.straights[-1].getEnd()
else:
print('nothing')
print("ROAD ADVANCEMENT : currentPos : " + str(currentPos) + " endPos : " + str(endPoint))
#input()
return path
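# Summary of the corridor algorithm above: the path leaves room1 through the
# face pointing toward room2 along the dominant axis, advances a random
# fraction (up to a third) of the remaining distance, then closes the leftover
# x/y offsets one axis-aligned segment at a time until it reaches endPoint.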
for a in range(0,1000):
generateLevel()
print("finished generation !")
print(len(rooms))
count = 1
for room in rooms:
for x in range(room.x, room.x + room.w):
level[x][room.y] = str(count)#"░"
level[x][room.y + room.h] = str(count)
for y in range(room.y, room.y + room.h):
level[room.x][y] = str(count)
level[room.x + room.w][y] = str(count)
count += 1
print("nombre de corridors : ", len(corridors))
for corridor in corridors:
print("nombre de corridors : ", len(corridor.straights))
for straight in corridor.straights:
print("origine", straight.x, ', ',straight.y,"oritentation : ",straight.orientation, "length : ", straight.length)
if straight.orientation == VERTICAL:
for y in range(straight.y,straight.y + straight.length, np.sign(straight.length)):
for x in range(straight.x -1,straight.x + 1 + 1,2):
#level[x][y] = "░"
pass
level[straight.x][y]=Fore.RED + "." + Fore.WHITE
elif straight.orientation == HORIZONTAL:
print("horizontal")
for x in range(straight.x,straight.x + straight.length, np.sign(straight.length)):
for y in range(straight.y -1,straight.y + 1 + 1,2):
#level[x][y] = "░"
pass
level[x][straight.y]= Fore.RED + "." + Fore.WHITE
for line in level:
lineC = " "
print(lineC.join(line))
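    # Reset the global room/corridor/level buffers so the next iteration
    # generates and prints a fresh 50x50 level.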
rooms = []
corridors = []
level = []
for x in range(50):
level.append([])
for y in range(50):
level[x].append(".")
print("loop position : ",a)
input()
| 33.230496 | 146 | 0.555224 | 676 | 0.071999 | 0 | 0 | 0 | 0 | 0 | 0 | 644 | 0.068591 |
d0b36a7b39c48086c567c97c9b01212d0a865743 | 255 | py | Python | src/py/vmw/ui/vmw_launcher.py | jp-uom/variant_matrix_wizard | c5d7ac509be6d6a2020ab38f49c28df090a03c1d | [
"MIT"
]
| 1 | 2017-12-27T11:56:33.000Z | 2017-12-27T11:56:33.000Z | src/py/vmw/ui/vmw_launcher.py | jp-uom/variant_matrix_wizard | c5d7ac509be6d6a2020ab38f49c28df090a03c1d | [
"MIT"
]
| null | null | null | src/py/vmw/ui/vmw_launcher.py | jp-uom/variant_matrix_wizard | c5d7ac509be6d6a2020ab38f49c28df090a03c1d | [
"MIT"
]
| null | null | null | #!/usr/bin/env python3
import wx
import vmwizard as vmw
if __name__ == '__main__':
app = wx.App(False)
frame = wx.Frame(None, wx.ID_ANY, "Variant Matrix")
wiz = vmw.Wizard(frame)
frame.Show(True)
frame.Centre()
app.MainLoop()
| 15.9375 | 55 | 0.639216 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 48 | 0.188235 |
d0b370195e62577b0993491b41073f0838231b20 | 2,308 | py | Python | Modules/Scripted/DMRIInstall/DMRIInstall.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | [
"MIT"
]
| null | null | null | Modules/Scripted/DMRIInstall/DMRIInstall.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | [
"MIT"
]
| null | null | null | Modules/Scripted/DMRIInstall/DMRIInstall.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | [
"MIT"
]
| null | null | null | import os
import string
import textwrap
import unittest
import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
import logging
#
# DMRIInstall
#
class DMRIInstall(ScriptedLoadableModule):
"""
"""
helpText = textwrap.dedent(
"""
Please use the Extension Manager to install the "SlicerDMRI" extension for
diffusion-related tools including:
<ul>
<li> Diffusion Tensor Estimation</li>
<li>Tractography Display</li>
<li>Tractography Seeding</li>
<li>Fiber Tract Measurement</li>
</ul>
""")
def __init__(self, parent):
# Hide this module if SlicerDMRI is already installed
model = slicer.app.extensionsManagerModel()
if model.isExtensionInstalled("SlicerDMRI"):
return
ScriptedLoadableModule.__init__(self, parent)
self.parent.categories = ["Diffusion"]
self.parent.title = "Install Slicer Diffusion Tools"
self.parent.dependencies = []
self.parent.contributors = ["Isaiah Norton"]
self.parent.helpText = DMRIInstall.helpText
self.parent.helpText += self.getDefaultModuleDocumentationLink()
self.parent.acknowledgementText = textwrap.dedent(
"""
SlicerDMRI supported by NIH NCI ITCR U01CA199459 (Open Source Diffusion MRI
Technology For Brain Cancer Research), and made possible by NA-MIC, NAC,
BIRN, NCIGT, and the Slicer Community.
""")
class DMRIInstallWidget(ScriptedLoadableModuleWidget):
"""Uses ScriptedLoadableModuleWidget base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def setup(self):
ScriptedLoadableModuleWidget.setup(self)
self.textBox = ctk.ctkFittedTextBrowser()
self.textBox.setHtml(DMRIInstall.helpText)
self.parent.layout().addWidget(self.textBox)
#
# Apply Button
#
self.applyButton = qt.QPushButton("Open Extension Manager")
self.applyButton.toolTip = 'Install the "SlicerDMRI" extension from the Diffusion category.'
self.applyButton.icon = qt.QIcon(":/Icons/ExtensionDefaultIcon.png")
self.applyButton.enabled = True
self.applyButton.connect('clicked()', self.onApply)
self.parent.layout().addWidget(self.applyButton)
self.parent.layout().addStretch(1)
def onApply(self):
slicer.app.openExtensionsManagerDialog()
| 29.589744 | 96 | 0.731369 | 2,142 | 0.928076 | 0 | 0 | 0 | 0 | 0 | 0 | 954 | 0.413345 |
d0b43ab4f6dd3ba972b2dc8c30789b6cc19eaa03 | 24,698 | py | Python | opcalendar/models.py | buahaha/allianceauth-opcalendar | 44e50e06eac4b5c0e6b809e5ca2638af5e49145f | [
"MIT"
]
| null | null | null | opcalendar/models.py | buahaha/allianceauth-opcalendar | 44e50e06eac4b5c0e6b809e5ca2638af5e49145f | [
"MIT"
]
| null | null | null | opcalendar/models.py | buahaha/allianceauth-opcalendar | 44e50e06eac4b5c0e6b809e5ca2638af5e49145f | [
"MIT"
]
| null | null | null | import requests
import json
from typing import Tuple
from datetime import timedelta, datetime
from django.db import models
from django.urls import reverse
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.utils.html import strip_tags
from django.contrib.auth.models import Group
from esi.errors import TokenExpiredError, TokenInvalidError
from esi.models import Token
from allianceauth.authentication.models import CharacterOwnership
from allianceauth.eveonline.models import EveCharacter, EveCorporationInfo
from allianceauth.services.hooks import get_extension_logger
from allianceauth.authentication.models import State
from .providers import esi
from .decorators import fetch_token_for_owner
logger = get_extension_logger(__name__)
class General(models.Model):
"""Meta model for app permissions"""
class Meta:
managed = False
default_permissions = ()
permissions = (
(
"basic_access",
"Can access this app and see operations based on visibility rules",
),
("create_event", "Can create and edit events"),
("see_signups", "Can see all signups for event"),
("manage_event", "Can delete and manage other signups"),
(
"add_ingame_calendar_owner",
"Can add ingame calendar feeds for their corporation",
),
)
class WebHook(models.Model):
"""Discord Webhook for pings"""
name = models.CharField(
max_length=150,
help_text=_("Name for this webhook"),
)
webhook_url = models.CharField(
max_length=500,
help_text=_("Webhook URL"),
)
enabled = models.BooleanField(default=True, help_text=_("Is the webhook enabled?"))
def send_embed(self, embed):
custom_headers = {"Content-Type": "application/json"}
data = '{"embeds": [%s]}' % json.dumps(embed)
r = requests.post(self.webhook_url, headers=custom_headers, data=data)
r.raise_for_status()
class Meta:
verbose_name = "Webhook"
verbose_name_plural = "Webhooks"
def __str__(self):
return "{}".format(self.name)
class EventVisibility(models.Model):
name = models.CharField(
max_length=150, null=False, help_text="Name for the visibility filter"
)
restricted_to_group = models.ManyToManyField(
Group,
blank=True,
related_name="eventvisibility_require_groups",
help_text=_(
"The group(s) that will be able to see this event visibility type ..."
),
)
restricted_to_state = models.ManyToManyField(
State,
blank=True,
related_name="eventvisibility_require_states",
help_text=_(
"The state(s) that will be able to see this event visibility type ..."
),
)
webhook = models.ForeignKey(
WebHook,
on_delete=models.CASCADE,
null=True,
blank=True,
help_text=_("Webhook to send over notifications about these fleet types"),
)
ignore_past_fleets = models.BooleanField(
default=True,
help_text=_("Should we ignore fleet signals that are in the past"),
)
color = models.CharField(
max_length=7,
default="",
blank=True,
help_text=_("Color to be displayed on calendar"),
)
include_in_feed = models.BooleanField(
default=False,
help_text=("Whether these events should be included in the ical feed."),
)
is_visible = models.BooleanField(
default=True,
help_text=(
"Whether this visibility filter should be displayed on the event form. Disable for internal visibilities such as the NPSI import fleet visibilities."
),
)
is_default = models.BooleanField(
default=False,
help_text=(
"Whether this visibility filter is used as the default value on the event form"
),
)
is_active = models.BooleanField(
default=True,
help_text=("Whether this visibility filter is active"),
)
def __str__(self) -> str:
return str(self.name)
class Meta:
verbose_name = "Event Visibility Filter"
verbose_name_plural = "Event Visibilities Filters"
def save(self, *args, **kwargs):
if self.is_default:
# select all other is_default items
qs = type(self).objects.filter(is_default=True)
# except self (if self already exists)
if self.pk:
qs = qs.exclude(pk=self.pk)
# and deactive them
qs.update(is_default=False)
super(EventVisibility, self).save(*args, **kwargs)
@property
def get_visibility_class(self):
return f"{self.name.replace(' ', '-').lower()}"
class EventHost(models.Model):
"""Fleet Timer Create/Delete pings"""
community = models.CharField(
max_length=150, null=False, help_text="Name of the community"
)
logo_url = models.CharField(
max_length=256, blank=True, help_text="Absolute URL for the community logo"
)
ingame_channel = models.CharField(
max_length=150, blank=True, help_text="Ingame channel name"
)
ingame_mailing_list = models.CharField(
max_length=150, blank=True, help_text="Ingame mailing list name"
)
fleet_comms = models.CharField(
max_length=150,
blank=True,
help_text="Link or description for primary comms such as discord link",
)
fleet_doctrines = models.CharField(
max_length=150, blank=True, help_text="Link or description to the doctrines"
)
website = models.CharField(max_length=150, blank=True, help_text="Website link URL")
discord = models.CharField(max_length=150, blank=True, help_text="Discord link URL")
twitch = models.CharField(max_length=150, blank=True, help_text="Twitch link URL")
twitter = models.CharField(max_length=150, blank=True, help_text="Twitter link URL")
youtube = models.CharField(max_length=150, blank=True, help_text="Youtube link URL")
facebook = models.CharField(
max_length=150, blank=True, help_text="Facebook link URL"
)
details = models.CharField(
max_length=150, blank=True, help_text="Short description about the host."
)
is_default = models.BooleanField(
default=False,
help_text=("Whether this host is used as the default value on the event form"),
)
external = models.BooleanField(
default=False,
help_text=_(
"External hosts are for NPSI API imports. Checking this box will hide the host in the manual event form."
),
)
def __str__(self):
return str(self.community)
class Meta:
verbose_name = "Host"
verbose_name_plural = "Hosts"
def save(self, *args, **kwargs):
if self.is_default:
# select all other is_default items
qs = type(self).objects.filter(is_default=True)
# except self (if self already exists)
if self.pk:
qs = qs.exclude(pk=self.pk)
# and deactive them
qs.update(is_default=False)
super(EventHost, self).save(*args, **kwargs)
class EventCategory(models.Model):
name = models.CharField(
max_length=150,
help_text=_("Name for the category"),
)
ticker = models.CharField(
max_length=10,
help_text=_("Ticker for the category"),
)
color = models.CharField(
max_length=7,
default="",
blank=True,
help_text=_("Color to be displayed on calendar"),
)
description = models.TextField(
blank=True,
help_text="Prefilled description that will be added on default on the event description.",
)
class Meta:
verbose_name = "Category"
verbose_name_plural = "Categories"
def __str__(self):
return str(self.name)
@property
def get_category_class(self):
return f"{self.name.replace(' ', '-').lower()}"
class EventImport(models.Model):
"""NPSI IMPORT OPTIONS"""
SPECTRE_FLEET = "SF"
EVE_UNIVERSITY = "EU"
FUN_INC = "FI"
FRIDAY_YARRRR = "FY"
REDEMPTION_ROAD = "RR"
CAS = "CA"
FWAMING_DWAGONS = "FD"
FREE_RANGE_CHIKUNS = "FR"
EVE_LINKNET = "LN"
IMPORT_SOURCES = [
(EVE_LINKNET, _("EVE LinkNet")),
(SPECTRE_FLEET, _("Spectre Fleet")),
(EVE_UNIVERSITY, _("EVE University")),
(FUN_INC, _("Fun Inc.")),
(FRIDAY_YARRRR, _("FRIDAY YARRRR")),
(REDEMPTION_ROAD, _("Redemption Road")),
(CAS, _("CAS")),
(FWAMING_DWAGONS, _("Fwaming Dwagons")),
(FREE_RANGE_CHIKUNS, _("FREE RANGE CHIKUNS")),
]
source = models.CharField(
max_length=32,
choices=IMPORT_SOURCES,
help_text="The API source where you want to pull events from",
)
host = models.ForeignKey(
EventHost,
on_delete=models.CASCADE,
default=1,
help_text="The AA host that will be used for the pulled events",
)
operation_type = models.ForeignKey(
EventCategory,
on_delete=models.CASCADE,
help_text="Operation type and ticker that will be assigned for the pulled fleets",
)
creator = models.ForeignKey(
User,
on_delete=models.CASCADE,
default="1",
help_text="User that has been used to create the fleet (most often the superuser who manages the plugin)",
)
eve_character = models.ForeignKey(
EveCharacter,
null=True,
on_delete=models.SET_NULL,
help_text="Event creator main character",
)
event_visibility = models.ForeignKey(
EventVisibility,
on_delete=models.CASCADE,
null=True,
help_text=_("Visibility filter that dictates who is able to see this event"),
)
def __str__(self):
return str(self.source)
class Meta:
verbose_name = "NPSI Event Import"
verbose_name_plural = "NPSI Event Imports"
class Event(models.Model):
operation_type = models.ForeignKey(
EventCategory,
null=True,
on_delete=models.CASCADE,
help_text=_("Event category type"),
)
title = models.CharField(
max_length=200,
help_text=_("Title for the event"),
)
host = models.ForeignKey(
EventHost,
on_delete=models.CASCADE,
help_text=_("Host entity for the event"),
)
doctrine = models.CharField(
max_length=254,
help_text=_("Doctrine URL or name"),
)
formup_system = models.CharField(
max_length=254,
help_text=_("Location for formup"),
)
description = models.TextField(
help_text=_("Description text for the operation"),
)
start_time = models.DateTimeField(
help_text=_("Event start date and time"),
)
end_time = models.DateTimeField(
help_text=_("Event end date and time"),
)
fc = models.CharField(
max_length=254,
help_text=_("Fleet commander/manager for the event"),
)
event_visibility = models.ForeignKey(
EventVisibility,
on_delete=models.CASCADE,
null=True,
blank=True,
help_text=_("Visibility filter that dictates who is able to see this event"),
)
external = models.BooleanField(
default=False,
null=True,
help_text=_("Is the event an external event over API"),
)
created_date = models.DateTimeField(
default=timezone.now,
help_text=_("When the event was created"),
)
eve_character = models.ForeignKey(
EveCharacter,
null=True,
on_delete=models.SET_NULL,
help_text=_("Character used to create the event"),
)
user = models.ForeignKey(
User,
on_delete=models.CASCADE,
help_text=_("User who created the event"),
)
def duration(self):
return self.end_time - self.start_time
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("opcalendar:event-detail", args=(self.id,))
@property
def get_visibility_class(self):
if self.event_visibility:
return f"{self.event_visibility.name.replace(' ', '-').lower()}"
@property
def get_event_styling(self):
if self.event_visibility:
return f".{self.event_visibility.name.replace(' ', '-').lower()}:before{{border-color: transparent {self.event_visibility.color} transparent transparent;border-style: solid;}} .{self.operation_type.name.replace(' ', '-').lower()} {{border-left: 6px solid {self.operation_type.color} !important;}}"
@property
def get_category_class(self):
if self.operation_type:
return f"{self.operation_type.name.replace(' ', '-').lower()}"
@property
def get_date_status(self):
if datetime.now(timezone.utc) > self.start_time:
return "past-event"
else:
return "future-event"
@property
def get_html_url(self):
url = reverse("opcalendar:event-detail", args=(self.id,))
return f"{url}"
@property
def get_html_title(self):
return f'<span>{self.start_time.strftime("%H:%M")} - {self.end_time.strftime("%H:%M")} <i>{self.host.community}</i></span><span><b>{self.operation_type.ticker} {self.title}</b></span>'
    def user_can_edit(self, user: User) -> bool:
"""Checks if the given user can edit this timer. Returns True or False"""
return user.has_perm("opcalendar.manage_event") or (
self.user == user and user.has_perm("opcalendar.create_event")
)
class Owner(models.Model):
"""A corporation that holds the calendars"""
ERROR_NONE = 0
ERROR_TOKEN_INVALID = 1
ERROR_TOKEN_EXPIRED = 2
ERROR_INSUFFICIENT_PERMISSIONS = 3
ERROR_NO_CHARACTER = 4
ERROR_ESI_UNAVAILABLE = 5
ERROR_OPERATION_MODE_MISMATCH = 6
ERROR_UNKNOWN = 99
ERRORS_LIST = [
(ERROR_NONE, "No error"),
(ERROR_TOKEN_INVALID, "Invalid token"),
(ERROR_TOKEN_EXPIRED, "Expired token"),
(ERROR_INSUFFICIENT_PERMISSIONS, "Insufficient permissions"),
(ERROR_NO_CHARACTER, "No character set for fetching data from ESI"),
(ERROR_ESI_UNAVAILABLE, "ESI API is currently unavailable"),
(
ERROR_OPERATION_MODE_MISMATCH,
"Operaton mode does not match with current setting",
),
(ERROR_UNKNOWN, "Unknown error"),
]
corporation = models.OneToOneField(
EveCorporationInfo,
default=None,
null=True,
blank=True,
on_delete=models.CASCADE,
help_text="Corporation owning the calendar",
related_name="+",
)
character = models.ForeignKey(
CharacterOwnership,
on_delete=models.SET_DEFAULT,
default=None,
null=True,
blank=True,
help_text="Character used for syncing the calendar",
related_name="+",
)
event_visibility = models.ForeignKey(
EventVisibility,
on_delete=models.CASCADE,
null=True,
blank=True,
help_text=_("Visibility filter that dictates who is able to see this event"),
)
operation_type = models.ForeignKey(
EventCategory,
null=True,
blank=True,
on_delete=models.CASCADE,
help_text=_(
"Event category that will be assigned for all of the events from this owner."
),
)
is_active = models.BooleanField(
default=True,
help_text=("whether this owner is currently included in the sync process"),
)
class Meta:
verbose_name = "Ingame Clanedar Owner"
verbose_name_plural = "Ingame Calendar Owners"
@fetch_token_for_owner(["esi-calendar.read_calendar_events.v1"])
def update_events_esi(self, token):
if self.is_active:
# Get all current imported fleets in database
event_ids_to_remove = list(
IngameEvents.objects.filter(owner=self).values_list(
"event_id", flat=True
)
)
logger.debug(
"Ingame events currently in database: %s" % event_ids_to_remove
)
events = self._fetch_events()
for event in events:
character_id = self.character.character.character_id
details = (
esi.client.Calendar.get_characters_character_id_calendar_event_id(
character_id=character_id,
event_id=event["event_id"],
token=token.valid_access_token(),
).results()
)
end_date = event["event_date"] + timedelta(minutes=details["duration"])
original = IngameEvents.objects.filter(
owner=self, event_id=event["event_id"]
).first()
text = strip_tags(details["text"])
try:
if original is not None:
logger.debug("Event: %s already in database" % event["title"])
event_ids_to_remove.remove(original.event_id)
else:
# Check if we already have the host
original_host = EventHost.objects.filter(
community=details["owner_name"]
).first()
logger.debug("Got original host: {}".format(original_host))
if original_host is not None:
host = original_host
else:
host = EventHost.objects.create(
community=details["owner_name"],
external=True,
)
IngameEvents.objects.create(
event_id=event["event_id"],
owner=self,
text=text,
event_owner_id=details["owner_id"],
owner_type=details["owner_type"],
owner_name=details["owner_name"],
host=host,
importance=details["importance"],
duration=details["duration"],
event_start_date=event["event_date"],
event_end_date=end_date,
title=event["title"],
)
logger.debug("New event created: %s" % event["title"])
except Exception as e:
logger.debug("Error adding new event: %s" % e)
logger.debug("Removing all events that we did not get over API")
IngameEvents.objects.filter(pk__in=event_ids_to_remove).delete()
logger.debug(
"All events fetched for %s" % self.character.character.character_name
)
@fetch_token_for_owner(["esi-calendar.read_calendar_events.v1"])
def _fetch_events(self, token) -> list:
character_id = self.character.character.character_id
events = esi.client.Calendar.get_characters_character_id_calendar(
character_id=character_id,
token=token.valid_access_token(),
).results()
return events
def token(self, scopes=None) -> Tuple[Token, int]:
"""returns a valid Token for the owner"""
token = None
error = None
# abort if character is not configured
if self.character is None:
logger.error("%s: No character configured to sync", self)
error = self.ERROR_NO_CHARACTER
# abort if character does not have sufficient permissions
elif self.corporation and not self.character.user.has_perm(
"opcalendar.add_ingame_calendar_owner"
):
logger.error(
"%s: This character does not have sufficient permission to sync corporation calendars",
self,
)
error = self.ERROR_INSUFFICIENT_PERMISSIONS
# abort if character does not have sufficient permissions
elif not self.character.user.has_perm("opcalendar.add_ingame_calendar_owner"):
logger.error(
"%s: This character does not have sufficient permission to sync personal calendars",
self,
)
error = self.ERROR_INSUFFICIENT_PERMISSIONS
else:
try:
# get token
token = (
Token.objects.filter(
user=self.character.user,
character_id=self.character.character.character_id,
)
.require_scopes(scopes)
.require_valid()
.first()
)
except TokenInvalidError:
logger.error("%s: Invalid token for fetching calendars", self)
error = self.ERROR_TOKEN_INVALID
except TokenExpiredError:
logger.error("%s: Token expired for fetching calendars", self)
error = self.ERROR_TOKEN_EXPIRED
else:
if not token:
logger.error("%s: No token found with sufficient scopes", self)
error = self.ERROR_TOKEN_INVALID
return token, error
class IngameEvents(models.Model):
event_id = models.PositiveBigIntegerField(
primary_key=True, help_text="The EVE ID of the event"
)
owner = models.ForeignKey(
Owner,
on_delete=models.CASCADE,
help_text="Event holder",
)
event_start_date = models.DateTimeField()
event_end_date = models.DateTimeField(blank=True, null=True)
title = models.CharField(max_length=128)
text = models.TextField()
event_owner_id = models.IntegerField(null=True)
owner_type = models.CharField(max_length=128)
owner_name = models.CharField(max_length=128)
host = models.ForeignKey(
EventHost,
on_delete=models.CASCADE,
default=1,
help_text=_("Host entity for the event"),
)
importance = models.CharField(max_length=128)
duration = models.CharField(max_length=128)
def __str__(self):
return self.title
class Meta:
verbose_name = "Ingame Event"
verbose_name_plural = "Ingame Events"
def get_absolute_url(self):
return reverse("opcalendar:ingame-event-detail", args=(self.event_id,))
@property
def get_date_status(self):
if datetime.now(timezone.utc) > self.event_start_date:
return "past-event"
else:
return "future-event"
@property
def get_visibility_class(self):
if self.owner.event_visibility:
return f"{self.owner.event_visibility.name.replace(' ', '-').lower()}"
else:
return "ingame-event"
@property
def get_event_styling(self):
d = ""
if self.owner.event_visibility:
d += f".{self.owner.event_visibility.name.replace(' ', '-').lower()}:before{{border-color: transparent {self.owner.event_visibility.color} transparent transparent;border-style: solid;}}"
if self.owner.operation_type:
d += f".{self.owner.operation_type.name.replace(' ', '-').lower()} {{border-left: 6px solid {self.owner.operation_type.color} !important;}}"
return d
@property
def get_category_class(self):
if self.owner.operation_type:
return f"{self.owner.operation_type.name.replace(' ', '-').lower()}"
@property
def get_html_url(self):
url = reverse("opcalendar:ingame-event-detail", args=(self.event_id,))
return f"{url}"
@property
def get_html_title(self):
return f'<span>{self.event_start_date.strftime("%H:%M")} - {self.event_end_date.strftime("%H:%M")}<i> {self.owner_name}</i></span><span><b>{self.title}</b></span>'
class EventMember(models.Model):
event = models.ForeignKey(Event, on_delete=models.CASCADE)
character = models.ForeignKey(
EveCharacter,
null=True,
on_delete=models.SET_NULL,
help_text="Event creator main character",
)
class Meta:
unique_together = ["event", "character"]
| 33.285714 | 309 | 0.604907 | 23,830 | 0.964855 | 0 | 0 | 6,562 | 0.26569 | 0 | 0 | 6,710 | 0.271682 |
d0b49d08acf472e125d49a19fc95585b9f897f91 | 5,603 | py | Python | scripts/mot_neural_solver/pl_module/pair_nuclei.py | taimurhassan/crc | 930be78505dd17655542a38b0fc1ded9cf19a9a2 | [
"MIT"
]
| 1 | 2022-03-16T10:40:23.000Z | 2022-03-16T10:40:23.000Z | scripts/mot_neural_solver/pl_module/pair_nuclei.py | taimurhassan/crc | 930be78505dd17655542a38b0fc1ded9cf19a9a2 | [
"MIT"
]
| null | null | null | scripts/mot_neural_solver/pl_module/pair_nuclei.py | taimurhassan/crc | 930be78505dd17655542a38b0fc1ded9cf19a9a2 | [
"MIT"
]
| null | null | null | import sacred
from sacred import Experiment
import os.path as osp
import pandas as pd
import scipy.io as sio
import numpy as np
from sacred import SETTINGS
SETTINGS.CONFIG.READ_ONLY_CONFIG=False
def pair_nuclei_and_generate_output(out_files_dir, datasetName, detector = "tracktor_prepr_det"):
startFrame = 159
endFrame = 238
if datasetName == "crchisto":
startFrame = 71
endFrame = 100
elif datasetName == "consep":
startFrame = 1
endFrame = 14
elif datasetName == "pannuke":
startFrame = 1
endFrame = 2359
elif datasetName == "lizard":
startFrame = 159
endFrame = 238
print("Start Frame: ",startFrame, ", End Frame: ", endFrame, ", Dataset: ", datasetName)
print("\n\nBackbone: ",detector,"\n\n")
for i in range(startFrame, endFrame + 1):
pred2 = []
gt2 = []
gt1 = []
pred1 = []
print(out_files_dir.replace(':','_'))
initial = "/MOT17-0"
if i > 9:
initial = "/MOT17-"
pn1 = out_files_dir.replace(':','_') + initial + str(i) + "-FRCNN.txt"
pn2 = "data/MOT17Det/test" + initial + str(i) + "/gt/gt.txt"
if detector == "tracktor_prepr_det":
pn3 = "data/MOT17Labels/test" + initial + str(i) + "-FRCNN/det/tracktor_prepr_det.txt"
else:
pn3 = "data/MOT17Labels/test" + initial + str(i) + "-FRCNN/det/frcnn_prepr_det.txt"
print(pn1)
print(pn2)
print(pn3)
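        # Pairing step: for each raw detection read from pn3, find the nearest
        # tracker-output centroid from pn1 (Euclidean distance) and record
        # that point as the paired predicted centroid.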
f1 = open(pn3, "r")
for x in f1:
minDist = 1000000000
pairNodeCentroid = [] #Centroid Point
xs = x.split(',')
#print(xs) # tokenized line
type = int(xs[1])
pred2.append(type)
x1 = float(xs[2]) + float(xs[4])/2
y1 = float(xs[3]) + float(xs[5])/2
f2 = open(pn1, "r")
for y in f2:
ys = y.split(',')
#print(ys)
#input()
x2 = float(ys[2]) + float(ys[4])/2
y2 = float(ys[3]) + float(ys[5])/2
distance = np.sqrt(((x1-x2) * (x1-x2)) + ((y1-y2) * (y1-y2)))
if distance < minDist:
minDist = distance
pairNodeCentroid = [x2, y2]
#print(int(type))
f2.close()
#input()
if len(pairNodeCentroid) > 0:
pred1.append([round(pairNodeCentroid[0],2),round(pairNodeCentroid[1],2)])
else:
pred1.append([round(x1,2),round(y1,2)])
f1.close()
print(len(pred2))
f1 = open(pn2, "r")
#print(len(pred1))
for x in f1:
minDist = 1000000000
pairNodeCentroid = [] #Centroid Point
xs = x.split(',')
#print(xs) # tokenized line
type = int(xs[1])
x1 = float(xs[2]) + float(xs[4])/2
y1 = float(xs[3]) + float(xs[5])/2
gt2.append(type)
gt1.append([round(x1,2),round(y1,2)])
#print(gt1)
f1.close()
print(len(gt2))
#print(len(pred2))
#input()
inst_type_pred = np.transpose(np.array(pred2))
inst_type_gt = np.transpose(np.array(gt2))
inst_centroid_pred = np.array(pred1)
inst_centroid_gt = np.array(gt1)
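        # Export the paired centroids/types to .mat files: PanNuke gets one
        # file per frame, the other datasets are written in chunks of at most
        # 100 detections per file.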
index = 0
index2 = 0
pn4 = "mot_neural_solver/output/"+ datasetName +"/true/detections_"+ str(i) + "_" + str(index2) + ".mat"
pn5 = "mot_neural_solver/output/"+ datasetName +"/pred/detections_"+ str(i) + "_" + str(index2) + ".mat"
pred1 = []
pred2 = []
gt1 = []
gt2 = []
if datasetName == "pannuke":
pn4 = "mot_neural_solver/output/"+ datasetName +"/true/detections_"+ str(i) + ".mat"
pn5 = "mot_neural_solver/output/"+ datasetName +"/pred/detections_"+ str(i) + ".mat"
mdic = {"inst_centroid": inst_centroid_gt, "inst_type": inst_type_gt}
sio.savemat(pn4, mdic)
mdic = {"inst_centroid": inst_centroid_pred, "inst_type": inst_type_pred}
sio.savemat(pn5, mdic)
else:
for ii in range(min(len(inst_type_gt),len(inst_type_pred))):
if index > 99:
pn4 = "mot_neural_solver/output/"+ datasetName +"/true/detections_"+ str(i) + "_" + str(index2) + ".mat"
pn5 = "mot_neural_solver/output/"+ datasetName +"/pred/detections_"+ str(i) + "_" + str(index2) + ".mat"
index = 0
#print(len(pred2))
mdic = {"inst_centroid": gt1, "inst_type": gt2}
sio.savemat(pn4, mdic)
mdic = {"inst_centroid": pred1, "inst_type": pred2}
sio.savemat(pn5, mdic)
pred1 = []
pred2 = []
gt1 = []
gt2 = []
else:
index = index + 1
index2 = index2 + 1
pred1.append(inst_centroid_pred[ii])
pred2.append(inst_type_pred[ii])
if inst_centroid_gt[ii] != '':
gt1.append(inst_centroid_gt[ii])
gt2.append(inst_type_gt[ii])
if inst_type_gt[ii] == 4:
print(inst_type_gt[ii], " type is coming in gt")
if inst_type_gt[ii] == 5:
print(inst_type_gt[ii], " type is coming in gt")
| 29.489474 | 124 | 0.493129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,031 | 0.184009 |
d0b6dbf00473c06ab74f4d07421e49558388e75e | 90 | py | Python | test.py | Jiyao17/fl-grouping | 37ada217cdf9121c9d7119f311228e87ba4a8e83 | [
"MIT"
]
| null | null | null | test.py | Jiyao17/fl-grouping | 37ada217cdf9121c9d7119f311228e87ba4a8e83 | [
"MIT"
]
| null | null | null | test.py | Jiyao17/fl-grouping | 37ada217cdf9121c9d7119f311228e87ba4a8e83 | [
"MIT"
]
| 1 | 2022-01-29T22:31:43.000Z | 2022-01-29T22:31:43.000Z |
import numpy as np
arrs = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
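# Integer-array (fancy) indexing: selects rows 0 and 2 of the matrix.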
print(arrs[[0,2]]) | 18 | 50 | 0.511111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
d0b8c5cb52a0f84127322d8ea824dbfd7a2fbbb9 | 1,087 | py | Python | onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py | szha/onnxmltools | b04d05bda625cbc006955ce0a220277739a95825 | [
"MIT"
]
| 3 | 2019-02-27T21:03:43.000Z | 2020-04-07T22:16:50.000Z | onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py | szha/onnxmltools | b04d05bda625cbc006955ce0a220277739a95825 | [
"MIT"
]
| null | null | null | onnxmltools/convert/coreml/operator_converters/ArrayFeatureExtractor.py | szha/onnxmltools | b04d05bda625cbc006955ce0a220277739a95825 | [
"MIT"
]
| 2 | 2020-10-01T09:24:55.000Z | 2021-04-17T13:57:31.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from ....proto import onnx_proto
from ...common._registration import register_converter
def convert_array_feature_extractor(scope, operator, container):
op_type = 'ArrayFeatureExtractor'
attrs = {'name': operator.full_name}
target_indexes = operator.raw_operator.arrayFeatureExtractor.extractIndex
index_buffer_name = scope.get_unique_variable_name('target_indexes')
container.add_initializer(index_buffer_name, onnx_proto.TensorProto.INT64, [len(target_indexes)], target_indexes)
inputs = [operator.inputs[0].full_name, index_buffer_name]
outputs = [operator.outputs[0].full_name]
container.add_node(op_type, inputs, outputs, op_domain='ai.onnx.ml', **attrs)
register_converter('arrayFeatureExtractor', convert_array_feature_extractor)
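# Note: the ai.onnx.ml ArrayFeatureExtractor op gathers the elements listed in
# its second (index) input along the last axis of the first input, which is
# how CoreML's extractIndex selection is expressed in the exported graph.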
| 41.807692 | 117 | 0.678933 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 385 | 0.354186 |
d0b987edf568de32ee6c05d30261bbe4ded56c15 | 2,682 | py | Python | constants.py | xuefei1/Graph-Seq2Attn | 336c69877e483c95d9996ee205d2a005342f08af | [
"MIT"
]
| 1 | 2020-01-06T07:49:46.000Z | 2020-01-06T07:49:46.000Z | constants.py | xuefei1/Graph-Seq2Attn | 336c69877e483c95d9996ee205d2a005342f08af | [
"MIT"
]
| 1 | 2020-04-16T10:15:27.000Z | 2020-04-16T16:41:42.000Z | constants.py | xuefei1/Graph-Seq2Attn | 336c69877e483c95d9996ee205d2a005342f08af | [
"MIT"
]
| null | null | null | # one identifier for one types of dict
# for instance, DK_SOME_KEY means this is a key for a data_dict
DK_BATCH_SIZE = "batch_size"
DK_PAD = "pad" # DK: general purpose data_dict
DK_SRC_WID = "src_wid" # src = msg + ctx
DK_SRC_WID_MASK = "src_wid_mask"
DK_SRC_SEQ_MASK = "src_seq_mask"
DK_MSG_WID = "msg_wid" # msg is usually shorter than ctx
DK_MSG_WID_MASK = "msg_wid_mask"
DK_CTX_WID = "ctx_wid" # msg is usually shorter than ctx
DK_CTX_WID_MASK = "ctx_wid_mask"
DK_SRC_POS = "src_pos"
DK_SRC_NER = "src_ner"
DK_SRC_SEG_LISTS = "src_seg_lists"
DK_TGT_GEN_WID = "tgt_gen_wid"
DK_TGT_CPY_WID = "tgt_cpy_wid"
DK_TGT_CPY_GATE = "tgt_cpy_gate"
DK_TGT_N_TOKEN = "tgt_n_token"
DK_TGT_SEG_LISTS = "tgt_seg_lists"
DK_SRC_IOB = "src_iob" # iob: SQuAD QG specific
DK_DOC_WID = "doc_wid"
DK_DOC_SEG_LISTS = "doc_seg_lists"
DK_DOC_WID_MASK = "doc_wid_mask"
DK_DOC_SENTS_WID = "doc_sents_wid"
DK_DOC_SENTS_WID_MASK = "doc_sents_wid_mask"
DK_TITLE_WID = "title_wid"
DK_TQ_SEG_LISTS = "title_seg_lists"
DK_TITLE_WID_MASK = "title_wid_mask"
DK_CONCEPT_SEG_LISTS = "concept_seg_lists"
DK_TGT_CONCEPT_GEN_WID = "tgt_concept_gen_wid" # concept gen specific
DK_TGT_CONCEPT_CPY_WID = "tgt_concept_cpy_wid"
DK_TGT_CONCEPT_CPY_GATE = "tgt_concept_cpy_gate"
DK_TGT_CONCEPT_N_TOKEN = "tgt_concept_n_token"
DK_TGT_TITLE_GEN_WID = "tgt_title_gen_wid" # title gen specific
DK_TGT_TITLE_CPY_WID = "tgt_title_cpy_wid"
DK_TGT_TITLE_CPY_GATE = "tgt_title_cpy_gate"
DK_TGT_TITLE_N_TOKEN = "tgt_title_n_token"
DK_SENT_DEPEND_GRAPH_LIST = "sent_depend_graph_list"
DK_DOC_KW_DIST_GRAPH = "doc_kw_dist_graph"
DK_DOC_SENT_MEAN_TFIDF_SIM_GRAPH = "doc_sent_mean_tfidf_sim_graph"
DK_DOC_SENT_PAIR_TFIDF_SIM_GRAPH = "doc_sent_pair_tfidf_sim_graph"
DK_DOC_SENT_WORD_OVERLAP_GRAPH = "doc_sent_word_overlap_graph"
DK_G2S_WID_GRAPH = "graph2seq_wid_graph"
SQGK_SRC_W_LIST = "src_word_list" # SQGK: SQuAD data reader keys
SQGK_SRC_IOB_LIST = "src_iob_list"
SQGK_SRC_POS_LIST = "src_pos_list"
SQGK_SRC_NER_LIST = "src_ner_list"
SQGK_TGT_W_LIST = "tgt_word_list"
SQGK_DATA_LIST = "data_list"
SQGK_IOB_T2I = "iob_t2i"
SQGK_POS_T2I = "pos_t2i"
SQGK_NER_T2I = "ner_t2i"
CHKPT_COMPLETED_EPOCHS = "completed_epochs" # CHKPT: checkpoint dict keys
CHKPT_MODEL = "model"
CHKPT_OPTIMIZER = "optimizer"
CHKPT_METADATA = "metadata"
CHKPT_PARAMS = "params"
CHKPT_BEST_EVAL_RESULT = "best_eval_result"
CHKPT_BEST_EVAL_EPOCH = "best_eval_epoch"
CHKPT_PAST_EVAL_RESULTS = "past_eval_results"
GK_EDGE_WEIGHT = "edge_weight" # GK: graph keys
GK_EDGE_WORD_PAIR = "edge_word_pair"
GK_EDGE_GV_IDX_PAIR = "edge_v_idx_pair"
GK_EDGE_TYPE = "edge_type"
GK_EDGE_DIR = "edge_directed"
GK_EDGE_UNDIR = "edge_undirected"
GK_SENT_DEP = "sentence_depends"
| 37.774648 | 73 | 0.818792 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,344 | 0.501119 |
d0ba6c15a6c14b45dd62608fe761ce634451a9c5 | 794 | py | Python | register/views/list.py | Bartlett-Christopher/coaching_manual | 43e6dd582f74afa3e0c89203cd01380638f8ed7f | [
"MIT"
]
| null | null | null | register/views/list.py | Bartlett-Christopher/coaching_manual | 43e6dd582f74afa3e0c89203cd01380638f8ed7f | [
"MIT"
]
| 6 | 2020-05-18T05:38:26.000Z | 2021-09-22T19:02:10.000Z | register/views/list.py | Bartlett-Christopher/coaching_manual | 43e6dd582f74afa3e0c89203cd01380638f8ed7f | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""
.. module:: register.views.list
:synopsis: View to list all registered users
.. moduleauthor:: Chris Bartlett
"""
from django.urls import reverse
from django.views.generic import TemplateView
from register.api.utils.make_request import make_request
class UserListView(TemplateView):
"""User list view."""
template_name = 'register/list.html'
def get(self, request, *args, **kwargs):
"""GET handler"""
url = request.build_absolute_uri(reverse('api:user'))
response = make_request(url, method='get')
if response.status_code != 200:
return self.render_to_response({})
context = self.get_context_data()
context['users'] = response.json()
return self.render_to_response(context)
| 25.612903 | 61 | 0.667506 | 509 | 0.641058 | 0 | 0 | 0 | 0 | 0 | 0 | 224 | 0.282116 |
d0bd703517c8b3f6a8e778d87ff497a305805d45 | 12,308 | py | Python | tests/evaluator_test.py | NightShade256/prymate | deeb81ab685854599d803719971e85ead6699a90 | [
"MIT"
]
| 6 | 2020-06-22T14:54:55.000Z | 2021-12-13T12:33:21.000Z | tests/evaluator_test.py | NightShade256/prymate | deeb81ab685854599d803719971e85ead6699a90 | [
"MIT"
]
| null | null | null | tests/evaluator_test.py | NightShade256/prymate | deeb81ab685854599d803719971e85ead6699a90 | [
"MIT"
]
| 1 | 2020-10-11T18:31:57.000Z | 2020-10-11T18:31:57.000Z | import unittest
from prymate import evaluator, objects
from prymate.lexer import Lexer
from prymate.parser import Parser
class TestEvaluator(unittest.TestCase):
def test_eval_numeric_exp(self):
tests = [
["5", 5],
["10", 10],
["-5", -5],
["-10", -10],
["5 + 5 + 5 + 5 - 10", 10],
["2 * 2 * 2 * 2 * 2", 32],
["-50 + 100 + -50", 0],
["5 * 2 + 10", 20],
["5 + 2 * 10", 25],
["20 + 2 * -10", 0],
["50 / 2 * 2 + 10", 60],
["2 * (5 + 10)", 30],
["3 * 3 * 3 + 10", 37],
["3 * (3 * 3) + 10", 37],
["(5 + 10 * 2 + 15 / 3) * 2 + -10", 50],
["2 * 3 % 4 * 2", 4],
["2 * 3 % 4 * 2 - 10", -6],
["2.2 * 2", 4.4],
["12 / 6", 2.0],
["12.2 / 2", 6.1],
["1.5 * -1.5", -2.25],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_int_object(evaluated, tt[1])
def test_eval_bool_exp(self):
tests = [
["true", True],
["false", False],
["1 < 2", True],
["1 > 2", False],
["1 < 1", False],
["1 > 1", False],
["1 == 1", True],
["1 != 1", False],
["1 == 2", False],
["1 != 2", True],
["true == true", True],
["false == false", True],
["true == false", False],
["true != false", True],
["false != true", True],
["(1 < 2) == true", True],
["(1 < 2) == false", False],
["(1 > 2) == true", False],
["(1 > 2) == false", True],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_boolean_object(evaluated, tt[1])
def test_bang_operator(self):
tests = [
["!true", False],
["!false", True],
["!5", False],
["!!true", True],
["!!false", False],
["!!5", True],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_boolean_object(evaluated, tt[1])
def test_if_exp(self):
tests = [
["if (true) { 10 }", 10],
["if (false) { 10 }", None],
["if (1) { 10 }", 10],
["if (1 < 2) { 10 }", 10],
["if (1 > 2) { 10 }", None],
["if (1 > 2) { 10 } else { 20 }", 20],
["if (1 < 2) { 10 } else { 20 }", 10],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if isinstance(tt[1], int):
self._test_int_object(evaluated, int(tt[1]))
else:
self._test_null_object(evaluated)
def test_return_statements(self):
tests = [
["return 10;", 10],
["return 10; 9;", 10],
["return 2 * 5; 9;", 10],
["9; return 2 * 5; 9;", 10],
["9; return 2 * 5.2; 9;", 10.4],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
self._test_int_object(evaluated, tt[1])
def test_error_handling(self):
tests = [
["5 + true;", "type mismatch: INTEGER + BOOLEAN"],
["5 + true; 5;", "type mismatch: INTEGER + BOOLEAN"],
["-true", "unknown operator: -BOOLEAN"],
["true + false;", "unknown operator: BOOLEAN + BOOLEAN"],
["true % false;", "unknown operator: BOOLEAN % BOOLEAN"],
["5; true + false; 5", "unknown operator: BOOLEAN + BOOLEAN"],
["if (10 > 1) { true + false; }", "unknown operator: BOOLEAN + BOOLEAN"],
[
"""
                if (10 > 1) {
                    if (10 > 1) {
                        return true + false;
                    }
                    return 1;
                }
                """,
"unknown operator: BOOLEAN + BOOLEAN",
],
["foobar", "identifier not found: foobar"],
['"Hello" - "World"', "unknown operator: STRING - STRING"],
[
'{"name": "Monkey"}[fn(x) { x }];',
"unusable as dictionary key: FUNCTION",
],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if not isinstance(evaluated, objects.Error):
self.fail(f"No error object returned. Got {evaluated}.")
self.assertEqual(
evaluated.message,
tt[1],
f"Wrong error message. Expected {tt[1]}, got {evaluated.message}.",
)
def test_let_statements(self):
tests = [
["let a = 5; a;", 5],
["let a = 5 * 5; a;", 25],
["let a = 5; let b = a; b;", 5],
["let a = 5; let b = a; let c = a + b + 5; c;", 15],
]
for tt in tests:
self._test_int_object(self._test_eval(tt[0]), tt[1])
def test_function_object(self):
input_case = "fn(x) { x + 2; };"
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.Function):
self.fail(f"Object is not Function. Got, {evaluated}")
self.assertEqual(
len(evaluated.parameters),
1,
f"Function has wrong parameters, got {len(evaluated.parameters)}.",
)
self.assertEqual(
str(evaluated.parameters[0]),
"x",
f"Parameter is not 'x', got {str(evaluated.parameters[0])}.",
)
expected_body = "(x + 2)"
self.assertEqual(
str(evaluated.body),
expected_body,
f"body is not {expected_body}. got {str(evaluated.body)}.",
)
def test_function_application(self):
tests = [
["let identity = fn(x) { x; }; identity(5);", 5],
["let identity = fn(x) { return x; }; identity(5);", 5],
["let double = fn(x) { x * 2; }; double(5);", 10],
["let add = fn(x, y) { x + y; }; add(5, 5);", 10],
["let add = fn(x, y) { x + y; }; add(5 + 5, add(5, 5));", 20],
["fn(x) { x; }(5)", 5],
]
for tt in tests:
self._test_int_object(self._test_eval(tt[0]), tt[1])
def test_string_literal(self):
input_case = '"Hello, World!"'
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.String):
self.fail(f"Object is not String, got {evaluated}.")
self.assertEqual(
evaluated.value,
"Hello, World!",
f"String has wrong value. Got {evaluated.value}.",
)
def test_string_concatenation(self):
input_case = '"Hello" + " " + "World!"'
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.String):
self.fail(f"Object is not String, got {evaluated}.")
self.assertEqual(
evaluated.value,
"Hello World!",
f"String has wrong value. Got {evaluated.value}.",
)
def test_builtin_functions(self):
tests = [
['len("")', 0],
['len("four")', 4],
['len("hello world")', 11],
["len(1)", "argument to `len` not supported, got INTEGER"],
['len("one", "two")', "wrong number of arguments. got=2, want=1"],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if isinstance(tt[1], int):
self._test_int_object(evaluated, tt[1])
elif isinstance(tt[1], str):
if not isinstance(evaluated, objects.Error):
self.fail(f"Object not of type Error, got {evaluated.tp().value}.")
self.assertEqual(
evaluated.message,
tt[1],
f"Wrong error message. Expected={tt[1]}, Got={evaluated.message}.",
)
def test_array_literals(self):
input_case = "[1, 2 * 2, 3 + 3];"
ev = self._test_eval(input_case)
if not isinstance(ev, objects.Array):
self.fail(f"Object not of type Array, got {ev.tp().value}.")
self.assertEqual(
len(ev.elements),
3,
f"Array has wrong number of elements. Got {len(ev.elements)}.",
)
self._test_int_object(ev.elements[0], 1)
self._test_int_object(ev.elements[1], 4)
self._test_int_object(ev.elements[2], 6)
def test_array_index_exp(self):
tests = [
["[1, 2, 3][0]", 1],
["[1, 2, 3][1]", 2],
["[1, 2, 3][2]", 3],
["let i = 0; [1][i];", 1],
["[1, 2, 3][1 + 1];", 3],
["let myArray = [1, 2, 3]; myArray[2];", 3],
["let myArray = [1, 2, 3]; myArray[0] + myArray[1] + myArray[2];", 6],
["let myArray = [1, 2, 3]; let i = myArray[0]; myArray[i]", 2],
["[1, 2, 3][3]", None],
["[1, 2, 3][-1]", None],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
if isinstance(tt[1], int):
self._test_int_object(evaluated, tt[1])
else:
self._test_null_object(evaluated)
def test_dictionary(self):
input_case = """
let two = "two";
{
"one": 10 - 9,
two: 1 + 1,
"thr" + "ee": 6 / 2,
4: 4,
true: 5,
false: 6
}
"""
evaluated = self._test_eval(input_case)
if not isinstance(evaluated, objects.Dictionary):
self.fail(f"Eval didn't return Hash. Got {evaluated}.")
expected = {
objects.String("one").hashkey(): 1,
objects.String("two").hashkey(): 2,
objects.String("three").hashkey(): 3,
objects.Integer(4).hashkey(): 4,
objects.Boolean(True).hashkey(): 5,
objects.Boolean(False).hashkey(): 6,
}
self.assertEqual(
len(evaluated.pairs),
len(expected),
f"Hash has wrong num of pairs. Got {len(evaluated.pairs)}.",
)
for key, val in expected.items():
pair = evaluated.pairs.get(key, None)
self.assertNotEqual(pair, None, "No pair for given key in pairs")
self._test_int_object(pair.value, val)
def test_dict_index_exp(self):
tests = [
['{"foo": 5}["foo"]', 5],
['{"foo": 5}["bar"]', None],
['let key = "foo"; {"foo": 5}[key]', 5],
['{}["foo"]', None],
["{5: 5}[5]", 5],
["{true: 5}[true]", 5],
["{false: 5}[false]", 5],
]
for tt in tests:
evaluated = self._test_eval(tt[0])
integer = tt[1]
if integer is not None:
self._test_int_object(evaluated, int(integer))
else:
self._test_null_object(evaluated)
def _test_int_object(self, obj: objects.Object, expected: int):
if not isinstance(obj, objects.Integer) and not isinstance(obj, objects.Float):
self.fail(f"Expected object to be Integer/Float, got {type(obj)}.")
self.assertEqual(
obj.value,
expected,
f"Object has wrong value. Expected {expected}, got {obj.value}.",
)
def _test_boolean_object(self, obj: objects.Object, expected: bool):
if not isinstance(obj, objects.Boolean):
self.fail(f"Expected object to be Boolean, got {type(obj)}.")
self.assertEqual(
obj.value,
expected,
f"Object has wrong value. Expected {expected}, got {obj.value}.",
)
def _test_null_object(self, obj: objects.Object):
if obj is not objects.Null():
self.fail(f"Object is not NULL. Got {obj}.")
def _test_eval(self, input_case: str):
lexer = Lexer(input_case)
parser = Parser(lexer)
env = objects.Environment()
program = parser.parse_program()
return evaluator.evaluate(program, env)
if __name__ == "__main__":
unittest.main()
| 32.474934 | 87 | 0.456451 | 12,134 | 0.985863 | 0 | 0 | 0 | 0 | 0 | 0 | 3,916 | 0.318167 |
d0be74bdfe9cb84b8767afe5f63676a2412c89f4 | 1,074 | py | Python | chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py | codenote/chromium-test | 0637af0080f7e80bf7d20b29ce94c5edc817f390 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
]
| 1 | 2018-03-10T13:08:49.000Z | 2018-03-10T13:08:49.000Z | chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py | codenote/chromium-test | 0637af0080f7e80bf7d20b29ce94c5edc817f390 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
]
| null | null | null | chrome/common/extensions/docs/examples/extensions/native_messaging/echo.py | codenote/chromium-test | 0637af0080f7e80bf7d20b29ce94c5edc817f390 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
]
| 1 | 2020-11-04T07:25:45.000Z | 2020-11-04T07:25:45.000Z | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# A simple native client in python.
# All this client does is echo the text it receives back at the extension.
import sys
import struct
def Main():
message_number = 0
while 1:
    # Read the message length header (first 4 bytes).
    text_length_bytes = sys.stdin.read(4)
    if len(text_length_bytes) == 0:
      break
    # Unpack the length as a native-endian 32-bit integer.
    text_length = struct.unpack('i', text_length_bytes)[0]
# Read the text (JSON object) of the message.
text = sys.stdin.read(text_length).decode('utf-8')
message_number += 1
response = '{{"id": {0}, "echo": {1}}}'.format(message_number,
text).encode('utf-8')
try:
sys.stdout.write(struct.pack("I", len(response)))
sys.stdout.write(response)
sys.stdout.flush()
except IOError:
break
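# Hypothetical helper (not part of the original sample): builds a frame in the
# format Main() expects, a 4-byte native-endian length header followed by the
# JSON text. Useful when piping test input into this host.
def MakeTestFrame(json_text):
  payload = json_text.encode('utf-8')
  return struct.pack('I', len(payload)) + payload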
if __name__ == '__main__':
Main()
| 25.571429 | 74 | 0.633147 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 472 | 0.439479 |
d0bf49499bee89967eb7d175fcbf84d7e2af6904 | 1,140 | py | Python | covid_19/users_app/models.py | MikePolyakov/djanjo_project | 4d80cac9142bacdaa91b5f7be0c7377a365c3db9 | [
"MIT"
]
| null | null | null | covid_19/users_app/models.py | MikePolyakov/djanjo_project | 4d80cac9142bacdaa91b5f7be0c7377a365c3db9 | [
"MIT"
]
| 6 | 2021-06-04T23:11:44.000Z | 2022-03-12T00:29:55.000Z | covid_19/users_app/models.py | id2k1149/django_project | 4d80cac9142bacdaa91b5f7be0c7377a365c3db9 | [
"MIT"
]
| null | null | null | from django.db import models
from django.contrib.auth.models import AbstractUser
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
from django.dispatch import receiver
class AppUser(AbstractUser):
email = models.EmailField(unique=True)
is_author = models.BooleanField(default=False)
    # Override the save method
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
        # Create the user's profile
        # if one has not been created yet
if not Profile.objects.filter(user=self).exists():
Profile.objects.create(user=self)
class Profile(models.Model):
    # Create a Profile when the user is created
info = models.TextField(blank=True)
user = models.OneToOneField(AppUser, on_delete=models.CASCADE)
# Signal-based alternative on user save (risky to use, hard to trace)
# @receiver(post_save, sender=AppUser)
# def create_profile(sender, instance, **kwargs):
#     print('Signal handler fired')
# if not Profile.objects.filter(user=instance).exists():
# Profile.objects.create(user=instance)
| 30.810811 | 79 | 0.728947 | 663 | 0.503799 | 0 | 0 | 0 | 0 | 0 | 0 | 630 | 0.478723 |
d0bf9ddf2a1b5e4b50f545954e0579d25793cb8e | 1,748 | py | Python | Wrappers/Python/setup.py | lauramurgatroyd/CILViewer | 3aafa4693498a55ffd270c55118399dd807dee5f | [
"Apache-2.0"
]
| null | null | null | Wrappers/Python/setup.py | lauramurgatroyd/CILViewer | 3aafa4693498a55ffd270c55118399dd807dee5f | [
"Apache-2.0"
]
| null | null | null | Wrappers/Python/setup.py | lauramurgatroyd/CILViewer | 3aafa4693498a55ffd270c55118399dd807dee5f | [
"Apache-2.0"
]
| null | null | null | # -*- coding: utf-8 -*-
# Copyright 2017 Edoardo Pasca
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Wed Jun 7 09:57:13 2017
@author: ofn77899
"""
from distutils.core import setup
#from setuptools import setup, find_packages
import os
import sys
cil_version = "20.07.4"
setup(
name="ccpi-viewer",
version=cil_version,
packages=['ccpi','ccpi.viewer', 'ccpi.viewer.utils'],
install_requires=['numpy','vtk'],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
#install_requires=['docutils>=0.3'],
# package_data={
# # If any package contains *.txt or *.rst files, include them:
# '': ['*.txt', '*.rst'],
# # And include any *.msg files found in the 'hello' package, too:
# 'hello': ['*.msg'],
# },
zip_safe = False,
# metadata for upload to PyPI
author="Edoardo Pasca",
author_email="[email protected]",
description='CCPi Core Imaging Library - VTK Viewer Module',
license="Apache v2.0",
keywords="3D data viewer",
url="http://www.ccpi.ac.uk", # project home page, if any
# could also include long_description, download_url, classifiers, etc.
)
| 31.214286 | 76 | 0.677346 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,422 | 0.813501 |
d0c0175702c2bf4073b22292f0de9bec50aa18ec | 4,658 | py | Python | src/das/model_analyzer/analyzer_args.py | saifullah3396/doc_shap | 0b65912bc9abc8721b5a8aec008a438fa13e8cbf | [
"Apache-2.0"
]
| null | null | null | src/das/model_analyzer/analyzer_args.py | saifullah3396/doc_shap | 0b65912bc9abc8721b5a8aec008a438fa13e8cbf | [
"Apache-2.0"
]
| null | null | null | src/das/model_analyzer/analyzer_args.py | saifullah3396/doc_shap | 0b65912bc9abc8721b5a8aec008a438fa13e8cbf | [
"Apache-2.0"
]
| null | null | null | """
Defines the dataclasses for holding analyzer task related arguments.
"""
import json
import math
import sys
from dataclasses import asdict, dataclass, field
from enum import Enum
from typing import Any, Dict, List, Optional, Union
from das.models.model_args import ModelArguments
from das.utils.basic_utils import create_logger
logger = create_logger(__name__)
@dataclass
class AnalysisTaskArguments:
task_name: str = ""
@dataclass
class GenerateMetricsTaskArguments(AnalysisTaskArguments):
task_name: str = "generate_metrics"
metrics: List = field(default_factory=lambda: ["accuracy"])
@dataclass
class GenerateRobustnessMetricsTaskArguments(AnalysisTaskArguments):
task_name: str = "generate_robustness_metrics"
baseline_metrics_path: str = ""
@dataclass
class SimilarImagesClusteringTaskArguments(AnalysisTaskArguments):
task_name: str = "similar_images_clustering"
dim_reduction_method: str = "pca"
dim_reduction_args: dict = field(default_factory=lambda: {"n_components": 128})
generate_metrics: bool = True
visualize_clusters: bool = False
@dataclass
class GenerateShapValuesTaskArguments(AnalysisTaskArguments):
task_name: str = "generate_shap_values"
analyze_complete_dataset: bool = False
num_test_samples_per_class: int = 1
shap_num_bg_samples: int = 100
start_idx: int = 0
end_idx: int = 999999
bg_name: str = "shap_background"
save_bg_to_cache: bool = True
save_samples_to_cache: bool = True
load_bg_from_cache: bool = True
load_samples_from_cache: bool = True
ranked_outputs: Optional[int] = None
only_get_true_shap_value: bool = False
only_get_pred_shap_value: bool = False
get_true_and_pred_shap_value: bool = False
@dataclass
class GenerateShapVisualizationsTaskArguments(AnalysisTaskArguments):
task_name: str = "generate_shap_visualizations"
resize_shap: bool = True
@dataclass
class FeaturePerturbationAttackConfig:
arg_name: str = "dropout_pixels"
mode: str = "linear"
arg_min: int = 0
arg_max: Optional[int] = 0
arg_step: int = 4
@dataclass
class FeaturePerturbationTaskArguments(AnalysisTaskArguments):
task_name: str = "feature_perturbation"
feature_importance_grid_size: int = 4
black_and_white_threshold: int = 125
importance_order: str = "descending"
max_perturbation_percentage: float = 0.05
attack_type: str = "black_white_pixel_dropout"
    # Use a factory so each instance gets its own (mutable) attack config.
    attack_config: FeaturePerturbationAttackConfig = field(default_factory=FeaturePerturbationAttackConfig)
save_visualizations: bool = True
save_perturbations: bool = True
n_vis_per_class: int = 100
resize_perturbation: bool = True
shuffle_data: bool = False
max_data_per_label: int = -1
random_seed: int = 0
@dataclass
class FeaturePerturbationAnalysisTaskArguments(AnalysisTaskArguments):
most_relevant_first_data: str = ""
least_relevant_first_data: str = ""
random_data: List = field(default_factory=lambda: [])
task_name: str = "feature_perturbation_analysis"
SUPPORTED_MODEL_ARGUMENTS = {
"generate_metrics": GenerateMetricsTaskArguments,
"generate_robustness_metrics": GenerateRobustnessMetricsTaskArguments,
"generate_shap_values": GenerateShapValuesTaskArguments,
"generate_shap_visualizations": GenerateShapVisualizationsTaskArguments,
"feature_perturbation": FeaturePerturbationTaskArguments,
"similar_images_clustering": SimilarImagesClusteringTaskArguments,
"feature_perturbation_analysis": FeaturePerturbationAnalysisTaskArguments,
}
class AnalysisTaskArgumentsFactory:
@staticmethod
def create_child_arguments(task_name: str):
"""
Returns the analysis_task arguments class if present
Args:
task_name: The task name for which the configuration arguments are to
be returned.
"""
try:
model_args_class = SUPPORTED_MODEL_ARGUMENTS.get(task_name, None)
if model_args_class is None:
raise ValueError(f"Analysis task {task_name} is not supported!")
return model_args_class
except Exception as exc:
logger.exception(
f"Exception raised while loading analysis task arguments "
f"[{task_name}]: {exc}"
)
sys.exit(1)
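# Example (hypothetical usage): resolve a task's argument class by name and
# instantiate it with task-specific options.
#   args_cls = AnalysisTaskArgumentsFactory.create_child_arguments("generate_metrics")
#   task_args = args_cls(metrics=["accuracy"])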
@dataclass
class AnalyzerArguments:
"""
    Arguments related to the analyzer run.
"""
cls_name = "analyzer_args"
analyzer_output_dir: str
analysis_tasks: List[AnalysisTaskArguments]
models: Union[List[ModelArguments], ModelArguments]
output_data_subdir: str = ""
def __post_init__(self):
pass
| 30.444444 | 86 | 0.738729 | 3,641 | 0.781666 | 0 | 0 | 3,711 | 0.796694 | 0 | 0 | 945 | 0.202877 |
d0c3051d812d65b6baa90af1922f0a2918135e6d | 128 | py | Python | django_cenvars/tools/sanitize.py | martinphellwig/django-cenvars | 2b7ae6e719fa6ae7ffb8f0cedad615114064dab1 | ["BSD-2-Clause"] | null | null | null | django_cenvars/tools/sanitize.py | martinphellwig/django-cenvars | 2b7ae6e719fa6ae7ffb8f0cedad615114064dab1 | ["BSD-2-Clause"] | null | null | null | django_cenvars/tools/sanitize.py | martinphellwig/django-cenvars | 2b7ae6e719fa6ae7ffb8f0cedad615114064dab1 | ["BSD-2-Clause"] | null | null | null |
"""
Perform sanitization checks prior to releasing the app as ready.
"""
def main():
"Perform sanitization checks"
pass
| 16 | 63 | 0.695313 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 100 | 0.78125 |
d0c48f41277d0c455a9e37dabfa1c49d07148ba0 | 1,982 | py | Python | src/server/ClientHandler.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | ["MIT"] | 1 | 2021-06-20T05:47:53.000Z | 2021-06-20T05:47:53.000Z | src/server/ClientHandler.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | ["MIT"] | null | null | null | src/server/ClientHandler.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | ["MIT"] | null | null | null |
import asyncio
import traceback
from asyncio import StreamWriter, StreamReader, Task
from .BaseClientHandler import BaseClientHandler
from data import Message
class ClientHandler(BaseClientHandler):
_inputTask: Task
_errorCheckTask: Task
reader: StreamReader
writer: StreamWriter
# noinspection PyUnresolvedReferences
def __init__( self, server: 'AServer', reader: StreamReader, writer: StreamWriter ):
super().__init__( server, ':'.join( [ str(i) for i in writer.get_extra_info('peername') ] ) )
self.reader = reader
self.writer = writer
print( f'[{self.addr}] starting input loop' )
self._inputTask = asyncio.create_task( self.InputLoop() )
self._errorCheckTask = asyncio.create_task( self.CheckErrors() )
async def Send( self, message: Message ) -> None:
if self.isAlive():
message = await self.ReplacePlaceholders(message)
enc_message = message.toJson().encode( 'utf8' )
header = int.to_bytes( len( enc_message ), length=4, byteorder='big' )
self.writer.write( header )
self.writer.write( enc_message )
await self.writer.drain()
async def CheckErrors( self ) -> None:
while True:
await asyncio.sleep(10)
exc: Exception = self.reader.exception()
if exc is not None:
if isinstance( exc, ConnectionResetError ):
self._alive = False
break
print('Exception on reader:')
traceback.print_exception( type( exc ), exc, exc.__traceback__ )
	async def InputLoop( self ) -> None:
		while (
			self.isAlive() and (
				self.reader.exception() is None or
				isinstance( self.reader.exception(), ConnectionResetError )
			)
		):
			try:
				# read() may return fewer bytes than asked for, so use readexactly()
				# for both the 4-byte big-endian header and the JSON payload
				size = int.from_bytes( await self.reader.readexactly( 4 ), 'big' )
				msg = Message.fromJson(
					( await self.reader.readexactly( size ) ).decode( 'utf8' )
				)
			except asyncio.IncompleteReadError:
				# the peer closed the connection before a full frame arrived
				break
			await self.HandleMessage(msg)
		print( f'closed connection to [{self.addr}]' )
		self._alive = False
def isAlive( self ) -> bool:
return self._alive and not ( self.reader.at_eof() or self.writer.is_closing() )
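# Minimal client-side sketch (not part of the original file) of the wire
# format used above: a 4-byte big-endian length header followed by the
# UTF-8 encoded JSON body of a Message. The coroutine name is illustrative.
async def example_send_framed( writer: StreamWriter, message: Message ) -> None:
	payload = message.toJson().encode( 'utf8' )
	writer.write( len( payload ).to_bytes( 4, 'big' ) )
	writer.write( payload )
	await writer.drain()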
| 30.492308 | 95 | 0.700807 | 1,819 | 0.91776 | 0 | 0 | 0 | 0 | 1,127 | 0.568618 | 176 | 0.088799 |
d0c5b0e690a24fec09fd97682f7f29681f7e57f6 | 8,658 | py | Python | strangeflix/room/consumers.py | samsoldeinstein/webster2020 | 9795635e806caa261bb33d629f3d1f2bd603638c | ["MIT"] | 6 | 2020-11-02T16:40:56.000Z | 2020-11-07T06:59:00.000Z | strangeflix/room/consumers.py | samsoldeinstein/webster2020 | 9795635e806caa261bb33d629f3d1f2bd603638c | ["MIT"] | null | null | null | strangeflix/room/consumers.py | samsoldeinstein/webster2020 | 9795635e806caa261bb33d629f3d1f2bd603638c | ["MIT"] | 2 | 2020-11-03T05:20:25.000Z | 2021-11-03T05:38:47.000Z |
# chat/consumers.py
import json
from channels.generic.websocket import AsyncWebsocketConsumer
from .models import RoomControl
from channels.db import database_sync_to_async
class ChatConsumer(AsyncWebsocketConsumer):
# when a user connect
async def connect(self):
self.user = self.scope['user']
self.room_name = self.scope['url_route']['kwargs']['room_name']
self.room_group_name = 'chat_%s' % self.room_name
if self.user.is_authenticated:
self.is_member = await self.check_if_member()
self.is_host = await self.check_if_host()
if self.is_member or self.is_host:
# Join room group
await self.channel_layer.group_add(
self.room_group_name,
self.channel_name
)
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'add_user',
'user':self.user.username
}
)
await self.accept()
# to check membership of the user
@database_sync_to_async
def check_if_member(self):
return self.user in RoomControl.objects.filter(room_id = self.room_name).first().members.all()
# to check if user is host
@database_sync_to_async
def check_if_host(self):
return self.user == RoomControl.objects.filter(room_id = self.room_name).first().host_user
# when a user exits
async def disconnect(self, close_code):
# Leave room group
if(hasattr(self,'room_group_name')):
await self.channel_layer.group_discard(
self.room_group_name,
self.channel_name
)
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'remove_user',
'user':self.user.username
}
)
# Receive message from WebSocket
async def receive(self, text_data):
if self.user.is_authenticated:
self.is_member = await self.check_if_member()
self.is_host = await self.check_if_host()
if self.is_member or self.is_host:
text_data_json = json.loads(text_data)
message = text_data_json['message']
message_type = text_data_json['type']
# Send message to room group
if message_type == 'chat_message':
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'chat_message',
'message': message,
'user':self.user.username
}
)
if message_type == 'play':
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'play',
'message':message,
'user':self.user.username
}
)
if message_type == 'skip':
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'skip',
'message': message,
'skipAmount': text_data_json['skipAmount'],
'user':self.user.username
}
)
if message_type == 'upd':
await self.channel_layer.group_send(
self.room_group_name,
{
'type':'upd',
'message':message,
'updTime':text_data_json['updTime'],
'user':self.user.username
}
)
if message_type == 'join':
await self.channel_layer.group_send(
self.room_group_name,
{
'type':'join',
'message':message,
'user':self.user.username
}
)
if message_type == 'hostupd' and self.is_host:
await self.channel_layer.group_send(
self.room_group_name,
{
'type':'hostupd',
'message':message,
'pausedStatus':text_data_json['pausedStatus'],
'currentTimeStatus':text_data_json['currentTimeStatus'],
'videoStatus':text_data_json['videoStatus'],
'users':text_data_json['users'],
'user':self.user.username
}
)
if message_type == 'close_room':
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'close_room',
'message': message,
'user':self.user.username
}
)
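        # The JSON payloads routed above share a small schema (examples are
        # illustrative, derived from the branches in this method):
        #   {"type": "chat_message", "message": "..."}
        #   {"type": "skip", "message": "...", "skipAmount": 10}
        #   {"type": "upd", "message": "...", "updTime": 42.0}
        #   {"type": "hostupd", "message": "...", "pausedStatus": true,
        #    "currentTimeStatus": 42.0, "videoStatus": "...", "users": [...]}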
# Receive message from room group
async def chat_message(self, event):
message = event['message']
# Send message to WebSocket
await self.send(text_data=json.dumps({
'type':'chat_message',
'message': message,
'user':event['user']
}))
#Send play control
async def play(self,event):
message = event['message']
#Send play control to WebSocket
await self.send(text_data=json.dumps({
'type':'play',
'message':message,
'user':event['user']
}))
#Send skip control
async def skip(self,event):
message = event['message']
skipAmount = event['skipAmount']
#Send skip control to WebSocket
await self.send(text_data=json.dumps({
'type':'skip',
'message':message,
'skipAmount':skipAmount,
'user':event['user']
}))
#Send updated time
async def upd(self,event):
message = event['message']
updTime = event['updTime']
#Send updated time control to WebSocket
await self.send(text_data=json.dumps({
'type':'upd',
'message':message,
'updTime':updTime,
'user':event['user']
}))
#Send join request
async def join(self,event):
message = event['message']
#Send join request to WebSocket
await self.send(text_data=json.dumps({
'type':'join',
'message':message,
'user':event['user']
}))
#Send host update
async def hostupd(self,event):
message = event['message']
pausedStatus = event['pausedStatus']
currentTimeStatus = event['currentTimeStatus']
videoStatus = event['videoStatus']
users = event['users']
#Send join request to WebSocket
await self.send(text_data=json.dumps({
'type':'hostupd',
'message':message,
'pausedStatus': pausedStatus,
'currentTimeStatus': currentTimeStatus,
'videoStatus' : videoStatus,
'users':users,
'user':event['user']
}))
#Send add user
async def add_user(self,event):
print(event['user'])
#Send play control to WebSocket
await self.send(text_data=json.dumps({
'type':'add_user',
'user':event['user']
}))
#Send remove user
async def remove_user(self,event):
#Send play control to WebSocket
await self.send(text_data=json.dumps({
'type':'remove_user',
'user':event['user']
}))
# Receive host left from room group
async def close_room(self, event):
message = event['message']
# Send host left to WebSocket
await self.send(text_data=json.dumps({
'type':'close_room',
'message': message,
'user':event['user']
        }))
 | 36.378151 | 102 | 0.473897 | 8,484 | 0.979903 | 0 | 0 | 308 | 0.035574 | 7,635 | 0.881843 | 1,744 | 0.201432 |
d0c819007b9eb94c341aa70c5a8a5172d3857e95 | 7,342 | py | Python | src/cnnclustering/hooks.py | janjoswig/CNN | 06ab0e07da46141cca941e99ac1a11ddc7ce233d | ["MIT"] | 4 | 2020-06-16T13:33:57.000Z | 2021-01-05T18:19:57.000Z | src/cnnclustering/hooks.py | janjoswig/CNN | 06ab0e07da46141cca941e99ac1a11ddc7ce233d | ["MIT"] | 12 | 2019-10-22T09:15:09.000Z | 2020-07-02T09:42:44.000Z | src/cnnclustering/hooks.py | janjoswig/CommonNNClustering | 06ab0e07da46141cca941e99ac1a11ddc7ce233d | ["MIT"] | null | null | null |
import numpy as np
from cnnclustering._primitive_types import P_AINDEX, P_AVALUE
from cnnclustering import _types, _fit
COMPONENT_ALT_KW_MAP = {
"input": "input_data",
"data": "input_data",
"n": "neighbours",
"na": "neighbours",
"nb": "neighbour_neighbours",
"getter": "neighbours_getter",
"ogetter": "neighbours_getter_other",
"ngetter": "neighbours_getter",
"ongetter": "neighbours_getter_other",
"dgetter": "distance_getter",
"checker": "similarity_checker",
"q": "queue",
}
COMPONENT_KW_TYPE_ALIAS_MAP = {
"neighbour_neighbours": "neighbours",
"neighbour_getter_other": "neighbours_getter",
}
COMPONENT_NAME_TYPE_MAP = {
"input_data": {
"components_mview": _types.InputDataExtComponentsMemoryview,
"neighbourhoods_mview": _types.InputDataExtNeighbourhoodsMemoryview
},
"neighbours_getter": {
"brute_force": _types.NeighboursGetterExtBruteForce,
"lookup": _types.NeighboursGetterExtLookup,
},
"distance_getter": {
"metric": _types.DistanceGetterExtMetric,
"lookup": _types.DistanceGetterExtLookup,
},
"neighbours": {
"vector": _types.NeighboursExtVector,
"uset": _types.NeighboursExtCPPUnorderedSet,
"vuset": _types.NeighboursExtVectorCPPUnorderedSet,
},
"metric": {
"dummy": _types.MetricExtDummy,
"precomputed": _types.MetricExtPrecomputed,
"euclidean": _types.MetricExtEuclidean,
"euclidean_r": _types.MetricExtEuclideanReduced,
"euclidean_periodic_r": _types.MetricExtEuclideanPeriodicReduced,
"euclidean_reduced": _types.MetricExtEuclideanReduced,
"euclidean_periodic_reduced": _types.MetricExtEuclideanPeriodicReduced,
},
"similarity_checker": {
"contains": _types.SimilarityCheckerExtContains,
"switch": _types.SimilarityCheckerExtSwitchContains,
"screen": _types.SimilarityCheckerExtScreensorted,
},
"queue": {
"fifo": _types.QueueExtFIFOQueue
},
"fitter": {
"bfs": _fit.FitterExtBFS,
"bfs_debug": _fit.FitterExtBFSDebug
}
}
def get_registered_recipe(key):
registered_recipes = {
"none": {},
"coordinates": {
"input_data": "components_mview",
"fitter": "bfs",
"fitter.ngetter": "brute_force",
"fitter.na": "vuset",
"fitter.checker": "switch",
"fitter.queue": "fifo",
"fitter.ngetter.dgetter": "metric",
"fitter.ngetter.dgetter.metric": "euclidean_r",
},
"distances": {
"input_data": "components_mview",
"fitter": "bfs",
"fitter.ngetter": "brute_force",
"fitter.na": "vuset",
"fitter.checker": "switch",
"fitter.queue": "fifo",
"fitter.ngetter.dgetter": "metric",
"fitter.ngetter.dgetter.metric": "precomputed",
},
"neighbourhoods": {
"input_data": "neighbourhoods_mview",
"fitter": "bfs",
"fitter.ngetter": "lookup",
"fitter.na": "vuset",
"fitter.checker": "switch",
"fitter.queue": "fifo",
},
"sorted_neighbourhoods": {
"input_data": "neighbourhoods_mview",
"fitter": "bfs",
"fitter.ngetter": ("lookup", (), {"is_sorted": True}),
"fitter.na": "vector",
"fitter.checker": "screen",
"fitter.queue": "fifo",
}
}
return registered_recipes[key.lower()]
def prepare_pass(data):
"""Dummy preparation hook
Use if no preparation of input data is desired.
Args:
data: Input data that should be prepared.
Returns:
(data,), {}
"""
return (data,), {}
def prepare_points_from_parts(data):
r"""Prepare input data points
Use when point components are passed as sequence of parts, e.g. as
    >>> data_args, data_kwargs = prepare_points_from_parts([[[0, 0],
    ...                                                      [1, 1]],
    ...                                                     [[2, 2],
    ...                                                      [3, 3]]])
    >>> data_args[0]
    array([[0., 0.],
           [1., 1.],
           [2., 2.],
           [3., 3.]])
    >>> data_kwargs
    {'meta': {'edges': [2, 2]}}
Recognised data formats are:
* Sequence of length *d*:
interpreted as 1 point with *d* components.
* 2D Sequence (sequence of sequences all of same length) with
length *n* (rows) and width *d* (columns):
interpreted as *n* points with *d* components.
* Sequence of 2D sequences all of same width:
interpreted as parts (groups) of points.
The returned input data format is compatible with:
* `cnnclustering._types.InputDataExtPointsMemoryview`
Args:
data: Input data that should be prepared.
Returns:
* Formatted input data (NumPy array of shape
:math:`\sum n_\mathrm{part}, d`)
* Dictionary of meta-information
Notes:
Does not catch deeper nested formats.
"""
try:
d1 = len(data)
    except TypeError:
        raise
finished = False
if d1 == 0:
# Empty sequence
data = [np.array([[]])]
finished = True
if not finished:
try:
d2 = [len(x) for x in data]
all_d2_equal = (len(set(d2)) == 1)
except TypeError:
# 1D Sequence
data = [np.array([data])]
finished = True
if not finished:
try:
d3 = [len(y) for x in data for y in x]
all_d3_equal = (len(set(d3)) == 1)
except TypeError:
if not all_d2_equal:
raise ValueError(
"Dimension mismatch"
)
# 2D Sequence of sequences of same length
data = [np.asarray(data)]
finished = True
if not finished:
if not all_d3_equal:
raise ValueError(
"Dimension mismatch"
)
# Sequence of 2D sequences of same width
data = [np.asarray(x) for x in data]
finished = True
meta = {}
meta["edges"] = [x.shape[0] for x in data]
data_args = (np.asarray(np.vstack(data), order="C", dtype=P_AVALUE),)
data_kwargs = {"meta": meta}
return data_args, data_kwargs
def prepare_neighbourhoods(data):
"""Prepare neighbourhood information by padding
Args:
data: Expects a sequence of sequences with neighbour indices.
Returns:
Data as a 2D NumPy array of shape (#points, max. number of neighbours)
and a 1D array with the actual number of neighbours for each point (data
args). Also returns meta information (data kwargs).
"""
n_neighbours = [len(s) for s in data]
pad_to = max(n_neighbours)
data = [
np.pad(a, (0, pad_to - n_neighbours[i]), mode="constant", constant_values=0)
for i, a in enumerate(data)
]
meta = {}
data_args = (
np.asarray(data, order="C", dtype=P_AINDEX),
np.asarray(n_neighbours, dtype=P_AINDEX)
)
data_kwargs = {"meta": meta}
return data_args, data_kwargs
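# Quick illustration of the padding behaviour above (hypothetical values):
# ragged neighbourhood lists are zero-padded to the longest row, while the
# second returned array keeps the true neighbour counts.
#
#   (indices, counts), kwargs = prepare_neighbourhoods([[0, 1], [2], [3, 4, 5]])
#   indices.shape  # (3, 3)
#   list(counts)   # [2, 1, 3]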
| 28.679688 | 84 | 0.566194 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,552 | 0.483792 |
d0c895b700d9298c6544f69260721fb2fce2376e | 15,620 | py | Python | cybergis/jobs.py | cybergis/jupyterlib | b39cf9c525b52fc9f67a388a751126df00b498f2 | ["NCSA"] | 5 | 2017-11-08T15:32:09.000Z | 2019-12-20T03:05:34.000Z | cybergis/jobs.py | cybergis/jupyterlib | b39cf9c525b52fc9f67a388a751126df00b498f2 | ["NCSA"] | null | null | null | cybergis/jobs.py | cybergis/jupyterlib | b39cf9c525b52fc9f67a388a751126df00b498f2 | ["NCSA"] | 1 | 2019-12-20T02:46:56.000Z | 2019-12-20T02:46:56.000Z |
#!/usr/bin/env python
from __future__ import print_function
from ipywidgets import *
from IPython.display import display
from getpass import getpass
import glob
import os
import stat
import paramiko
from string import Template
from os.path import expanduser
from pkg_resources import resource_string
from IPython.core.magic import (register_line_magic, register_cell_magic,register_line_cell_magic)
import hashlib
from itertools import izip,cycle
from IPython.display import IFrame
USERNAME = os.environ['USER']
CONF_DIR='.rg_conf'
CONF_MOD=int('700', 8) # exclusive access
CONF_FILE='%s/%s'%(CONF_DIR, USERNAME)
#ROGER_PRJ='/projects/class/jhub/users'
#JUPYTER_HOME='/mnt/jhub/users'
ROGER_PRJ='/projects/jupyter'
JUPYTER_HOME='/home'
def encrypt(plaintext):
ciphertext = ''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(plaintext, cycle(hashlib.sha256(USERNAME).hexdigest())))
return ciphertext.encode('base64')
def decrypt(ciphertext):
ciphertext = ciphertext.decode('base64')
return ''.join(chr(ord(x) ^ ord(y)) for (x,y) in izip(ciphertext, cycle(hashlib.sha256(USERNAME).hexdigest())))
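# Round-trip sanity check for the XOR keystream helpers above (illustrative):
# the keystream is derived from USERNAME, so a ciphertext written under one
# account is only decryptable under the same account.
#   assert decrypt(encrypt('secret')) == 'secret'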
def Labeled(label, widget):
width='130px'
return (Box([HTML(value='<p align="right" style="width:%s">%s  </p>'%(width,label)),widget],
layout=Layout(display='flex',align_items='center',flex_flow='row')))
def listExecutables(folder='.'):
executable = stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH
return [filename for filename in os.listdir(folder)
if os.path.isfile(filename)]# and (os.stat(filename).st_mode & executable)]
def tilemap(tif, name, overwrite=False, overlay=None,tilelvl=[9,13]):
id=hashlib.sha1(name).hexdigest()[:10]
if overwrite:
os.system('rm -rf %s'%id)
    os.system('gdal2tiles.py -e -z %d-%d -a 0,0,0 -s epsg:4326 -r bilinear -t "%s" %s %s'%(tilelvl[0], tilelvl[1], name,tif,id))
with open('%s/leaflet.html'%id) as input:
s=input.read()
s=s.replace('http://cdn.leafletjs.com','https://cdn.leafletjs.com')
s=s.replace('http://{s}.tile.osm.org','https://{s}.tile.openstreetmap.org')
addLayer='map.addLayer(lyr);'
if overlay:
os.system("wget 'https://raw.githubusercontent.com/calvinmetcalf/leaflet-ajax/master/dist/leaflet.ajax.min.js' -O %s/leaflet.ajax.min.js"%id)
s=s.replace('leaflet.js"></script>','leaflet.js"></script>\n<script src="leaflet.ajax.min.js"></script>')
vectorNewLayers = []
vectorOverlay = []
vectorAdd = []
for vecFile,vecName in overlay:
vecId=hashlib.sha1(vecName).hexdigest()[:10]
os.system('ogr2ogr -f "geojson" %s/%s.json %s'%(id,vecId,vecFile))
vectorNewLayers.append('var vecLayer%s = new L.GeoJSON.AJAX("%s.json");'%(vecId,vecId))
vectorOverlay.append('"%s":vecLayer%s'%(vecName, vecId))
vectorAdd.append('map.addLayer(vecLayer%s);'%vecId)
s=s.replace('// Map','\n'.join(vectorNewLayers)+'\n // Map')
s=s.replace('{"Layer": lyr}','{'+','.join(vectorOverlay)+', "Layer": lyr}')
addLayer+='\n'.join(vectorAdd)
s=s.replace(').addTo(map);',').addTo(map); '+addLayer)
with open('%s/leaflet.html'%id,'w') as output:
output.write(s)
return IFrame('%s/leaflet.html'%id, width='1000',height='600')
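# Example invocation (file and layer names are hypothetical; gdal2tiles.py and
# ogr2ogr must be on PATH, and the returned IFrame renders in a notebook):
#   tilemap('elevation.tif', 'Elevation', overlay=[('roads.shp', 'Roads')], tilelvl=[8, 12])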
class Job():
def __init__(self):
#user=widgets.Text(value=USERNAME,placeholder='Your ROGER Account name', description='Username',disabled=False)
#display(user)
#pw=getpass(prompt='Password')
#paramiko.util.log_to_file("ssh.log")
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.homeDir = '%s/%s'%(JUPYTER_HOME,USERNAME)
self.jobDir = self.homeDir + '/.jobs'
if not os.path.exists(self.jobDir):
os.makedirs(self.jobDir)
self.userName = USERNAME
self.rogerRoot = '%s/%s'%(ROGER_PRJ, self.userName)
self.rogerJobDir = self.rogerRoot + '/.jobs'
self.relPath = os.path.relpath(os.getcwd(), self.homeDir)
self.rogerPath = self.rogerRoot + '/' + self.relPath
self.editMode = True
self.jobId = None
with open(os.path.dirname(__file__)+'/qsub.template') as input:
self.job_template=Template(input.read())
self.login()
def login(self):
if not os.path.exists(CONF_DIR):
os.makedirs(CONF_DIR)
if stat.S_IMODE(os.stat(CONF_DIR).st_mode)!=CONF_MOD:
os.chmod(CONF_DIR, stat.S_IREAD | stat.S_IWUSR | stat.S_IXUSR)
if not os.path.exists(CONF_FILE):
#user=widgets.Text(value=USERNAME,placeholder='Your Roger Username', description='Username',disabled=False)
#display(user)
login_success = False
while not login_success:
pw=getpass(prompt='Password')
try:
self.client.connect('roger-login.ncsa.illinois.edu', username=USERNAME, password=pw)
self.sftp=self.client.open_sftp()
except Exception as e:
print(e)
else:
print('Successfully logged in as %s'%self.userName)
login_success = True
with open(CONF_FILE,'w') as output:
output.write(encrypt(pw))
else:
pw=decrypt(open(CONF_FILE).read())
try:
self.client.connect('roger-login.ncsa.illinois.edu', username=USERNAME, password=pw)
#key = paramiko.RSAKey.from_private_key_file(self.homeDir+'/.ssh/roger.key')
#self.client.connect('roger-login.ncsa.illinois.edu', username='dyin4', pkey = key)
self.sftp=self.client.open_sftp()
except Exception as e:
print(e)
else:
print('Successfully logged in as %s'%self.userName)
def submit(self,jobName='test',entrance='test.sh',nNodes=4,ppn=1,isGPU=False,walltime=1,submit=False,hideUI=False):
self.jobName=jobName
self.entrance=entrance
self.nNodes=nNodes
self.ppn=ppn
self.isGPU=isGPU
self.walltime=walltime
res=self.__submitUI(submit,hideUI)
if submit and hideUI:
return res
def __runCommand(self, command):
stdin,stdout,stderr = self.client.exec_command(command)
return ''.join(stdout.readlines())+''.join(stderr.readlines())
def __submitUI(self, direct_submit=False,hideUI=False):
        fileList=listExecutables()
if len(fileList) == 0:
with open('test.sh','w') as output:
output.write('#!/bin/bash\n\necho test')
jobName=Text(value=self.jobName)
entrance=Dropdown(
options=fileList,
value=fileList[0],
layout=Layout()
)
nNodes=IntSlider(
value=self.nNodes,
min=1,
max=10,
step=1,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='i',
slider_color='white'
)
ppn=IntSlider(
value=self.ppn,
min=1,
max=20,
step=1,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='i',
slider_color='white'
)
isGPU=RadioButtons(
options=['No GPU','GPU'],
value = 'GPU' if self.isGPU else 'No GPU'
)
walltime=FloatSlider(
value=float(self.walltime),
min=1.0,
max=48.0,
step=1.0,
continuous_update=False,
orientation='horizontal',
readout=True,
readout_format='.1f',
slider_color='white'
)
preview=Button(
description='Preview Job script',
button_style='', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Preview Job'
)
jobview=Textarea(
layout=Layout(width='500px',height='225px',max_width='1000px', max_height='1000px')
)
confirm=Button(
description='Submit Job',
button_style='', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Submit job'
)
status=HTML(
layout=Layout(width='850px',height='200px',max_width='1000px', min_height='200px', max_height='1000px')
)
refresh=Button(
description='Refresh Status',
disabled=True
)
cancel=Button(
description='Cancel Job',
disabled=True
)
newJob=Button(
description='New Job',
disabled=True
)
jobEdits = [jobName,entrance,nNodes,ppn,isGPU,walltime,confirm]
postSubmission = [refresh, cancel, newJob]
def switchMode():
if not self.editMode:
status.value = ''
for w in jobEdits:
w.disabled = self.editMode
jobview.disabled = self.editMode
self.editMode = not self.editMode
for w in postSubmission:
w.disabled = self.editMode
def click_preview(b):
jobview.value=self.job_template.substitute(
jobname = jobName.value,
n_nodes = nNodes.value,
is_gpu = isGPU.value.lower().replace(' ',''),
ppn = ppn.value,
walltime = '%d:00:00'%int(walltime.value),
username = self.userName,
jobDir = self.rogerJobDir,
rogerPath= self.rogerPath,
exe = entrance.value
)
click_preview(1)
preview.on_click(click_preview)
for w in jobEdits:
w.observe(click_preview, names='value')
def refreshStatus(b):
#status.value='<pre>'+self.__runCommand('date; qstat | awk \'NR < 3 || /%s/\''%(self.username))+'</pre>'
if self.jobId is None:
status.value='<pre><font size=2>%s</font></pre>'%('\n'*8)
return
result = self.__runCommand('date; qstat -a %s | sed 1,3d '%self.jobId)
if 'Unknown Job Id Error' in result:
result = 'Job %s is finished'%self.jobId
est_time= '\n'*7
else:
est_time = self.__runCommand('showstart %s | head -3'%self.jobId)
if 'cannot locate job' in est_time:
est_time = 'Job %s is currently out of queue.\n\n'%self.jobId
status.value='<pre><font size=2>%s\n%s</font></pre>'%(result, est_time)
refreshStatus(1)
refresh.on_click(refreshStatus)
def submit(b):
filename='%s.pbs'%jobName.value
with open(self.jobDir + '/' + filename,'w') as output:
output.write(jobview.value)
self.jobId = self.__runCommand('qsub %s/%s 2>/dev/null'%(self.rogerJobDir, filename)).strip()
switchMode()
refreshStatus(1)
#status.value='<pre>'+self.__runCommand('qsub %s >/dev/null 2>&1; date; qstat | awk \'NR < 3 || /%s/ \''%(filename,self.username))+'</pre>'
#status.value='<pre><font size=2>'+self.__runCommand('date; qstat -u %s | sed 1,3d'%(self.userName))+'</font></pre>'
confirm.on_click(submit)
def click_cancel(b):
if self.jobId:
self.__runCommand('qdel %s'%self.jobId)
switchMode()
cancel.on_click(click_cancel)
def click_newJob(b):
switchMode()
newJob.on_click(click_newJob)
submitForm=VBox([
Labeled('Job name', jobName),
Labeled('Executable', entrance),
Labeled('No. nodes', nNodes),
Labeled('Cores per node', ppn),
Labeled('GPU needed', isGPU),
Labeled('Walltime (h)', walltime),
#Labeled('', preview),
Labeled('Job script', jobview),
Labeled('', confirm)
])
statusTab=VBox([
Labeled('Job Status', status),
Labeled('', HBox([refresh,cancel,newJob])),
])
if direct_submit:
submit(1)
#display(Tab([submitForm, statusTab], _titles={0: 'Submit New Job', 1: 'Check Job Status'}))
if direct_submit:
if hideUI:
return self.jobId
else:
display(VBox([
Labeled('Job script', jobview),
VBox([
Labeled('Job Status', status),
Labeled('', HBox([refresh,cancel])),
])
]))
else:
display(VBox([submitForm, statusTab]))
def listRunning(self, user=USERNAME, hideUI=False):
header=HTML(
layout=Layout(width='800px',max_width='1000px',
min_width='50px', max_height='1000px')
)
status=SelectMultiple(
layout=Layout(width='850px',height='125px',max_width='1000px',
min_width='800px', min_height='125px', max_height='1000px')
)
refresh=Button(
description='Refresh Status',
disabled=False
)
cancel=Button(
description='Cancel Job',
disabled=False
)
def refreshStatus(b):
#status.value='<pre>'+self.__runCommand('date; qstat | awk \'NR < 3 || /%s/\''%(self.username))+'</pre>'
result = self.__runCommand("qstat | sed -n '1,2p;/%s/p'"%user)
header.value='<pre>%s</pre>'%result
self.runningIds = [_.split()[0] for _ in result.strip().split('\n')[2:]]
#status.options = [_.split()[0] for _ in result.strip().split('\n')[2:]]
refreshStatus(1)
refresh.on_click(refreshStatus)
def click_cancel(b):
pass
#self.__runCommand('qdel %s'%status.value[0].split()[0])
cancel.on_click(click_cancel)
if not hideUI:
display(
VBox([
header,
#HBox([status,header]),
#status,
HBox([refresh, cancel])
])
)
else:
return self.runningIds
def cancel(self, jobIds):
if isinstance(jobIds, str):
self.__runCommand('qdel %s'%jobIds)
        elif isinstance(jobIds, list):
self.__runCommand('qdel %s'%' '.join(jobIds))
#def showDetail(self, jobId): # Not handling large output
# print(self.__runCommand('qstat -f %s'%jobId))
#@register_line_magic
#def roger(line):
# Roger()
#del roger
| 37.368421 | 151 | 0.541613 | 12,252 | 0.784379 | 0 | 0 | 0 | 0 | 0 | 0 | 3,646 | 0.233419 |
d0c95eb3b0bfb04075898983cf10d20a892318cb | 5,419 | py | Python | compile.py | Wizard-collab/wizard_2 | a2cb23362e178a0205f6dd0b9b4328c329b5b142 | [
"MIT"
]
| 1 | 2021-10-13T15:07:32.000Z | 2021-10-13T15:07:32.000Z | compile.py | Wizard-collab/wizard_2 | a2cb23362e178a0205f6dd0b9b4328c329b5b142 | [
"MIT"
]
| null | null | null | compile.py | Wizard-collab/wizard_2 | a2cb23362e178a0205f6dd0b9b4328c329b5b142 | [
"MIT"
]
| null | null | null | import subprocess
import os
import shutil
import time
import yaml
import sys
import logging
logger = logging.getLogger(__name__)
class compile():
def __init__(self):
args = sys.argv
args.pop(0)
if len(args) >= 1:
self.release_type = args.pop(0)
else:
self.release_type = None
self.build_folder = None
self.get_release_name()
self.compile()
def get_release_name(self):
if self.release_type is not None:
compil_dir = 'compile'
if not os.path.isdir(compil_dir):
os.mkdir(compil_dir)
compil_data_file = 'version.yaml'
if not os.path.isfile(compil_data_file):
compil_data_dic = dict()
compil_data_dic['builds'] = 0
# version name : MAJOR.MINOR.PATCH
compil_data_dic['MAJOR'] = 2
compil_data_dic['MINOR'] = 0
compil_data_dic['PATCH'] = 0
with open(compil_data_file, 'w') as f:
yaml.dump(compil_data_dic, f)
else:
with open(compil_data_file, 'r') as f:
compil_data_dic = yaml.load(f, Loader=yaml.Loader)
build_no = compil_data_dic['builds'] + 1
MAJOR = compil_data_dic['MAJOR']
MINOR = compil_data_dic['MINOR']
PATCH = compil_data_dic['PATCH']
if self.release_type == 'MAJOR':
MAJOR += 1
MINOR = 0
PATCH = 0
elif self.release_type == 'MINOR':
MINOR += 1
PATCH = 0
elif self.release_type == 'PATCH':
PATCH += 1
elif self.release_type == 'REBUILD':
pass
else:
logger.error(f"{self.release_type} is not a valid release type")
MAJOR = None
MINOR = None
PATCH = None
            if None not in (MAJOR, MINOR, PATCH):
release_name = f"{MAJOR}.{MINOR}.{PATCH}"
self.build_folder = os.path.join(compil_dir, f"{release_name}_{str(build_no).zfill(4)}")
self.setup_name = f'{release_name}.{str(build_no).zfill(4)}-setup.exe'
compil_data_dic['MAJOR'] = MAJOR
compil_data_dic['MINOR'] = MINOR
compil_data_dic['PATCH'] = PATCH
compil_data_dic['builds'] = build_no
compil_data_dic['date'] = time.time()
with open(compil_data_file, 'w') as f:
yaml.dump(compil_data_dic, f)
logger.info(f"Release name : {release_name}")
logger.info(f"Build : {build_no}")
else:
logger.error(f"please provide a release type")
def compile(self):
if self.build_folder is not None:
self.clean_pycache()
if os.path.isdir('dist'):
shutil.rmtree('dist')
if os.path.isdir('build'):
shutil.rmtree('build')
command_line = "PyInstaller wizard.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller create_repository.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller PyWizard.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller wizard_cmd.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller server.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller uninstall.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller error_handler.spec"
p = subprocess.Popen(command_line)
p.wait()
command_line = "PyInstaller project_manager.spec"
p = subprocess.Popen(command_line)
p.wait()
folders_list = ['ressources', 'softwares']
dist_folder = 'dist/Wizard'
for folder in folders_list:
destination = os.path.join(dist_folder, folder)
shutil.copytree(folder, destination)
files_list = [ 'version.yaml',
'LICENSE',
'wapi.py',
'dist/PyWizard/PyWizard.exe',
'dist/PyWizard/PyWizard.exe.manifest',
'dist/Create Repository/Create Repository.exe',
'dist/Create Repository/Create Repository.exe.manifest',
'dist/wizard_cmd/wizard_cmd.exe',
'dist/wizard_cmd/wizard_cmd.exe.manifest',
'dist/server/server.exe',
'dist/server/server.exe.manifest',
'dist/uninstall.exe',
'dist/Project Manager/Project Manager.exe',
'dist/Project Manager/Project Manager.exe.manifest',
'dist/error_handler/error_handler.exe',
'dist/error_handler/error_handler.exe.manifest']
for file in files_list:
destination = os.path.join(dist_folder, os.path.basename(file))
shutil.copyfile(file, destination)
shutil.copytree(dist_folder, self.build_folder)
if os.path.isdir('dist'):
shutil.rmtree('dist')
if os.path.isdir('build'):
shutil.rmtree('build')
shutil.make_archive(f'{self.build_folder}', 'zip', self.build_folder)
if os.path.isdir(self.build_folder):
shutil.rmtree(self.build_folder)
# Making installer
zip_file = self.build_folder+'.zip'
shutil.copyfile(zip_file, '__wizard__.zip')
command_line = "PyInstaller installer.spec"
p = subprocess.Popen(command_line)
p.wait()
shutil.copyfile('dist/__installer_temp__.exe', os.path.join('compile', self.setup_name))
os.remove('__wizard__.zip')
if os.path.isdir('dist'):
shutil.rmtree('dist')
if os.path.isdir('build'):
shutil.rmtree('build')
self.clean_pycache()
os.startfile(os.path.dirname(self.build_folder))
def clean_pycache(self):
total_chars = 0
total_files = 0
for root, dirs, files in os.walk(os.path.abspath(""), topdown=False):
for directory in dirs:
if directory == '__pycache__':
dir_name = os.path.join(root, directory)
logger.info(f"Deleting {dir_name}...")
shutil.rmtree(dir_name)
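# Usage sketch: the release type comes from the command line (see __init__),
# e.g. "python compile.py MINOR" bumps MINOR and resets PATCH before building,
# while "python compile.py REBUILD" rebuilds the current version unchanged.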
if __name__ == '__main__':
    compile()
 | 29.291892 | 92 | 0.677985 | 5,250 | 0.968813 | 0 | 0 | 0 | 0 | 0 | 0 | 1,519 | 0.28031 |
d0cbf9627e932b48a14476699153120bd9f96cba | 990 | py | Python | ibms_project/ibms/migrations/0011_auto_20190814_1110.py | parksandwildlife/ibms | caea0cb15deed1744ee73a6a44c264650391f71d | [
"Apache-2.0"
]
| 2 | 2019-09-07T20:39:29.000Z | 2021-09-16T12:02:16.000Z | ibms_project/ibms/migrations/0011_auto_20190814_1110.py | ropable/ibms | 8cb2c24ad0202e961c4cf7e3c79385f5716b8c63 | [
"Apache-2.0"
]
| 11 | 2020-06-18T06:53:01.000Z | 2022-02-11T01:55:42.000Z | ibms_project/ibms/migrations/0011_auto_20190814_1110.py | ropable/ibms | 8cb2c24ad0202e961c4cf7e3c79385f5716b8c63 | [
"Apache-2.0"
]
| 5 | 2016-01-18T04:36:48.000Z | 2017-09-07T06:38:28.000Z | # Generated by Django 2.1.11 on 2019-08-14 03:10
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('sfm', '0001_squashed_0003_auto_20180319_0924'),
('ibms', '0010_auto_20190808_1104'),
]
operations = [
migrations.AlterUniqueTogether(
name='corporatestrategy',
unique_together={('corporateStrategyNo', 'fy')},
),
migrations.AlterUniqueTogether(
name='glpivdownload',
unique_together={('gLCode', 'fy')},
),
migrations.AlterUniqueTogether(
name='ibmdata',
unique_together={('ibmIdentifier', 'fy')},
),
migrations.AlterUniqueTogether(
name='ncservicepriority',
unique_together={('servicePriorityNo', 'fy')},
),
migrations.AlterUniqueTogether(
name='ncstrategicplan',
unique_together={('strategicPlanNo', 'fy')},
),
]
| 28.285714 | 60 | 0.579798 | 904 | 0.913131 | 0 | 0 | 0 | 0 | 0 | 0 | 302 | 0.305051 |
d0ce5b7ab68cf3a392399184b703c3027b6d7ef3 | 180 | py | Python | config.py | x5g/AutoWritePoems | 634d30587bb8e5cf15d8287d259ac22fe0798762 | ["MIT"] | null | null | null | config.py | x5g/AutoWritePoems | 634d30587bb8e5cf15d8287d259ac22fe0798762 | ["MIT"] | null | null | null | config.py | x5g/AutoWritePoems | 634d30587bb8e5cf15d8287d259ac22fe0798762 | ["MIT"] | null | null | null |
class Config(object):
poetry_file = 'poetry4min.txt'
weight_file = 'poetry_model_4_1.h5'
    # predict the seventh character from the preceding six characters
max_len = 4
batch_size = 128
learning_rate = 0.0005
| 22.5 | 39 | 0.672222 | 203 | 0.995098 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 0.367647 |
d0d20743fdd39b355e497598543bd007290f251f | 840 | py | Python | src/discolight/loaders/annotation/widthheightcsv.py | denzel-datature/discolight | 7c8309d3f883263b2e4cae0b289f17be1d1c07ea | ["MIT"] | 27 | 2020-07-23T08:09:25.000Z | 2022-03-01T08:24:43.000Z | src/discolight/loaders/annotation/widthheightcsv.py | denzel-datature/discolight | 7c8309d3f883263b2e4cae0b289f17be1d1c07ea | ["MIT"] | 7 | 2020-08-05T07:26:55.000Z | 2020-12-31T04:20:40.000Z | src/discolight/loaders/annotation/widthheightcsv.py | denzel-datature/discolight | 7c8309d3f883263b2e4cae0b289f17be1d1c07ea | ["MIT"] | 6 | 2020-07-27T04:30:01.000Z | 2020-08-13T02:39:25.000Z |
"""A CSV annotation writer that reads the bbox in x, y, w, h format."""
from discolight.annotations import BoundingBox
from .types import CSVRow, CSVAnnotationLoader
class WidthHeightCSV(CSVAnnotationLoader):
"""Loads annotations from a CSV file in the following format.
image_name, x_min, y_min, width, height, label
"""
def get_csv_row(self, row):
"""Return the image and annotation from a CSV row."""
x_min = float(row["x_min"])
y_min = float(row["y_min"])
width = float(row["width"])
height = float(row["height"])
x_max = x_min + width
y_max = y_min + height
image_name = row["image_name"]
class_idx = row["label"]
return CSVRow(image_name=image_name,
bbox=BoundingBox(x_min, y_min, x_max, y_max, class_idx))
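# A CSV file this loader accepts looks like the following (header plus one
# hypothetical row):
#
#   image_name,x_min,y_min,width,height,label
#   frame_001.jpg,48,60,120,80,person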
| 28.965517 | 78 | 0.633333 | 671 | 0.79881 | 0 | 0 | 0 | 0 | 0 | 0 | 293 | 0.34881 |
d0d3e5c8138c7d0eda8194549ae4292083be2818 | 1,286 | py | Python | test/test_day09.py | frangiz/AdventOfCode2017 | 5fc171d4a83bfb9a408b4647ded4cb3efd12247e | ["MIT"] | null | null | null | test/test_day09.py | frangiz/AdventOfCode2017 | 5fc171d4a83bfb9a408b4647ded4cb3efd12247e | ["MIT"] | null | null | null | test/test_day09.py | frangiz/AdventOfCode2017 | 5fc171d4a83bfb9a408b4647ded4cb3efd12247e | ["MIT"] | null | null | null |
from days import day09
from ddt import ddt, data, unpack
import unittest
import util
@ddt
class MyTestCase(unittest.TestCase):
@data(
[['{}'], '1'],
[['{{{}}}'], '6'],
[['{{},{}}'], '5'],
[['{{{},{},{{}}}}'], '16'],
[['{<a>,<a>,<a>,<a>}'], '1'],
[['{{<ab>},{<ab>},{<ab>},{<ab>}}'], '9'],
[['{{<!!>},{<!!>},{<!!>},{<!!>}}'], '9'],
[['{{<a!>},{<a!>},{<a!>},{<ab>}}'], '3'])
@unpack
def test_example_a(self, test_input, expected):
result = day09.part_a(test_input)
self.assertEqual(result, expected)
def test_answer_part_a(self):
result = day09.part_a(util.get_file_contents('day09.txt'))
self.assertEqual(result, '14190')
@data(
[['<>'], '0'],
[['<random characters>'], '17'],
[['<<<<>'], '3'],
[['<{!>}>'], '2'],
[['<!!>'], '0'],
[['<!!!>>'], '0'],
[['<{o"i!a,<{i<a>'], '10'])
@unpack
def test_example_b(self, test_input, expected):
result = day09.part_b(test_input)
self.assertEqual(result, expected)
def test_answer_part_b(self):
result = day09.part_b(util.get_file_contents('day09.txt'))
self.assertEqual(result, '7053')
| 29.906977 | 67 | 0.437014 | 1,185 | 0.921462 | 0 | 0 | 1,191 | 0.926128 | 0 | 0 | 302 | 0.234837 |
d0d550ba7652a9b60f892093b2e1479dc926d08c | 751 | py | Python | venv/lib/python2.7/dist-packages/landscape/lib/fd.py | pengwu/scapy_env | 3db9c5dea2e219048a2387649d6d89be342903d9 | ["MIT"] | null | null | null | venv/lib/python2.7/dist-packages/landscape/lib/fd.py | pengwu/scapy_env | 3db9c5dea2e219048a2387649d6d89be342903d9 | ["MIT"] | null | null | null | venv/lib/python2.7/dist-packages/landscape/lib/fd.py | pengwu/scapy_env | 3db9c5dea2e219048a2387649d6d89be342903d9 | ["MIT"] | null | null | null |
"""A utility module which has FD-related functions.
This module mostly exists for L{clean_fds}, so it can be imported without
accidentally getting a reactor or something else that might create a critical
file descriptor.
"""
import os
import resource
def clean_fds():
"""Close all non-stdio file descriptors.
This should be called at the beginning of a program to avoid inheriting any
unwanted file descriptors from the invoking process. Unfortunately, this
is really common in unix!
"""
rlimit_nofile = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
total_descriptors = min(4096, rlimit_nofile)
for fd in range(3, total_descriptors):
try:
os.close(fd)
except OSError:
pass
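# Typical usage sketch (assumption: call once at program start, before the
# process opens any file descriptors of its own):
#
#   clean_fds()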
| 28.884615 | 79 | 0.713715 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 462 | 0.61518 |
d0d55d407b26fa73a5076bdbaa2919b847abf548 | 6,760 | py | Python | jps-people-importer.py | UniversalSuperBox/jps-people-importer | eb7128122d00879798a88b599d90e53c139a00da | ["MIT"] | null | null | null | jps-people-importer.py | UniversalSuperBox/jps-people-importer | eb7128122d00879798a88b599d90e53c139a00da | ["MIT"] | null | null | null | jps-people-importer.py | UniversalSuperBox/jps-people-importer | eb7128122d00879798a88b599d90e53c139a00da | ["MIT"] | null | null | null |
"""
This script creates users in a JAMF Pro Server instance from an LDAP query.
"""
# Copyright 2020 Dalton Durst
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import sys
from collections import namedtuple
from multiprocessing.pool import ThreadPool
from typing import List
from json.decoder import JSONDecodeError
import ldap
import requests
from ldap.controls import SimplePagedResultsControl
from conf import (
JAMF_PASSWORD,
JAMF_URL,
JAMF_USERNAME,
LDAP_BIND_PASSWORD,
LDAP_BIND_URI,
LDAP_BIND_USERNAME,
LDAP_FILTER,
LDAP_INSECURE,
LDAP_SEARCH_DN_LIST,
)
JAMF_AUTH = requests.auth.HTTPBasicAuth(JAMF_USERNAME, JAMF_PASSWORD)
SESSION = requests.Session()
User = namedtuple("User", ["sAMAccountName", "email", "last_name", "first_name"])
def eprint(*args, **kwargs):
"""Like print, but outputs to stderr."""
print(*args, file=sys.stderr, **kwargs)
def results_for_dn(directory: ldap.ldapobject.LDAPObject, base_dn: str, filter: str) -> List[User]:
"""Returns a list of User objects found in the directory object for filter
:param directory: A ldap.LDAPObject that has already been bound to a
directory.
:param base_dn: The base of the directory tree to run the search filter
against.
:param filter: The LDAP search filter to run on base_dn using directory.
"""
req_ctrl = SimplePagedResultsControl(True, size=5000, cookie="")
known_ldap_resp_ctrls = {
SimplePagedResultsControl.controlType: SimplePagedResultsControl,
}
# Send search request
msgid = directory.search_ext(
        base_dn, ldap.SCOPE_SUBTREE, filterstr=filter, serverctrls=[req_ctrl]
)
results = []
while True:
__, result_data, __, serverctrls = directory.result3(
msgid, resp_ctrl_classes=known_ldap_resp_ctrls
)
results.extend(
[
User(
ldap_entry["sAMAccountName"][0].decode(),
ldap_entry["mail"][0].decode(),
ldap_entry["sn"][0].decode(),
ldap_entry["givenName"][0].decode(),
)
for __, ldap_entry in result_data
]
)
page_controls = [
control
for control in serverctrls
if control.controlType == SimplePagedResultsControl.controlType
]
if page_controls:
if page_controls[0].cookie:
# Copy cookie from response control to request control
req_ctrl.cookie = page_controls[0].cookie
msgid = directory.search_ext(
base_dn,
ldap.SCOPE_SUBTREE,
                    filterstr=filter,
serverctrls=[req_ctrl],
)
else:
break
else:
eprint("Warning: Server ignores RFC 2696 control.")
break
return results
def create_user_in_jamf(user: User):
""" Creates a user in the JPS
:param user: A User object which will be used to create the JPS user.
This function uses the following module variables:
* SESSION must be a requests.Session instance
* JAMF_AUTH must be a requests.auth interface instance
* JAMF_URL must be the full base URL of a JAMF instance.
"""
eprint("Attempting to create", user.sAMAccountName)
xml = """
<user>
<name>{name}</name>
<full_name>{last_name}, {first_name}</full_name>
<email>{email}</email>
</user>
""".format(
name=user.sAMAccountName,
last_name=user.last_name,
first_name=user.first_name,
email=user.email,
).encode()
r = SESSION.post(
JAMF_URL + "/JSSResource/users/id/-1",
data=xml,
headers={"Content-Type": "application/xml", "Accept": "application/xml"},
auth=JAMF_AUTH,
)
try:
r.raise_for_status()
except requests.exceptions.RequestException as e:
eprint("Failed to create user with username", user.sAMAccountName)
eprint(e)
eprint(r.text)
else:
print(user.sAMAccountName)
def main():
eprint("Binding to LDAP...")
if LDAP_INSECURE:
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
directory = ldap.initialize(LDAP_BIND_URI)
directory.protocol_version = 3
directory.simple_bind_s(who=LDAP_BIND_USERNAME, cred=LDAP_BIND_PASSWORD)
eprint("Searching directory for users...")
ldap_users = []
for base_dn in LDAP_SEARCH_DN_LIST:
eprint("Searching DN", base_dn, "with filter", LDAP_FILTER)
ldap_users.extend(results_for_dn(directory, base_dn, LDAP_FILTER))
directory.unbind_s()
directory = None
eprint("Total LDAP users:", len(ldap_users))
eprint("Asking JPS for its user list...")
jamf_user_request = requests.get(
JAMF_URL + "/JSSResource/users",
auth=JAMF_AUTH,
headers={"Accept": "application/json"},
)
try:
jamf_user_json = jamf_user_request.json()
except JSONDecodeError:
eprint(jamf_user_request.text)
eprint("Failed to decode /JSSResource/users response as JSON.")
sys.exit(1)
jamf_usernames = frozenset([user["name"] for user in jamf_user_json["users"]])
eprint("Total JAMF users:", len(jamf_usernames))
missing_users = [
user for user in ldap_users if user.sAMAccountName not in jamf_usernames
]
eprint("Users to create:", len(missing_users))
with ThreadPool(10) as pool:
results = pool.map(create_user_in_jamf, missing_users)
eprint("Done. Created users:", len(results))
if __name__ == "__main__":
main()
| 31.009174 | 88 | 0.657988 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,713 | 0.401331 |
d0d82bd9b120db172a8b9b2c6622284777e11985 | 442 | py | Python | l8.py | snowleung/mypychallenge | 9482e267906a23fc10041f49f7d308c596447f16 | ["MIT"] | null | null | null | l8.py | snowleung/mypychallenge | 9482e267906a23fc10041f49f7d308c596447f16 | ["MIT"] | null | null | null | l8.py | snowleung/mypychallenge | 9482e267906a23fc10041f49f7d308c596447f16 | ["MIT"] | null | null | null |
# coding:utf-8
'''
from http://www.pythonchallenge.com/pc/def/integrity.html
'''
un = 'BZh91AY&SYA\xaf\x82\r\x00\x00\x01\x01\x80\x02\xc0\x02\x00 \x00!\x9ah3M\x07<]\xc9\x14\xe1BA\x06\xbe\x084'
pw = 'BZh91AY&SY\x94$|\x0e\x00\x00\x00\x81\x00\x03$ \x00!\x9ah3M\x13<]\xc9\x14\xe1BBP\x91\xf08'
def bz2_un():
return un.decode('bz2')
def bz2_pw():
return pw.decode('bz2')
if __name__ == '__main__':
print bz2_un()
print bz2_pw()
| 23.263158 | 110 | 0.665158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 294 | 0.665158 |
d0d8be8cd8fd46f56d5540bc555ac35643dd277b | 1,387 | py | Python | app.py | arian-nasr/Temporary-SMS | cedbe68b3e329362049c86e0974396bc660875da | ["MIT"] | null | null | null | app.py | arian-nasr/Temporary-SMS | cedbe68b3e329362049c86e0974396bc660875da | ["MIT"] | null | null | null | app.py | arian-nasr/Temporary-SMS | cedbe68b3e329362049c86e0974396bc660875da | ["MIT"] | 1 | 2021-09-10T05:02:48.000Z | 2021-09-10T05:02:48.000Z |
from flask import Flask, jsonify, request
from flask_cors import CORS
from twilio.twiml.messaging_response import MessagingResponse, Message
from twilio.rest import Client
import sqlconnector as sql
from datetime import datetime
import os
# configuration
DEBUG = True
twilio_sid = os.environ.get('TWILIO_SID')
twilio_secret = os.environ.get('TWILIO_SECRET')
client = Client(twilio_sid, twilio_secret)
# instantiate the app
app = Flask(__name__)
app.config.from_object(__name__)
# enable CORS
CORS(app, resources={r'/*': {'origins': '*'}})
@app.route('/api/temporarysms/writemessage', methods=['POST'])
def inbound_sms():
response = MessagingResponse()
message_sender = request.form['From']
message_body = request.form['Body']
message_date = datetime.now()
sql.write_to_database('phone1', message_date, message_sender, message_body)
return str(response)
@app.route('/api/temporarysms/available', methods=['GET'])
def available():
numbers = sql.read_availability_from_database('numbers')
return jsonify({
'status': 'success',
'numbers': numbers
})
@app.route('/api/temporarysms/readmessage', methods=['GET'])
def allmessages():
messages = sql.read_messages_from_database('phone1')
return jsonify({
'status': 'success',
'messages': messages
})
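# Local test sketch (not part of the original app): simulate Twilio's inbound
# webhook with a form-encoded POST. URL, port and payload values are
# illustrative only.
def example_inbound_post(base_url='http://localhost:5000'):
    import requests
    return requests.post(base_url + '/api/temporarysms/writemessage',
                         data={'From': '+15551234567', 'Body': 'hello'})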
if __name__ == '__main__':
app.run(host="192.168.0.21") | 28.895833 | 79 | 0.716655 | 0 | 0 | 0 | 0 | 779 | 0.561644 | 0 | 0 | 315 | 0.227109 |
d0dd4e4b7186f7188547583db738e69bad28912d | 1,174 | py | Python | testLib.py | quarker/stream-metrics | ae03748b75f840dbff346bedb195f9414243553f | ["Apache-2.0"] | null | null | null | testLib.py | quarker/stream-metrics | ae03748b75f840dbff346bedb195f9414243553f | ["Apache-2.0"] | 1 | 2018-02-27T20:57:06.000Z | 2018-02-27T20:57:06.000Z | testLib.py | dnguyen219/stream-metrics | ae03748b75f840dbff346bedb195f9414243553f | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
from scapy.all import *
import dpkt
import argparse
import sys
def printTS(pcapFile, lib):
if lib == 'scapy':
packets = rdpcap(pcapFile)
counter = 0
for packet in packets:
# print packet.time
print str.format('{0:.9f}', packet.time)
counter += 1
if counter >= 10:
break
elif lib == 'dpkt':
counter = 0
with open(pcapFile, 'rb') as f:
packets = dpkt.pcap.Reader(f)
for ts, buf in packets:
print str.format('{0:.9f}', ts)
counter += 1
if counter >= 10:
break
else:
print 'The requested lib is not supported.'
def main():
parser = argparse.ArgumentParser(description='PCAP Timestamp Diagnostic')
parser.add_argument('pcap', help='PCAP file to dump')
parser.add_argument('-l', '--lib', default='dpkt', help='lib to use: scapy or dpkt (if not supplied, default to dpkt)')
arguments = parser.parse_args(sys.argv[1:])
printTS(arguments.pcap, arguments.lib)
return 0
if __name__ == '__main__':
sys.exit(main())
| 28.634146 | 123 | 0.563884 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 253 | 0.215503 |
d0df0de6b9dc212d463d40040bc158f2287e5e3f | 1,101 | py | Python | scripts/txtool/txtool/get_logs.py | baajur/cita | 763c7866e6ea59ff96de085b4c72665f4e2f69ba | ["Apache-2.0"] | 930 | 2017-07-25T08:27:55.000Z | 2019-11-26T10:07:48.000Z | scripts/txtool/txtool/get_logs.py | baajur/cita | 763c7866e6ea59ff96de085b4c72665f4e2f69ba | ["Apache-2.0"] | 484 | 2017-07-25T14:32:44.000Z | 2019-11-14T11:16:45.000Z | scripts/txtool/txtool/get_logs.py | QingYanL/testCITA | 6d2e82c87831553c8d34749c56c4e5c8b94ece9c | ["Apache-2.0"] | 184 | 2017-07-26T01:37:36.000Z | 2019-11-19T07:07:49.000Z |
#!/usr/bin/env python3
# coding=utf-8
from __future__ import print_function
from jsonrpcclient.http_client import HTTPClient
from url_util import endpoint
import argparse
import simplejson
def get_topics():
with open("../output/transaction/topics", 'r') as topicfile:
topics = simplejson.load(topicfile)
return topics
def get_logs(topics, from_block, to_block):
try:
url = endpoint()
response = HTTPClient(url).request("getLogs", [{
"topics": topics,
"fromBlock": from_block,
"toBlock": to_block
}])
    except Exception:
return None
return response
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("--fromBlock", default="0")
parser.add_argument("--toBlock", default="latest")
opts = parser.parse_args()
return opts.fromBlock, opts.toBlock
def main():
from_block, to_block = parse_arguments()
topics = get_topics()
logger.debug(topics)
resp = get_logs(topics, from_block, to_block)
print(resp)
if __name__ == "__main__":
main()
| 22.02 | 64 | 0.659401 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 151 | 0.137148 |
d0df17b69424625513414f688731105176ee7001 | 319 | py | Python | demo/flask_app/flask_api/model.py | joshyjoseph/react-docker-swagger-demo | 7ba7dce6ff1457fd6bfa2af0873f60c07f918ade | ["MIT"] | null | null | null | demo/flask_app/flask_api/model.py | joshyjoseph/react-docker-swagger-demo | 7ba7dce6ff1457fd6bfa2af0873f60c07f918ade | ["MIT"] | null | null | null | demo/flask_app/flask_api/model.py | joshyjoseph/react-docker-swagger-demo | 7ba7dce6ff1457fd6bfa2af0873f60c07f918ade | ["MIT"] | null | null | null |
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class TeamModel(db.Model):
rowId = db.Column(db.Integer, primary_key=True)
teamName = db.Column(db.String)
teamRole = db.Column(db.String)
def __repr__(self) -> str:
return "{}:{}:{}".format(self.rowId, self.teamName, self.teamRole)
| 24.538462 | 74 | 0.680251 | 257 | 0.805643 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.031348 |
d0e03d5a5825d11b0df50f5373fcf12a4e9bb5fb | 114 | py | Python | rev_powersystems/__init__.py | NREL-SIIP/reV-PowerSystems | 39e2577082743f638426f14c8b01a1576a985558 | ["BSD-3-Clause"] | null | null | null | rev_powersystems/__init__.py | NREL-SIIP/reV-PowerSystems | 39e2577082743f638426f14c8b01a1576a985558 | ["BSD-3-Clause"] | null | null | null | rev_powersystems/__init__.py | NREL-SIIP/reV-PowerSystems | 39e2577082743f638426f14c8b01a1576a985558 | ["BSD-3-Clause"] | null | null | null |
__version__ = "0.0.1"
from .revx_output_siip import SIIPTimeSeriesMetadata, concat, max_fiber_size, match_points
| 28.5 | 90 | 0.824561 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0.061404 |
d0e07e8ca8f962f207b1a467ec124c229cd57722 | 2,095 | py | Python | w2/w2/t1.py | mvgrigoriev/ml-course | fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b | ["MIT"] | null | null | null | w2/w2/t1.py | mvgrigoriev/ml-course | fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b | ["MIT"] | null | null | null | w2/w2/t1.py | mvgrigoriev/ml-course | fc5cf01d0de0eb5771389ea3d978e0bd291fdf2b | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 07 07:52:52 2018
@author: MVGrigoriev
@task: kNN method
"""
import pandas
import numpy as np
from sklearn.neighbors import KNeighborsClassifier # Import class from scikit-learn
from sklearn.model_selection import KFold # Import KFold function
from sklearn.model_selection import cross_val_score # Import metrics for cross validation
from sklearn.preprocessing import scale # Import Scale function
data = pandas.read_csv('wine.data', header=None) # Import data
target = data[0] # Extract target
features = data.drop(0, axis=1) # Extract features
kf = KFold(n_splits=5, shuffle=True, random_state=42)
# At what k is the maximum quality obtained without feature normalization?
#
# What is the maximum quality without feature normalization (a number between 0 and 1)?
#
listOfAccuracy = []
for i in range(1, 51):
neigh = KNeighborsClassifier(n_neighbors=i)
neigh.fit(features, target)
cvs = cross_val_score(neigh, features, target, cv=kf, scoring='accuracy')
cvsValue = np.mean(cvs)
listOfAccuracy.append(cvsValue)
optValue = max(listOfAccuracy)
optIndex = listOfAccuracy.index(optValue)
with open('2_1.txt', 'w') as f1:
print(optIndex+1, file=f1, end='')
with open('2_2.txt', 'w') as f2:
print(round(optValue, 2), file=f2, end='')
# Which optimal k is obtained after feature normalization?
#
# What is the maximum quality after feature normalization (a number between 0 and 1)?
#
features = scale(features)
listOfAccuracy = []
for i in range(1, 51):
neigh = KNeighborsClassifier(n_neighbors=i)
neigh.fit(features, target)
cvs = cross_val_score(neigh, features, target, cv=kf, scoring='accuracy')
cvsValue = np.mean(cvs)
listOfAccuracy.append(cvsValue)
optValue = max(listOfAccuracy)
optIndex = listOfAccuracy.index(optValue)
with open('2_3.txt', 'w') as f3:
print(optIndex+1, file=f3, end='')
with open('2_4.txt', 'w') as f4:
print(round(optValue, 2), file=f4, end='')
| 36.754386 | 115 | 0.719809 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 772 | 0.368496 |
d0e15314e67099f053fc8acbea6a1a91c7a8ed52 | 1,946 | py | Python | tutorial/tutorial.py | isabella232/sosp21-artifact | 1b4a11c648e456c9ff9d74f16b09f4238d6694a0 | ["BSD-3-Clause"] | 1 | 2021-09-20T07:57:50.000Z | 2021-09-20T07:57:50.000Z | tutorial/tutorial.py | digi-project/sosp21-artifact | 1b4a11c648e456c9ff9d74f16b09f4238d6694a0 | ["BSD-3-Clause"] | 1 | 2022-03-21T11:33:33.000Z | 2022-03-21T11:33:33.000Z | tutorial/tutorial.py | isabella232/sosp21-artifact | 1b4a11c648e456c9ff9d74f16b09f4238d6694a0 | ["BSD-3-Clause"] | 2 | 2021-12-09T12:54:52.000Z | 2022-03-21T08:43:31.000Z |
# ipython utils
import os
import sys
import time
import yaml
import datetime
from pathlib import Path
from IPython import get_ipython
from IPython.core.magic import (register_line_magic, register_cell_magic,
register_line_cell_magic)
import warnings; warnings.simplefilter('ignore')
start = time.time()
@register_line_cell_magic
def elapsed_time(line, cell=None):
if cell is not None:
get_ipython().run_cell(cell)
print(datetime.timedelta(seconds=round(time.time() - start)))
os.environ.update({
"GROUP": "tutorial",
"VERSION": "v1",
"KOPFLOG": "false",
"DOCKER_TLS_VERIFY": "1",
"DOCKER_HOST": "tcp://127.0.0.1:32770",
"DOCKER_CERT_PATH": str(Path(os.environ["HOME"], ".minikube/certs")),
"MINIKUBE_ACTIVE_DOCKERD": "minikube",
"IMAGEPULL": "Never",
"REPO": "tutorial",
})
workdir = (Path(os.environ["GOPATH"],
"src", "digi.dev",
"tutorial", "workdir"))
os.environ["WORKDIR"] = str(workdir)
def _rm_tree(pth):
pth = Path(pth)
for child in pth.glob('*'):
if child.is_file():
child.unlink()
else:
_rm_tree(child)
pth.rmdir()
def create(m: str, new=True):
y = yaml.load(m, Loader=yaml.FullLoader)
assert "kind" in y
_dir = Path(workdir, y["kind"].lower())
if _dir.is_dir() and new:
_rm_tree(_dir)
Path(_dir, "driver").mkdir(parents=True, exist_ok=True)
Path(_dir, "deploy").mkdir(parents=True, exist_ok=True)
Path(_dir, "deploy", "cr_run.yaml").touch()
Path(_dir, "driver", "handler.py").touch()
with open(Path(_dir, "model.yaml"), "w") as f:
f.write(m)
def handler_file(k):
return Path(workdir, k, "driver", "handler.py")
def model_file(k, new=True):
if new:
return Path(workdir, k, "deploy", "cr.yaml")
else:
        return Path(workdir, k, "deploy", "cr_run.yaml")
 | 26.297297 | 73 | 0.613052 | 0 | 0 | 0 | 0 | 188 | 0.096608 | 0 | 0 | 433 | 0.222508 |
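A sketch of how these helpers might be driven from a notebook cell; the `Mock` kind and the model body below are invented for illustration:
model = """\
kind: Mock
metadata:
  name: demo
"""
create(model)                  # scaffolds workdir/mock/{driver,deploy} and writes model.yaml
print(handler_file("mock"))    # .../mock/driver/handler.py
print(model_file("mock"))      # .../mock/deploy/cr.yaml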
d0e275de32ffad1ac148c2e85a79a876fec1fd53 | 362 | py | Python | examples/rotation.py | aallan/picamera2 | d64fbe669e071402d11c043cf044f52f6b2edc57 | [
"BSD-2-Clause"
]
| null | null | null | examples/rotation.py | aallan/picamera2 | d64fbe669e071402d11c043cf044f52f6b2edc57 | [
"BSD-2-Clause"
]
| null | null | null | examples/rotation.py | aallan/picamera2 | d64fbe669e071402d11c043cf044f52f6b2edc57 | [
"BSD-2-Clause"
]
| null | null | null | #!/usr/bin/python3
# Run the camera with a 180 degree rotation.
from qt_gl_preview import *
from picamera2 import *
import libcamera  # Transform below comes from here; imported explicitly rather than via the star import
import time
picam2 = Picamera2()
preview = QtGlPreview(picam2)
preview_config = picam2.preview_configuration()
preview_config["transform"] = libcamera.Transform(hflip=1, vflip=1)
picam2.configure(preview_config)
picam2.start()
time.sleep(5)
| 20.111111 | 67 | 0.779006 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 73 | 0.201657 |
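For reference, flipping on a single axis gives a mirror image rather than a rotation; the same API with one flag (a sketch):
preview_config["transform"] = libcamera.Transform(hflip=1)  # horizontal mirror only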
d0e40ed7df88adf45e5432f3af67cf4214a7c00a | 331 | py | Python | examples/system-prompt.py | davidbrochart/python-prompt-toolkit | 8498692b31671fee7c5a426300a9df2ee290eae2 | [
"BSD-3-Clause"
]
| 2 | 2020-04-12T01:23:25.000Z | 2021-05-22T13:46:00.000Z | examples/system-prompt.py | davidbrochart/python-prompt-toolkit | 8498692b31671fee7c5a426300a9df2ee290eae2 | [
"BSD-3-Clause"
]
| null | null | null | examples/system-prompt.py | davidbrochart/python-prompt-toolkit | 8498692b31671fee7c5a426300a9df2ee290eae2 | [
"BSD-3-Clause"
]
| 2 | 2016-12-30T23:57:44.000Z | 2021-05-22T13:50:21.000Z | #!/usr/bin/env python
from __future__ import unicode_literals
from prompt_toolkit import prompt
if __name__ == '__main__':
print('If you press meta-! or esc-! at the following prompt, you can enter system commands.')
answer = prompt('Give me some input: ', enable_system_bindings=True)
print('You said: %s' % answer)
| 33.1 | 97 | 0.725076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 153 | 0.462236 |
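A slightly more defensive variant (a sketch) that exits cleanly if the user presses Ctrl-C or Ctrl-D at the prompt:
try:
    answer = prompt('Give me some input: ', enable_system_bindings=True)
except (KeyboardInterrupt, EOFError):
    answer = ''
print('You said: %s' % answer)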
d0e4e293078cbb35e7bb94fd2a5b26005400333e | 3,294 | py | Python | searcheval/test/test_metrics.py | VikasNeha/searcheval | 90f3be8e57dd70179f707ef73241306cdd2ec915 | [
"Apache-2.0"
]
| 1 | 2018-01-18T18:37:11.000Z | 2018-01-18T18:37:11.000Z | searcheval/test/test_metrics.py | VikasNeha/searcheval | 90f3be8e57dd70179f707ef73241306cdd2ec915 | [
"Apache-2.0"
]
| 1 | 2022-01-11T10:37:11.000Z | 2022-01-11T17:11:01.000Z | searcheval/test/test_metrics.py | VikasNeha/searcheval | 90f3be8e57dd70179f707ef73241306cdd2ec915 | [
"Apache-2.0"
]
| 1 | 2022-01-11T10:46:05.000Z | 2022-01-11T10:46:05.000Z | import unittest
import searcheval.metrics as sm
class MetricsTests(unittest.TestCase):
def test_mean(self):
vector = [2, 3, 7]
mean = sm.mean(vector)
self.assertEqual(mean, 4)
def test_precision(self):
relevance_vector = [1, 0, 0, 1, 0]
precision = sm.precision(relevance_vector)
self.assertEqual(precision, 0.4)
def test_precision_at_rank(self):
relevance_vector = [1, 0, 0, 1, 0]
rank = 2
precision_at_rank = sm.precision_at_rank(relevance_vector, rank)
self.assertEqual(precision_at_rank, 0.5)
def test_precision_vector(self):
relevance_vector = [1, 0]
precision_vector = sm.precision_vector(relevance_vector)
self.assertEqual(list(precision_vector), [1.0, 0.5])
def test_avg_prec(self):
relevance_vector = [1, 0]
avg_prec = sm.avg_prec(relevance_vector)
self.assertEqual(avg_prec, 0.5)
def test_r_prec(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 2
r_prec = sm.r_prec(relevance_vector, recall_base)
self.assertEqual(r_prec, 0.5)
# check that r_prec handles recall base larger than number of samples
r_prec = sm.r_prec([1, 0], 5)
self.assertEqual(r_prec, 0.2)
def test_recall(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 4
recall = sm.recall(relevance_vector, recall_base)
self.assertEqual(recall, 0.5)
def test_recall_at_rank(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 4
rank = 2
precision_at_rank = sm.recall_at_rank(relevance_vector, recall_base,
rank)
self.assertEqual(precision_at_rank, 0.25)
def test_recall_vector(self):
relevance_vector = [1, 0, 0, 1, 0]
recall_base = 4
recall_vector = sm.recall_vector(relevance_vector, recall_base)
self.assertEqual(list(recall_vector), [0.25, 0.25, 0.25, 0.5, 0.5])
def test_nDCG(self):
# binary relevance
gain_vector = [1, 1, 0, 0, 0] # perfect query
ideal_gain_vector = [1, 1, 0, 0, 0]
nDCG = sm.nDCG(gain_vector, ideal_gain_vector)
self.assertEqual(nDCG, 1.0)
# graded relevance
gain_vector = [3, 2, 1, 1, 0] # perfect query
ideal_gain_vector = [3, 2, 1, 1, 0]
nDCG = sm.nDCG(gain_vector, ideal_gain_vector)
self.assertEqual(nDCG, 1.0)
def test_nDCG_at_rank(self):
gain_vector = [1, 0, 1, 0, 0]
ideal_gain_vector = [1, 1, 0, 0, 0]
rank = 1
nDCG_at_rank = sm.nDCG_at_rank(gain_vector, ideal_gain_vector, rank)
self.assertEqual(nDCG_at_rank, 1.0)
# not perfect query
gain_vector = [1, 0, 1, 0, 0]
ideal_gain_vector = [1, 1, 0, 0, 0]
rank = 2
nDCG_at_rank = sm.nDCG_at_rank(gain_vector, ideal_gain_vector, rank)
self.assertTrue(nDCG_at_rank < 1.0)
def test_nDCG_vector(self):
gain_vector = [1, 1, 0, 0, 0]
ideal_gain_vector = [1, 1, 0, 0, 0]
nDCG_vector = sm.nDCG_vector(gain_vector, ideal_gain_vector)
self.assertEqual(nDCG_vector, [1.0, 1.0, 1.0, 1.0, 1.0])
if __name__ == '__main__':
unittest.main()
| 32.94 | 77 | 0.608682 | 3,193 | 0.969338 | 0 | 0 | 0 | 0 | 0 | 0 | 164 | 0.049787 |
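From test_precision_vector and test_avg_prec one can infer that avg_prec averages the precision at each relevant rank over the full vector length; a plain-Python reference consistent with those two tests (an inference, not the library's actual implementation):
def avg_prec_reference(relevance):
    hits, total = 0, 0.0
    for k, rel in enumerate(relevance, start=1):
        if rel:
            hits += 1
            total += hits / k          # precision at rank k
    return total / len(relevance)      # divide by vector length, matching test_avg_prec
assert avg_prec_reference([1, 0]) == 0.5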
d0e54036779246dea8bdd23ebf8e7a5ba24254b9 | 1,054 | py | Python | debpkgr/compat.py | sassoftware/python-debpkgr | 220d57b461c2f323a30fb44b2d1126ca4a0f9ea6 | [
"Apache-2.0"
]
| 7 | 2017-03-09T11:28:42.000Z | 2019-10-26T02:12:09.000Z | debpkgr/compat.py | sassoftware/python-debpkgr | 220d57b461c2f323a30fb44b2d1126ca4a0f9ea6 | [
"Apache-2.0"
]
| 12 | 2017-03-24T07:45:41.000Z | 2019-12-20T15:44:11.000Z | debpkgr/compat.py | sassoftware/python-debpkgr | 220d57b461c2f323a30fb44b2d1126ca4a0f9ea6 | [
"Apache-2.0"
]
| 5 | 2017-03-09T11:28:15.000Z | 2021-02-18T13:14:34.000Z | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# flake8: noqa
from six import (
add_metaclass,
iteritems,
raise_from,
string_types,
text_type,
)
from six.moves import configparser
from six.moves.reprlib import Repr
from six.moves.urllib.parse import parse_qs, urlsplit, urlunsplit
from six.moves.urllib.parse import urlparse, urlencode
from six.moves.urllib.request import urlopen, urlretrieve
from six.moves.urllib.error import HTTPError
try:
maketrans = str.maketrans
except AttributeError:
from string import maketrans
| 30.114286 | 74 | 0.766603 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 549 | 0.520873 |
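Downstream modules would then import compatibility names from here rather than from six or the stdlib directly, e.g. (illustrative):
from debpkgr.compat import urlparse, string_types
parts = urlparse("http://archive.example.org/debian/pool")
assert isinstance(parts.netloc, string_types)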
d0e6ec9507e696a89752b35b6c0b3c155c6656fe | 16,732 | py | Python | nomic/proposal.py | HactarCE/Quobot | e13f28990f212b92835dd9c8fcbdc53bc37d5ab8 | [
"MIT"
]
| null | null | null | nomic/proposal.py | HactarCE/Quobot | e13f28990f212b92835dd9c8fcbdc53bc37d5ab8 | [
"MIT"
]
| null | null | null | nomic/proposal.py | HactarCE/Quobot | e13f28990f212b92835dd9c8fcbdc53bc37d5ab8 | [
"MIT"
]
| null | null | null | from collections import OrderedDict
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Optional, Set
import discord
import functools
from .gameflags import GameFlagsManager
from .playerdict import PlayerDict
from .repoman import GameRepoManager
from constants import colors, emoji, info, strings
import utils
class ProposalStatus(Enum):
VOTING = 'voting'
PASSED = 'passed'
FAILED = 'failed'
DELETED = 'deleted'
VOTE_ALIASES = {
'+': 'for',
'-': 'against',
'abstain': 'abstain',
'against': 'against',
'del': 'remove',
'delete': 'remove',
'for': 'for',
'remove': 'remove',
'rm': 'remove',
}
VOTE_TYPES = ('for', 'against', 'abstain')
@dataclass
class _Proposal:
    game: 'ProposalManager' and GameFlagsManager  # annotation hack: the game object implements both
n: int
author: discord.Member
content: str
status: ProposalStatus = ProposalStatus.VOTING
message_id: Optional[int] = None
votes: PlayerDict = None
timestamp: int = None
@functools.total_ordering
class Proposal(_Proposal):
"""A dataclass representing a Nomic proposal.
Attributes:
- game
- n -- integer; proposal ID number
- author -- discord.Member
- content -- string
Optional attributes:
    - status (default ProposalStatus.VOTING)
- message_id (default None) -- discord.Message or the ID of one (converted
to integer ID)
- votes (default {}) -- PlayerDict of ints; positive numbers are votes
for, negative numbers are votes against, and zero is an abstention
- timestamp (default now)
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not isinstance(self.author, discord.Member):
self.author = self.game.get_member(self.author)
# if isinstance(self.message_id, discord.Message):
# self.message_id = self.message_id.id
self.votes = PlayerDict(self.game, self.votes)
self.status = ProposalStatus(self.status)
if self.timestamp is None:
self.timestamp = utils.now()
def export(self) -> dict:
return OrderedDict(
n=self.n,
author=self.author and self.author.id,
content=self.content,
status=self.status.value,
message_id=self.message_id,
votes=self.votes.export(),
timestamp=self.timestamp,
)
    async def set_vote(self, player: discord.Member, new_vote_amount: Optional[int]):
self.game.assert_locked()
if self.status != ProposalStatus.VOTING:
return False
if new_vote_amount == 0 and not self.game.flags.allow_vote_abstain:
new_vote_amount = None
if player in self.votes and not self.game.flags.allow_vote_change:
return False
if new_vote_amount and abs(new_vote_amount) > 1 and not self.game.flags.allow_vote_multi:
new_vote_amount //= abs(new_vote_amount)
        if new_vote_amount is None:
            if player in self.votes:
                del self.votes[player]
        else:
            self.votes[player] = new_vote_amount
await self.refresh()
self.game.save()
return True
async def vote_for(self, player: discord.Member, amount: int = 1):
old_vote_amount = self.votes.get(player)
if old_vote_amount is None:
new_vote_amount = amount
elif old_vote_amount < 0:
new_vote_amount = None
else:
new_vote_amount = old_vote_amount + amount
return await self.set_vote(player, new_vote_amount)
async def vote_against(self, player: discord.Member, amount: int = 1):
old_vote_amount = self.votes.get(player)
if old_vote_amount is None:
new_vote_amount = -amount
elif old_vote_amount > 0:
new_vote_amount = None
else:
new_vote_amount = old_vote_amount - amount
return await self.set_vote(player, new_vote_amount)
async def vote_abstain(self, player: discord.Member):
old_vote_amount = self.votes.get(player)
if old_vote_amount == 0:
new_vote_amount = None
else:
new_vote_amount = 0
return await self.set_vote(player, new_vote_amount)
async def vote_abstain_or_remove(self, player: discord.Member):
old_vote_amount = self.votes.get(player)
if old_vote_amount is None:
new_vote_amount = 0
else:
new_vote_amount = None
return await self.set_vote(player, new_vote_amount)
async def vote_remove(self, player: discord.Member):
return await self.set_vote(player, None)
@property
def votes_for(self) -> int:
return sum(v for v in self.votes.values() if v > 0)
@property
def votes_against(self) -> int:
return -sum(v for v in self.votes.values() if v < 0)
@property
def votes_abstain(self) -> int:
return sum(v == 0 for v in self.votes.values())
async def set_status(self, new_status: ProposalStatus):
self.game.assert_locked()
self.status = new_status
await self.refresh()
self.game.save()
async def set_content(self, new_content: str):
self.game.assert_locked()
self.content = new_content
await self.refresh()
self.game.save()
async def refresh(self):
await self.game.refresh_proposal(self)
async def repost(self):
await self.game.repost_proposal(self)
async def fetch_message(self) -> discord.Message:
try:
return await self.game.proposals_channel.fetch_message(self.message_id)
except (discord.NotFound, discord.Forbidden, discord.HTTPException):
return None
@property
def discord_link(self) -> str:
return utils.discord.MESSAGE_LINK_FORMAT.format(
guild=self.game.guild,
channel=self.game.proposals_channel,
message_id=self.message_id,
)
@property
def github_link(self):
return f'{info.GITHUB_REPO_LINK}/blob/{self.game.repo.name}/proposals.md#{self.n}'
@property
def embed(self) -> discord.Embed:
"""Return an embed displaying this proposal."""
# Make the title; e.g. "Proposal #10 -- Passed"
title = f"Proposal #{self.n}"
if self.status != ProposalStatus.VOTING:
title += " \N{EM DASH} "
title += self.status.value.capitalize()
if self.status == ProposalStatus.DELETED:
return discord.Embed(
color=colors.DELETED,
title=title,
)
embed = discord.Embed(
color={
ProposalStatus.VOTING: colors.INFO,
ProposalStatus.PASSED: colors.SUCCESS,
ProposalStatus.FAILED: colors.ERROR,
}[self.status],
title=title,
description=self.content,
timestamp=datetime.fromtimestamp(self.timestamp),
)
# Make an embed field for each type of vote
for vote_type in VOTE_TYPES:
total = 0
value = ''
# Count the votes and list the users
for player, vote_amount in self.votes.items():
if vote_type == 'for':
if vote_amount <= 0:
continue
elif vote_type == 'against':
if vote_amount >= 0:
continue
vote_amount *= -1
elif vote_type == 'abstain':
if vote_amount != 0:
continue
vote_amount = 1
value += player.mention
if vote_amount > 1:
value += f" ({vote_amount}x)"
value += "\n"
total += vote_amount
name = vote_type.capitalize()
if total:
name += f" ({total})"
if vote_type == 'abstain' and total == 0:
continue
embed.add_field(
name=name,
value=value or strings.EMPTY_LIST,
inline=True,
)
# Set the footer
embed.set_footer(**utils.discord.embed_happened_footer("Submitted", self.author))
return embed
@property
def markdown(self):
s = f"<a name='{self.n}'/>"
s += "\n\n"
s += f"## #{self.n}"
if self.status != ProposalStatus.VOTING:
s += f" \N{EM DASH} {self.status.value.capitalize()}"
s += "\n\n"
if self.status != ProposalStatus.DELETED:
s += self.content
s += "\n\n"
return s
def __str__(self):
return f"proposal #{self.n}"
def __lt__(self, other):
return self.n < other.n
def __eq__(self, other):
return type(self) == type(other) and self.n == other.n
def __hash__(self):
# None of these values should ever change, and they should uniquely
# identify this proposal.
return hash((self.game.guild.id, self.n, self.timestamp))
class ProposalManager(GameRepoManager):
def load(self):
db = self.get_db('proposals')
self.proposals_channel = db.get('channel')
if self.proposals_channel:
self.proposals_channel = self.guild.get_channel(self.proposals_channel)
self.proposals = []
if db.get('proposals'):
for proposal in db['proposals']:
self.proposals.append(Proposal(game=self, **proposal))
def save(self):
db = self.get_db('proposals')
db.replace(OrderedDict(
channel=self.proposals_channel and self.proposals_channel.id,
proposals=[p.export() for p in self.proposals],
))
db.save()
with open(self.get_file('proposals.md'), 'w') as f:
f.write(f"# {self.guild.name} \N{EM DASH} Proposals")
f.write('\n\n')
for p in self.proposals:
f.write(p.markdown)
async def commit_proposals_and_log(self,
agent: discord.Member,
action: str,
proposal: Proposal,
post: str = '',
link_to_proposal: bool = True,
**kwargs):
"""Commit the proposals Markdown file and log the event."""
if await self.repo.is_clean('proposals.md'):
return
commit_msg = markdown_msg = f"{utils.discord.fake_mention(agent)} {action} "
commit_msg += str(proposal)
if link_to_proposal:
markdown_msg += f"[{proposal}](../proposals.md#{proposal.n})"
else:
markdown_msg += str(proposal)
await self.commit('proposals.md', msg=commit_msg + post)
await self.log(markdown_msg + post, **kwargs)
async def refresh_proposal(self, *proposals: Proposal):
"""Update the messages for one or more proposals.
May throw `TypeError`, `ValueError`, or `discord.Forbidden` exceptions.
"""
self.assert_locked()
for proposal in sorted(set(proposals)):
try:
m = await proposal.fetch_message()
await m.clear_reactions()
await m.edit(embed=proposal.embed)
if proposal.status == ProposalStatus.VOTING:
await m.add_reaction(emoji.VOTE_FOR)
await m.add_reaction(emoji.VOTE_AGAINST)
await m.add_reaction(emoji.VOTE_ABSTAIN)
except discord.NotFound:
await self.repost_proposal(proposal)
return
async def repost_proposal(self, *proposals: Proposal):
"""Remove and repost the messages for one or more proposals.
May throw `TypeError`, `ValueError`, or `discord.Forbidden` exceptions.
"""
self.assert_locked()
proposal_range = range(min(proposals).n, len(self.proposals) + 1)
proposals = list(map(self.get_proposal, proposal_range))
proposal_messages = []
for proposal in proposals:
m = await proposal.fetch_message()
if m:
proposal_messages.append(m)
if proposal_messages:
await utils.discord.safe_bulk_delete(proposal_messages)
for proposal in proposals:
m = await self.proposals_channel.send(embed=discord.Embed(
color=colors.TEMPORARY,
title=f"Preparing proposal #{proposal.n}\N{HORIZONTAL ELLIPSIS}",
))
proposal.message_id = m.id
self.save()
await self.refresh_proposal(*proposals)
def has_proposal(self, n: int) -> bool:
return isinstance(n, int) and 1 <= n <= len(self.proposals)
def get_proposal(self, n: int) -> Optional[Proposal]:
if self.has_proposal(n):
return self.proposals[n - 1]
async def get_proposal_messages(self) -> Set[discord.Message]:
messages = set()
for proposal in self.proposals:
            m = await proposal.fetch_message()
            if m is not None:  # fetch_message() returns None for missing/deleted messages
                messages.add(m)
return messages
async def add_proposal(self, **kwargs):
self.assert_locked()
n = len(self.proposals) + 1
new_proposal = Proposal(game=self, n=n, **kwargs)
self.proposals.append(new_proposal)
# ProposalManager.repost_proposal() calls BaseGame.save() so we
# don't have to do that here.
await self.repost_proposal(new_proposal)
return new_proposal
async def permadel_proposal(self, proposal: Proposal):
self.assert_locked()
if not proposal.n == len(self.proposals):
raise RuntimeError("Cannot delete any proposal other than the last one")
del self.proposals[proposal.n - 1]
self.save()
await (await proposal.fetch_message()).delete()
async def log_proposal_submit(self,
agent: discord.Member,
proposal: Proposal):
await self.commit_proposals_and_log(
agent, "submitted", proposal, link_to_commit=True
)
async def log_proposal_permadel(self,
agent: discord.Member,
proposal: Proposal):
await self.commit_proposals_and_log(
agent, "permanently deleted", proposal, link_to_proposal=False, link_to_commit=True
)
async def log_proposal_change_status(self,
agent: discord.Member,
proposal: Proposal):
if proposal.status == ProposalStatus.VOTING:
action = "reopened"
else:
action = proposal.status.value
await self.commit_proposals_and_log(
agent, action, proposal, link_to_commit=True
)
async def log_proposal_change_content(self,
agent: discord.Member,
proposal: Proposal):
await self.commit_proposals_and_log(
agent, "edited", proposal, link_to_commit=True
)
async def log_proposal_vote(self,
agent: discord.Member,
proposal: Proposal,
player: discord.Member,
old_vote_amount: Optional[int],
new_vote_amount: Optional[int]):
if old_vote_amount == new_vote_amount:
return
if new_vote_amount is None:
action = "removed their vote from"
elif old_vote_amount is not None:
action = "changed their vote on"
elif new_vote_amount == 0:
action = "abstained on"
elif new_vote_amount > 0:
action = "voted for"
elif new_vote_amount < 0:
action = "voted against"
else:
action = "WTFed"
if player != agent:
post = f" on behalf of {utils.discord.fake_mention(player)}"
else:
post = ''
if abs(old_vote_amount or 0) > 1 or abs(new_vote_amount or 0) > 1:
post += " ("
if old_vote_amount is not None:
post += f"was {old_vote_amount}"
if new_vote_amount:
post += "; "
if new_vote_amount is not None:
post += f"now {new_vote_amount}"
post += ")"
await self.commit_proposals_and_log(
agent, action, proposal, post=post
)
| 35.449153 | 97 | 0.573392 | 16,059 | 0.959778 | 0 | 0 | 8,363 | 0.499821 | 9,285 | 0.554925 | 2,479 | 0.148159 |
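The vote encoding above (sign = direction, magnitude = weight, zero = abstention) makes the three tallies one-liners; a standalone sketch of the same convention, with invented PlayerDict contents:
votes = {"alice": 2, "bob": -1, "carol": 0}
votes_for = sum(v for v in votes.values() if v > 0)       # 2
votes_against = -sum(v for v in votes.values() if v < 0)  # 1
votes_abstain = sum(v == 0 for v in votes.values())       # 1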
d0e74c23345f71b01c04f878f36260962612bba5 | 897 | py | Python | vexmpp/features/stream_mgmt.py | nicfit/vexmpp | e67070d2822da8356345976fb15d365935b550a6 | [
"MIT"
]
| null | null | null | vexmpp/features/stream_mgmt.py | nicfit/vexmpp | e67070d2822da8356345976fb15d365935b550a6 | [
"MIT"
]
| 349 | 2017-02-18T22:48:17.000Z | 2021-12-13T19:50:23.000Z | vexmpp/features/stream_mgmt.py | nicfit/vexmpp | e67070d2822da8356345976fb15d365935b550a6 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
from ..stanzas import Stanza
from ..errors import makeStanzaError
from ..protocols.stream_mgmt import NS_URI
async def handle(stream, feature_elem, sm_opts, timeout=None):
    assert feature_elem is not None
nsmap = {"sm": NS_URI}
enable_elem = Stanza("enable", nsmap={None: NS_URI})
if sm_opts and sm_opts.resume:
enable_elem.set("resume", "true")
stream.send(enable_elem)
resp = await stream.wait([("/sm:enabled", nsmap),
("/sm:failed", nsmap)], timeout=timeout)
if resp.name == "{%s}failed" % NS_URI:
raise makeStanzaError(resp.xml)
sm_opts.sm_id = resp.get("id")
sm_opts.resume = bool(resp.get("resume") and
resp.get("resume") in ("1", "true"))
sm_opts.resume_location = resp.get("location")
sm_opts.max_resume_time = resp.get("max")
return True
| 32.035714 | 70 | 0.625418 | 0 | 0 | 0 | 0 | 0 | 0 | 761 | 0.848384 | 130 | 0.144928 |
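Assuming NS_URI is the XEP-0198 stream-management namespace (urn:xmpp:sm:3), the exchange this coroutine drives looks roughly like this on the wire (illustrative):
# C: <enable xmlns='urn:xmpp:sm:3' resume='true'/>
# S: <enabled xmlns='urn:xmpp:sm:3' id='some-id' resume='true' location='host:5222' max='600'/>
#    ...or, on failure:
# S: <failed xmlns='urn:xmpp:sm:3'/>    -> handle() raises a stanza error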
d0e7767371bb84c6b1d217086e05c8cb123f2e3b | 293 | py | Python | napari_allencell_segmenter/model/channel.py | neuromusic/napari-allencell-segmenter | c732408023c828c07ec2a425f4f426174d94946b | [
"BSD-3-Clause"
]
| 8 | 2021-06-29T09:24:22.000Z | 2022-03-22T23:43:10.000Z | napari_allencell_segmenter/model/channel.py | neuromusic/napari-allencell-segmenter | c732408023c828c07ec2a425f4f426174d94946b | [
"BSD-3-Clause"
]
| 97 | 2021-02-18T02:39:31.000Z | 2021-06-18T21:38:41.000Z | napari_allencell_segmenter/model/channel.py | neuromusic/napari-allencell-segmenter | c732408023c828c07ec2a425f4f426174d94946b | [
"BSD-3-Clause"
]
| 2 | 2021-09-14T22:07:22.000Z | 2022-02-07T16:41:02.000Z | from dataclasses import dataclass
from typing import Optional
@dataclass
class Channel:
    index: int
    name: Optional[str] = None
    @property
    def display_name(self):
        # the original test `self.name.strip().isspace()` was always False
        # (strip() removes the whitespace first), so whitespace-only names
        # never fell back; test the stripped value directly instead
        if self.name is None or not self.name.strip():
return f"Channel {self.index}"
return f"Ch{self.index}. {self.name}"
| 19.533333 | 60 | 0.627986 | 245 | 0.836177 | 0 | 0 | 256 | 0.87372 | 0 | 0 | 54 | 0.1843 |
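With the whitespace fix above, the fallback behaves as intended (a quick sketch):
assert Channel(0).display_name == "Channel 0"          # no name given
assert Channel(1, "   ").display_name == "Channel 1"   # whitespace-only name falls back
assert Channel(2, "DAPI").display_name == "Ch2. DAPI"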
d0e808e3235cc1782c3a0aac8f2ccd3eaf6e8e7d | 705 | py | Python | tests/feature_extraction/pattern/test_pattern.py | fidsusj/HateSpeechDetection | 1306a8a901aed856e51ee8fe16158ff267fb5405 | [
"BSD-3-Clause"
]
| null | null | null | tests/feature_extraction/pattern/test_pattern.py | fidsusj/HateSpeechDetection | 1306a8a901aed856e51ee8fe16158ff267fb5405 | [
"BSD-3-Clause"
]
| 17 | 2020-11-08T16:55:54.000Z | 2021-05-28T05:58:17.000Z | tests/feature_extraction/pattern/test_pattern.py | fidsusj/HateSpeechDetection | 1306a8a901aed856e51ee8fe16158ff267fb5405 | [
"BSD-3-Clause"
]
| 2 | 2020-12-18T10:42:58.000Z | 2021-05-24T19:32:57.000Z | from unittest import TestCase
import pandas as pd
from feature_extraction.pattern.pattern import Pattern
from preprocessing.corpus import build_corpus
class Test_Pattern(TestCase):
raw_data = {
"class": [0, 0, 1, 0],
"content": [
"John hates bitches",
"John hates hookers",
"John loves turtles",
"Bitch ass nigga",
],
}
def test_pattern_count(self):
df = build_corpus(pd.DataFrame(data=self.raw_data))
pattern_extractor = Pattern(min_pattern_size=2, max_pattern_size=2, threshold=2)
pattern_extractor.extract_features(df)
self.assertEqual([2, 2, 2, 1], df["pattern_count"].tolist())
| 27.115385 | 88 | 0.639716 | 550 | 0.780142 | 0 | 0 | 0 | 0 | 0 | 0 | 108 | 0.153191 |
d0e812a6800aac72cae877576878f53d8cd3bd64 | 11,525 | py | Python | main.py | C3ald/Token-API | 5bb34ac1276b23a6f3c780c8d7011d621f02ab90 | [
"MIT"
]
| 4 | 2021-12-20T22:51:20.000Z | 2021-12-30T17:55:34.000Z | main.py | C3ald/Token-API | 5bb34ac1276b23a6f3c780c8d7011d621f02ab90 | [
"MIT"
]
| 14 | 2021-12-08T18:30:00.000Z | 2022-01-06T05:27:08.000Z | main.py | C3ald/Token-API | 5bb34ac1276b23a6f3c780c8d7011d621f02ab90 | [
"MIT"
]
| null | null | null | from starlette.responses import Response
from passlib.hash import pbkdf2_sha256
from starlette.websockets import WebSocketDisconnect
from blockchain import Blockchain
# from wallet import Wallet
from fastapi import FastAPI, WebSocket
import uvicorn
import socket
import requests as r
from pydantic import BaseModel
from fastapi.templating import Jinja2Templates
import json
import asyncio
# from Utilities.algorithims import Algs
import time as t
import random
import base64
from sys import getsizeof
# from Utilities.cryptography_testing import Make_Keys
# from Utilities.cryptography_testing import primary_addresses
# from Utilities.cryptography_testing import Check_Wallet_Balance
# from Utilities.cryptography_testing import Ring_CT
# from Utilities.cryptography_testing import Decoy_addresses
from Utilities.cryptography_testing import *
from fastapi_signals import *
ring_ct = Ring_CT()
checkbalance = Check_Wallet_Balance()
create_keys = Make_Keys()
primary_addr = primary_addresses()
decoy_addresses = Decoy_addresses()
#imported templates
#from fastapi.staticfiles import StaticFiles #imported staticfiles
# {
# "node": [
# "http://127.0.0.1:8000", "http://127.0.0.1:8001"
# ]
#}
tags_metadata = [
    {'name': 'information',
     'description': 'This will allow you to get info about the blockchain'},
    {'name': 'wallet',
     'description': 'this will allow you to access your wallet and make wallets'},
    {'name': 'transaction',
     'description': 'transactions'},
    {'name': 'mining',
     'description': 'mining'},
    {'name': 'nodes',
     'description': 'adding nodes and replacing the chain'},
    {'name': 'contracts',
     'description': 'smart contracts on the blockchain'},
]
# the original was a single dict with duplicate 'name'/'description' keys,
# so only the last pair survived; FastAPI expects one dict per tag
# CONSTANTS
SERVER_NAME = 'Token Network'
SERVER_HOST = '0.0.0.0'
SERVER_PORT = 8000
SERVER_RELOAD = False
DESCRIPTION = "Welcome to The Token Network, a blockchain network with a cryptocurrency called Token, it's like Dogecoin and Bitcoin but faster than Bitcoin and harder to mine than Dogecoin, welcome to the Future of the world."
algs = Algs()
S = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
hostname = socket.gethostname()
IP = socket.gethostbyname(hostname)
# wallet = Wallet()
class Url(BaseModel):
node: str
# class Phrase(BaseModel):
# phrase: str
app = FastAPI(title=SERVER_NAME, openapi_tags=tags_metadata, description=DESCRIPTION)
templates = Jinja2Templates(directory="templates/")
blockchain = Blockchain()
class Transaction(BaseModel):
sender_public_send_key: str
sender_private_send_key: str
sender_view_key: str
receiver: str
amount: float
class AddTransaction(BaseModel):
sender_public_send_key: str
sender_private_send_key: str
sender_view_key: str
receiver: str
transactionID: str
timestamp: str
amount: float
transactiontype: str
class Contract(BaseModel):
sender_public_send_key: str
sender_private_send_key: str
sender_view_key: str
receiver: str
contractbinary: bytes
class Walletkey(BaseModel):
publickey: str
privatekey: str
class Wallet_public(BaseModel):
viewkey: str
class Passphrase(BaseModel):
passphrase: str
class ChainPayload(BaseModel):  # renamed: it shadowed the Blockchain class imported above
block: dict
class Recover(BaseModel):
passphrase: str
class Mining(BaseModel):
address: str
class EncryptedTransaction(BaseModel):
sender_publickey: bytes
receiver: bytes
amount: float
@app.get('/')
async def index():
""" returns index page """
return "see /docs for the api"
@app.post('/add_contract', tags=['contracts'])
async def addContract(contractTransaction: Contract):
""" Use this to add smart contracts """
senderPublicKey = contractTransaction.sender_public_send_key
senderPrivateKey = contractTransaction.sender_private_send_key
receiver = contractTransaction.receiver
senderViewKey = contractTransaction.sender_view_key
contractdata = contractTransaction.contractbinary
contract = blockchain.add_smartContract(senderprivatekey= senderPrivateKey,
sendersendpublickey= senderPublicKey,
senderviewkey= senderViewKey,
receiver= receiver,
compiledcontract=contractdata)
return {'message': contract}
@app.get("/get_the_chain", tags=['information'])
async def get_the_chain():
""" Use this to get the whole blockchain """
# update = blockchain.replace_chain()
response = {"blockchain": blockchain.chain, "length": len(blockchain.chain)}
return response
@app.post("/mining", tags=['mining'])
async def mine(keys:Mining):
""" This allows you to mine blocks """
# get previous block
prev_block = blockchain.get_prev_block()
# previous proof
prev_proof = prev_block['proof']
# proof
proof = blockchain.proof_of_work(previous_proof=prev_proof)
# previous hash
prev_hash = blockchain.hash(block=prev_block)
# add data
amount = algs.amount_change(chain=blockchain.chain)
# stealth_key = pbkdf2_sha256.hash(str(keys.publickey))
# decoy = decoy_addresses.decoy_transactions(amount)
# create block
message = blockchain.create_block(proof=proof, previous_hash=prev_hash, forger=keys.address)
#returns the last block in the chain
return {'message': message}
# else:
# return "no wallet detected"
@app.get("/status", tags=['information'])
async def is_valid():
""" Checks to see if chain is valid """
is_valid = blockchain.is_chain_valid(chain=blockchain.chain)
if is_valid:
response = {"message": "Not compromised"}
else:
response = {"message": "Blockchain has been compromised"}
return response
@app.post("/add_transaction/", tags=['transaction'])
async def add_transaction(transaction: AddTransaction):
""" Allows transactions to be added to the chain from nodes"""
senderpublicsendkey = transaction.sender_public_send_key
senderprivatesendkey = transaction.sender_private_send_key
senderviewkey = transaction.sender_view_key
receiver = transaction.receiver
amount = transaction.amount
transactionid = transaction.transactionID
new_transaction = blockchain.add_transaction(
senderprivatekey=senderprivatesendkey,
sendersendpublickey=senderpublicsendkey,
senderviewkey=senderviewkey,
receiver=receiver,
amount=amount,
transactionID=transactionid
)
result = 'transaction has been added and is awaiting verification'
return result
@app.post('/add_unconfirmed_transaction', tags=['transaction'])
async def add_unconfirmed_transaction(transaction: Transaction):
""" broadcasts transactions to all nodes to be verified by miners"""
senderpublicsendkey = transaction.sender_public_send_key
senderprivatesendkey = transaction.sender_private_send_key
senderviewkey = transaction.sender_view_key
receiver = transaction.receiver
amount = transaction.amount
new_transaction = blockchain.add_unconfirmed_transaction(senderprivatekey=senderprivatesendkey,
sendersendpublickey=senderpublicsendkey,
receiver=receiver,
senderviewkey=senderviewkey,
amount=amount)
blockchain.broadcast_transaction(transaction=new_transaction)
result = 'transaction has been added and is awaiting verification'
return result
""" Wallets should be made offline. """
@app.post("/add_node/", tags=['nodes'])
async def add_node(url:Url):
""" This is used to add nodes """
item = url.node
blockchain.add_node(item)
# transaction = blockchain.add_transaction(sender='Network', receiver=wallets.publickey, amount=30)
result = item
return result
@app.post('/add_one_node/', tags=['nodes'])
async def add_one_node(url:Url):
""" adds one node """
item = url.node
blockchain.update_nodes(node=item)
return item
@app.get("/replace_chain", tags=['nodes'])
async def replace_chain():
""" replaces the current chain with the most recent and longest chain """
blockchain.replace_chain()
blockchain.is_chain_valid(chain=blockchain.chain)
return{'message': 'chain has been updated and is valid',
'longest chain': blockchain.chain}
@app.websocket('/dashboard')
async def dashboard(websocket: WebSocket):
""" P2p Dashboard """
await websocket.accept()
# block = blockchain.chain
# websocket.send_json(block)
while True:
block = blockchain.chain
await websocket.send_text(f'Message: {block}')
await asyncio.sleep(10)
@app.websocket("/ws")
async def dashboard_endpoint(websocket: WebSocket):
""" This shows real time data for nodes"""
await websocket.accept()
message = None
    while True:
        try:
            if message != blockchain.chain:
                message = blockchain.chain
                await websocket.send_json(message)
                print(message)
            # the original slept with t.sleep(), which blocks the event loop,
            # and an unconditional `break` ended the loop after one pass;
            # sleep asynchronously and exit only on error/disconnect instead
            await asyncio.sleep(0.2)
        except Exception:
            break
print('client disconnected')
@app.websocket("/nodes")
async def nodes_endpoint(websocket: WebSocket):
    """ This shows real time data of each node, this should be used for detecting new nodes in the network or helping with automating adding nodes"""
    await websocket.accept()
    message = None
    while True:
        try:
            if message != blockchain.nodes:
                message = blockchain.nodes
                await websocket.send_json(message)
                print(message)
            # same fixes as /ws above; also renamed from dashboard_endpoint
            # so it no longer redefines the /ws handler's function name
            await asyncio.sleep(0.2)
        except Exception:
            break
print('client disconnected')
@app.post('/check_balance', tags=['wallet'])
async def check_balance(wallet:Wallet_public):
""" Checks the balance of a wallet with the view key """
#this route checks the balance of a publickey
# wallets.checkbalance(viewkey=wallet.viewkey, chain=blockchain.chain)
# return {"publickey":wallet.viewkey,
# "balance": wallets.balance}
balance = checkbalance.balance_check(wallet.viewkey, blockchain=blockchain.chain)
return {'Address': balance['receive address'], 'balance': f'{balance["balance"]}Tokens'}
@app.post('/insert_block', tags=['nodes'])
async def insert_chain(chain: ChainPayload):
""" replace the chain if all nodes are down or if node has a
firewall preventing get requests from web servers """
updated_chain = blockchain.update_chain(new_chain=chain.block)
return updated_chain
# if is_valid == True:
# blockchain.chain
# return chain
# else:
# return "Invalid chain"
# @app.post('/recover_wallet', tags=['wallet'])
# async def recover_wallet(recover:Recover):
# """ recover wallet with passphrase and publickey """
# is_valid = wallets.recover_wallet_with_passphrase(recover.passphrase)
# if is_valid == True:
# return {'message': 'Wallet recovery is successful!', 'private key': wallets.privatekey, 'public key': wallets.publickey, 'passphrase': recover.passphrase}
# else:
# return 'invalid publickey or passphrase!'
if __name__ == '__main__':
# hostname = socket.gethostname()
# IP = socket.gethostbyname(hostname)
# blockchain.replace_chain()
uvicorn.run('main:app', host=SERVER_HOST, port=SERVER_PORT, reload=SERVER_RELOAD)
# ran = run
# while run == ran:
# update = blockchain.replace_chain()
# t.sleep(60.0) | 29.551282 | 227 | 0.702907 | 971 | 0.084252 | 0 | 0 | 7,170 | 0.622126 | 6,572 | 0.570239 | 4,258 | 0.369458 |
d0e8204ae150a3e8c57ae24fe1a684bdf4ee48d0 | 4,650 | py | Python | utils/config.py | ebadawy/JointModeling | 5140e596113a6dabbc503a1fb1a3234efabf0f0b | [
"Apache-2.0"
]
| null | null | null | utils/config.py | ebadawy/JointModeling | 5140e596113a6dabbc503a1fb1a3234efabf0f0b | [
"Apache-2.0"
]
| null | null | null | utils/config.py | ebadawy/JointModeling | 5140e596113a6dabbc503a1fb1a3234efabf0f0b | [
"Apache-2.0"
]
| 1 | 2020-04-11T09:40:17.000Z | 2020-04-11T09:40:17.000Z | import json
from bunch import Bunch
import os
def get_config_from_json(json_file):
"""
Get the config from a json file
:param json_file:
:return: config(namespace) or config(dictionary)
"""
# parse the configurations from the config json file provided
with open(json_file, 'r') as config_file:
config_dict = json.load(config_file)
# convert the dictionary to a namespace using bunch lib
config = Bunch(config_dict)
config = default_values(config)
return config, config_dict
def process_config(jsonfile):
config, _ = get_config_from_json(jsonfile)
config.summary_dir = os.path.join("../experiments", config.exp_name, "summary")
config.checkpoint_dir = os.path.join("../experiments", config.exp_name, "checkpoint")
return config
def default_values(config):
    # setdefault() keeps any user-supplied value and only fills in what is missing,
    # replacing the long chain of `x if not 'x' in config.keys() else config['x']` lines
    defaults = {
        'target_cluster': -1,
        'rater_id': -1,
        'gt_priors': False,
        'priors': False,
        'reg': False,
        'modified_CE': False,
        'ccc_err': False,
        'rmse_weights': 1,
        'cccerr_weights': 1,
        'yout_weights': 1,
        'alpha1': 1,
        'alpha2': 1,
        'fcs_num': 0,
        'n_fc': 16,
        # the original line tested the key 'tanh' instead of 'fc_act',
        # which silently overwrote any user-supplied 'fc_act'
        'fc_act': 'tanh',
        'fc_path': 0,
        # 'clf_bias' appeared twice in the original (defaults 0, then 1);
        # the first assignment won, so the effective default is 0
        'clf_bias': 0,
        'audio_video_feat': 0,
        'gt': 'onehot',
        'ccc_diff': -0.01,
        'reset_lr': True,
        'stage2': 0,
        'max_to_keep': 1000,
        'subset': 'joint_modling',
        'log_dir': 'logs',
        'max_length': 7500,
        'sequence_length': 7500,
        'learning_rate': 0.02,
        'num_epochs': 20,
    }
    for key, value in defaults.items():
        config.setdefault(key, value)
    return config
| 73.809524 | 133 | 0.529892 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,285 | 0.276344 |
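A minimal round-trip through these helpers; the JSON contents are illustrative:
# experiment.json: {"exp_name": "demo", "learning_rate": 0.001}
config = process_config("experiment.json")
print(config.exp_name)       # "demo"
print(config.learning_rate)  # 0.001 -- user value kept
print(config.num_epochs)     # 20    -- filled in by default_values
print(config.summary_dir)    # ../experiments/demo/summary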
d0e9d30679706cc8bbbaa272614c4af5c8ce41cd | 7,096 | py | Python | app/user/views.py | cosmos-sajal/magic-link | 346e828673f298bae9ec3075db8d5e837e4b7aaf | [
"MIT"
]
| 2 | 2020-10-19T07:35:59.000Z | 2020-10-24T17:43:41.000Z | app/user/views.py | cosmos-sajal/magic-link | 346e828673f298bae9ec3075db8d5e837e4b7aaf | [
"MIT"
]
| null | null | null | app/user/views.py | cosmos-sajal/magic-link | 346e828673f298bae9ec3075db8d5e837e4b7aaf | [
"MIT"
]
| null | null | null | import json
from django.views import View
from django.shortcuts import redirect, render
from django.core.exceptions import ObjectDoesNotExist
from django.contrib import messages
from django.http.response import HttpResponseRedirect
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from user.services.user_service import UserService, TokenService
from user.services.cookies_service import CookiesService
from user.forms.user_forms import LoginForm, MagicLinkForm, RegisterUserForm
from helpers.cache_adapter import CacheAdapter
from user.services.magic_link_service import MagicLinkService
from worker.send_email import send_email
class RegisterUserView(View):
form_class = RegisterUserForm
template_name = 'user/user_register_form.html'
def __create_user(self, data):
email = data['email']
password = data['password']
username = data['username']
user_service = UserService()
user_service.create_user(
email=email,
password=password,
username=username
)
def get(self, request, **kwargs):
form = self.form_class()
return render(request, self.template_name, context={'form': form})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
self.__create_user(form.cleaned_data)
messages.success(request, 'User registered')
return HttpResponseRedirect("/api/v1/user/login/")
else:
messages.error(
request, 'User registration failed!')
return render(request, self.template_name, context={'form': form})
class GenerateMagicLinkView(View):
form_class = MagicLinkForm
template_name = 'user/magic_link_form.html'
def get(self, request, **kwargs):
form = self.form_class()
return render(request, self.template_name, context={'form': form})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
email = form.cleaned_data['email']
user_service = UserService()
user = user_service.get_user(email=email)
magic_link_service = MagicLinkService()
res = magic_link_service.generate_magic_link(
request,
user,
"/api/v1/user/details/"
)
if not res['is_success']:
messages.error(
request, 'Link generation failed!')
return render(request, self.template_name, context={'form': form})
send_email.delay(res['email'], res['content'])
            messages.success(
                request, 'Magic Link sent to your email!')
else:
messages.error(
request, 'Link generation failed!')
return render(request, self.template_name, context={'form': form})
class RedirectMagicLinkView(APIView):
"""
Redirect the user to the redirect link
corresponding to the magic link token key
"""
def __get_token(self, user_id):
"""
Return token for the user
Args:
            user_id (int)
"""
try:
token = Token.objects.get(user_id=user_id)
return token.key
except ObjectDoesNotExist:
token = Token.objects.create(user_id=user_id)
return token.key
def get(self, request, token):
"""
GET API -> /api/v1/user/magic_link/sign_in/<token>/
"""
service = MagicLinkService()
key = service.get_cache_key(token)
cache_adapter = CacheAdapter()
value = cache_adapter.get(key)
if value is None:
redirect_url = service.get_default_redirect_url()
return HttpResponseRedirect(redirect_url)
value = json.loads(value)
user_id = value['user_id']
redirect_link = value['redirect_link']
token = self.__get_token(user_id)
response = service.set_cookies_in_response(
request,
redirect(redirect_link),
token
)
cache_adapter.delete(key)
return response
class LoginView(View):
form_class = LoginForm
template_name = 'user/login_form.html'
def get(self, request, **kwargs):
form = self.form_class()
return render(request, self.template_name, context={'form': form})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
messages.success(request, 'User logged in')
token_service = TokenService(form.cleaned_data['email'])
token = token_service.get_token()
cookies_service = CookiesService()
response = cookies_service.set_cookies_in_response(
request,
redirect("/api/v1/user/details/"),
token
)
return response
else:
messages.error(
request, 'User login failed!')
return render(request, self.template_name, context={'form': form})
class UserDetailView(APIView):
"""
Returns the user details
"""
renderer_classes = [TemplateHTMLRenderer]
template_name = 'user/user_details.html'
def __get_user_from_token(self, token):
"""
Returns user from token
Args:
token (str)
"""
if token is None:
return None
try:
token = Token.objects.get(key=token)
return token.user
except ObjectDoesNotExist:
return None
def get(self, request):
"""
GET API -> /api/v1/user/details/
"""
token = request.COOKIES.get('token', None)
user = self.__get_user_from_token(token)
if user is None:
return Response({
'is_success': False,
'message': 'No token or incorrect token provided.'
})
return Response({
'is_success': True,
'username': user.username,
'email': user.email
})
class LogoutView(APIView):
"""
Logsout a user by deleting the token
from cookies
"""
def get(self, request):
"""
GET API -> /api/v1/user/logout/
"""
token = request.COOKIES.get('token', None)
cookies_service = CookiesService()
token_service = TokenService(token=token)
token_service.delete_token()
response = cookies_service.delete_cookies_in_response(
HttpResponseRedirect("/api/v1/user/login/")
)
return response
| 28.612903 | 82 | 0.604284 | 6,158 | 0.867813 | 0 | 0 | 0 | 0 | 0 | 0 | 1,160 | 0.163472 |
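A sketch of exercising the password flow with Django's test client; the URLs come from the redirects above, while the form field names are an assumption about LoginForm, and a registered user is presumed to exist:
from django.test import Client

c = Client()
c.post("/api/v1/user/login/", {"email": "a@b.c", "password": "secret"})
resp = c.get("/api/v1/user/details/")   # token cookie was set by LoginView
assert resp.status_code == 200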
d0ebd202e54ba07a8cd29fa1c18be451fa7b6215 | 560 | py | Python | dvadmin-backend/apps/vadmin/system/models/__init__.py | yuanlaimantou/vue-django-admin | 3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9 | [
"MIT"
]
| 193 | 2021-02-25T17:36:47.000Z | 2022-03-31T09:54:48.000Z | dvadmin-backend/apps/vadmin/system/models/__init__.py | yuanlaimantou/vue-django-admin | 3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9 | [
"MIT"
]
| 6 | 2021-04-23T12:35:14.000Z | 2021-09-16T03:27:28.000Z | dvadmin-backend/apps/vadmin/system/models/__init__.py | yuanlaimantou/vue-django-admin | 3757caf5d5ca2682ffbb6e017ef03ff9a3715cc9 | [
"MIT"
]
| 59 | 2021-03-29T09:25:00.000Z | 2022-03-24T06:53:27.000Z | from apps.vadmin.system.models.celery_log import CeleryLog
from apps.vadmin.system.models.config_settings import ConfigSettings
from apps.vadmin.system.models.dict_data import DictData
from apps.vadmin.system.models.dict_details import DictDetails
from apps.vadmin.system.models.logininfor import LoginInfor
from apps.vadmin.system.models.message_push import MessagePush
from apps.vadmin.system.models.message_push import MessagePushUser
from apps.vadmin.system.models.operation_log import OperationLog
from apps.vadmin.system.models.save_file import SaveFile
| 56 | 68 | 0.871429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
d0efa324455bfc009730dba34af132a2fd676468 | 260 | py | Python | app/hunter/urls.py | edynox/iis | 594200506b641cbac249dc6e95d229bea1edeb28 | [
"MIT"
]
| null | null | null | app/hunter/urls.py | edynox/iis | 594200506b641cbac249dc6e95d229bea1edeb28 | [
"MIT"
]
| null | null | null | app/hunter/urls.py | edynox/iis | 594200506b641cbac249dc6e95d229bea1edeb28 | [
"MIT"
]
| null | null | null | from django.conf.urls import url
from .views import profile, hunterList, changePass
urlpatterns = [
url(r'^$', profile, name='hunter_profile'),
url(r'^password', changePass, name='hunter_password'),
url(r'^list', hunterList, name='hunter_list')
]
| 28.888889 | 58 | 0.703846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.273077 |
d0f002a005c22f45ad7c152982173fb33768f83a | 1,822 | py | Python | src/aves/features/sparse.py | sergioangulo/aves | 43a14ec9c82929136a39590b15fe7f92182aae20 | [
"CC-BY-3.0"
]
| 34 | 2020-10-23T08:57:03.000Z | 2022-03-23T17:07:20.000Z | src/aves/features/sparse.py | sergioangulo/aves | 43a14ec9c82929136a39590b15fe7f92182aae20 | [
"CC-BY-3.0"
]
| 3 | 2021-12-02T22:42:25.000Z | 2021-12-10T02:37:01.000Z | src/aves/features/sparse.py | sergioangulo/aves | 43a14ec9c82929136a39590b15fe7f92182aae20 | [
"CC-BY-3.0"
]
| 11 | 2021-03-25T02:40:34.000Z | 2022-01-03T22:41:29.000Z | from scipy.sparse import dok_matrix
import pandas as pd
from cytoolz import itemmap
def long_dataframe_to_sparse_matrix(
df, index, vars, values, id_to_row=None, var_to_column=None
):
if id_to_row is None:
unique_index_values = df[index].unique()
id_to_row = dict(zip(unique_index_values, range(len(unique_index_values))))
n_rows = len(id_to_row)
if var_to_column is None:
unique_vars = df[vars].unique()
var_to_column = dict(zip(unique_vars, range(len(unique_vars))))
n_cols = len(var_to_column)
dtm = dok_matrix((n_rows, n_cols), dtype=df[values].dtype)
    for tup in df.itertuples():
elem_row, elem_col, elem_val = (
getattr(tup, index),
getattr(tup, vars),
getattr(tup, values),
)
if elem_row in id_to_row:
row_id = id_to_row[elem_row]
else:
continue
if elem_col in var_to_column:
col_id = var_to_column[elem_col]
else:
continue
dtm[row_id, col_id] = elem_val
return dtm.tocsr(), id_to_row, var_to_column
def sparse_matrix_to_long_dataframe(
matrix,
index_name="index",
var_name="column",
value_name="value",
index_map=None,
var_map=None,
reverse_maps=False,
):
matrix = matrix.todok()
df = pd.DataFrame.from_records(
list(map(lambda x: (x[0][0], x[0][1], x[1]), matrix.items()))
)
df.columns = [index_name, var_name, value_name]
if index_map:
if reverse_maps:
index_map = itemmap(reversed, index_map)
df[index_name] = df[index_name].map(index_map)
if var_map:
if reverse_maps:
var_map = itemmap(reversed, var_map)
df[var_name] = df[var_name].map(var_map)
return df
| 25.305556 | 83 | 0.625137 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 22 | 0.012075 |
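A quick round-trip sketch (the column names are illustrative):
import pandas as pd

df = pd.DataFrame({"user": ["u1", "u1", "u2"],
                   "item": ["a", "b", "a"],
                   "count": [3, 1, 2]})
m, rows, cols = long_dataframe_to_sparse_matrix(df, "user", "item", "count")
back = sparse_matrix_to_long_dataframe(m, "user", "item", "count",
                                       index_map=rows, var_map=cols,
                                       reverse_maps=True)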
d0f021bab54a09a9592e986f398fbcf5edaf9bb8 | 1,059 | py | Python | backent/api/migrations/0010_populate_event_tags.py | namafutatsu/backent | 9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d | [
"MIT"
]
| null | null | null | backent/api/migrations/0010_populate_event_tags.py | namafutatsu/backent | 9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d | [
"MIT"
]
| null | null | null | backent/api/migrations/0010_populate_event_tags.py | namafutatsu/backent | 9db38d0db8d9193fad7cd41aa2e4b55c75dfe01d | [
"MIT"
]
| null | null | null | from django.db import migrations
from backent.api import enums
def populate_tags(apps, schema_editor):
EventTag = apps.get_model('backent_api', 'EventTag')
EventTag.objects.get_or_create(name=enums.EVENT_TAG_BEGINNER_FRIENDLY)
EventTag.objects.get_or_create(name=enums.EVENT_TAG_INTERNATIONAL)
EventTag.objects.get_or_create(name=enums.EVENT_TAG_PWD_FRIENDLY)
EventTag.objects.get_or_create(name=enums.EVENT_TAG_UNDERAGE_FRIENDLY)
def unpopulate_tags(apps, schema_editor):
EventTag = apps.get_model('backent_api', 'EventTag')
EventTag.objects.filter(name=enums.EVENT_TAG_BEGINNER_FRIENDLY).delete()
EventTag.objects.filter(name=enums.EVENT_TAG_INTERNATIONAL).delete()
EventTag.objects.filter(name=enums.EVENT_TAG_PWD_FRIENDLY).delete()
EventTag.objects.filter(name=enums.EVENT_TAG_UNDERAGE_FRIENDLY).delete()
class Migration(migrations.Migration):
dependencies = [
('backent_api', '0009_add_event_tags'),
]
operations = [
migrations.RunPython(populate_tags, unpopulate_tags),
]
| 34.16129 | 76 | 0.777148 | 202 | 0.190746 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.075543 |
d0f08bf1893b85dd5be23260d025c66fdf373d25 | 938 | py | Python | test_ukz/test_ukzlang/test_uk_pipes.py | clauderichard/Ultrakazoid | 619f1afd1fd55afb06e7d27b2bc30eee9929f660 | [
"MIT"
]
| null | null | null | test_ukz/test_ukzlang/test_uk_pipes.py | clauderichard/Ultrakazoid | 619f1afd1fd55afb06e7d27b2bc30eee9929f660 | [
"MIT"
]
| null | null | null | test_ukz/test_ukzlang/test_uk_pipes.py | clauderichard/Ultrakazoid | 619f1afd1fd55afb06e7d27b2bc30eee9929f660 | [
"MIT"
]
| null | null | null | from .test_uk_base import TestUkBase
class TestUkPipes(TestUkBase):
################
# Tests
def test_pipe_seq_start(self):
self.equkz("|c","c")
self.equkz("c|","c<¬")
self.equkz("[ce|]","[ce]<¬")
def test_pipe_seq_end(self):
self.equkz("[c||d]e","c(de)")
self.equkz("[c*2_3||d]e","c*2_3(de)")
def test_pipe_par_start(self):
self.equkz("(ce|)","(ce)<¬")
self.equkz("(ce*2|)","[e_2c]<¬")
def test_pipe_par_end(self):
self.equkz("(c*2||de*3)f","(c_2de_3).f")
def test_pipe_reverseorder(self):
self.failukz("[c||d|e]")
self.failukz("(c||d|e)")
def test_pipe_par_duplicate_start(self):
self.failukz("(c|d|e)")
def test_pipe_seq_duplicate_start(self):
self.failukz("[c|d|e]")
def test_pipe_par_duplicate_end(self):
self.failukz("(c||d||e)")
def test_pipe_seq_duplicate_end(self):
self.failukz("[c||d||e]")
################################
| 24.051282 | 44 | 0.574627 | 868 | 0.921444 | 0 | 0 | 0 | 0 | 0 | 0 | 251 | 0.266454 |
d0f14602b75f864977df1e3824cf1f1c1da55ef2 | 25,329 | py | Python | wltp/autograph.py | ankostis/wltp | c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e | [
"Apache-2.0"
]
| null | null | null | wltp/autograph.py | ankostis/wltp | c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e | [
"Apache-2.0"
]
| null | null | null | wltp/autograph.py | ankostis/wltp | c95462cadbcab32d4fc94f8ea8bf9d85a0a3763e | [
"Apache-2.0"
]
| 1 | 2015-02-20T11:47:33.000Z | 2015-02-20T11:47:33.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019-2020 European Commission (JRC);
# Licensed under the EUPL (the 'Licence');
# You may not use this work except in compliance with the Licence.
# You may obtain a copy of the Licence at: http://ec.europa.eu/idabc/eupl
"""
Harvest functions & annotate their :term:`dependencies <dependency>` to build :term:`pipeline`\\s.
>>> from wltp.autograph import *
>>> __name__ = "wltp.autograph"
"""
import functools as fnt
import inspect
import logging
import re
import sys
from collections import ChainMap
from inspect import Parameter
from pathlib import Path
from types import ModuleType
from typing import (
Any,
Callable,
Collection,
Iterable,
List,
Mapping,
Pattern,
Set,
Tuple,
Union,
cast,
)
from boltons.iterutils import first
from boltons.setutils import IndexedSet as iset
from graphtik import keyword, optional, sfx, sfxed
from graphtik.base import Operation, func_name
from graphtik.fnop import FnOp, reparse_operation_data
from graphtik.modifier import is_sfx
from .utils import Literal, Token, asdict, aslist, astuple
try:
from re import Pattern as RegexPattern
except ImportError:
# PY3.6
from typing import Pattern as RegexPattern
log = logging.getLogger(__name__)
_my_project_dir = Path(__file__).parent
_FnKey = Union[Union[str, Pattern], Iterable[Union[str, Pattern]]]
def camel_2_snake_case(word):
"""
>>> camel_2_snake_case("HTTPResponseCodeXYZ")
'http_response_code_xyz'
From https://stackoverflow.com/a/1176023/548792
"""
return re.sub(r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))", r"_\1", word).lower()
def is_regular_class(name, item):
return inspect.isclass(item) and name[0].isupper()
def _is_in_my_project(item) -> bool:
"""UNUSED"""
in_my_project = False
try:
path = inspect.getfile(item)
except TypeError:
pass # raised for builtins e.g.`sys`
else:
try:
Path(path).relative_to(_my_project_dir)
in_my_project = True
except ValueError:
pass # raised when unrelated
return in_my_project
class Prefkey:
"""Index into dicts with a key or a joined(prefix+key), where prefix: tuple"""
sep = "."
def __init__(self, sep=None):
if sep is not None:
self.sep = sep
def _join_path_names(self, *names):
return self.sep.join(str(i) for i in names)
def _prefkey(self, d, key: _FnKey, default: Union[Callable, Any] = None):
if isinstance(key, tuple):
long_key = self.sep.join(key)
if long_key in d:
return d[long_key]
if key[-1] in d:
return d[key[-1]]
if key in d:
return d[key]
if callable(default):
return default()
return default
class FnHarvester(Prefkey):
"""
Collect public ops, routines, classes & their methods, partials into :attr:`collected`.
:param collected:
a list of 2-tuples::
(name_path, item_path)
where the 2 paths correspond to the same items;
the last path element is always a callable, and
the previous items may be modules and/or classes,
in case non-modules are given directly in :meth:`harvest()`::
[module, [class, ...] callable
E.g. the path of a class constructor is ``(module_name, class_name)``.
For :term:`operation`\\s, the name-part is ``None``.
:param excludes:
names to exclude; they can.be.prefixed or not
:param base_modules:
skip function/classes not in these modules; if not given, include all items.
If string, they are searched in :data:`sys.modules`.
:param predicate:
any user callable accepting a single argument returning falsy to exclude
the visited item
:param include_methods:
Whether to collect methods from classes
**Example:**
>>> from wltp import cycler, downscale, engine, vehicle, vmax
>>> modules = ('os', 'sys')
>>> funcs = FnHarvester(
... base_modules=modules,
... include_methods=False,
... ).harvest()
>>> len(funcs) > 50
True
>>> funcs
[(('os', 'PathLike'),
...
Use this pattern when iterating, to account for any :term:`operation` instances:
>>> funcs = [
    ...     (name, fn.fn if isinstance(fn, FnOp) else fn)
... for name, fn
... in funcs
... ]
"""
collected: List[Tuple[Tuple[str, ...], Tuple[Callable, ...]]]
include_methods: bool = True
def __init__(
self,
*,
excludes: Iterable[_FnKey] = None,
base_modules: Iterable[Union[ModuleType, str]] = None,
predicate: Callable[[Any], bool] = None,
include_methods=False,
sep=None,
):
super().__init__(sep)
if include_methods is not None:
self.include_methods = bool(include_methods)
self._seen: Set[int] = set()
self.excludes = set(excludes or ())
self.base_modules = iset(
sys.modules[m] if isinstance(m, str) else m for m in (base_modules or ())
)
self.predicate = predicate
self.collected = []
    def is_harvestable(self, name_path, item):
        """Exclude already-seen, private, and user-excluded objects (by name or path)."""
name = name_path[-1]
if (
name.startswith("_")
or id(item) in self._seen
or name in self.excludes
or self._join_path_names(*name_path) in self.excludes
):
return False
self._seen.add(id(item))
return (
(callable(item) or is_regular_class(name, item) or inspect.ismodule(item))
and (not self.base_modules or inspect.getmodule(item) in self.base_modules)
and (not self.predicate or self.predicate(item))
)
def _collect(self, name_path, item_path):
"""Obey decorated `name`"""
fn = item_path[-1]
decors = get_autograph_decors(fn)
if decors and "name" in decors:
name_path = name_path[:-1] + (decors["name"],)
self.collected.append((name_path, item_path))
def _harvest(self, name_path, item_path):
"""Recursively collect modules, routines & classes,."""
name = name_path[-1]
item = item_path[-1]
if not self.is_harvestable(name_path, item):
pass
elif isinstance(item, Operation):
self._collect(None, item_path)
elif inspect.ismodule(item):
for mb_name, member in inspect.getmembers(item):
# Reset path on modules
self._harvest((item.__name__, mb_name), (item, member))
elif inspect.isroutine(item):
self._collect(name_path, item_path)
elif is_regular_class(name, item):
self._collect(name_path, item_path)
if self.include_methods:
# TIP: scavenge ideas from :class:`doctest.DocTestFinder`
for mb_name, member in inspect.getmembers(item, predicate=callable):
self._harvest(name_path + (mb_name,), item_path + (member,))
else:
pass # partial?
def harvest(self, *items: Any, base_modules=...) -> List[Tuple[str, Callable]]:
"""
Collect any callable `items` and children, respecting `base_modules`, `excludes` etc.
:param items:
module fqdn (if already imported), items with ``__name__``, like
modules, classes, functions, or partials (without ``__name__``).
            If nothing is given, :attr:`base_modules` is used in its place.
.. Note::
This parameter works differently from :attr:`base_modules`, that is,
harvesting is not limited to those modules only, recursing to
any imported ones from `items`.
:return:
the :attr:`collected`
"""
old_base_modules = self.base_modules
try:
if base_modules is not ...:
self.base_modules = base_modules
if not items:
items = self.base_modules # type: ignore
for bi in items:
if isinstance(bi, str):
bi, name_path = sys.modules[bi], bi
else:
name_path = tuple(
func_name(bi, mod=0, fqdn=0, human=0, partials=1).split(".")
)
self._harvest(name_path, (bi,))
return self.collected
finally:
self.base_modules = old_base_modules
def paths(self):
"""returns the paths only (no callables), sorted"""
return list(zip(*self.collected))[0]
_unset = Token("unset") # TODO: replace `_unset` with ...
def autographed(
fn=_unset,
*,
name=None,
needs=_unset,
provides=_unset,
renames=_unset,
returns_dict=_unset,
aliases=_unset,
inp_sideffects=_unset,
out_sideffects=_unset,
domain: Union[str, int, Collection] = None,
**kws,
):
"""
Decorator adding ``_autograph`` func-attribute with overrides for :class:`Autograph`.
:param name:
the name of the operation.
- If the same `name` has already been defined for the same `domain`,
it is overwritten; otherwise, a new decoration is appended, so that
          :meth:`.Autograph.yield_wrapped_ops()` will produce more than one operation.
        - if not given, it will be derived from the `fn` at wrap-time.
:param domain:
one or more list-ified domains to assign decors into
(instead of the "default" domain);
        it allows reusing the same function to build different operations,
when later wrapped into an operation by :class:`.Autograph`.
:param renames:
mappings to rename both any matching the final `needs` & `provides`
:param inp_sideffects:
appended into `needs`; if a tuple, makes it a :class:`.sfxed`
:param out_sideffects:
appended into `provides`; if a tuple, makes it a :class:`.sfxed`
:param kws:
the rest arguments of :class:`graphtik.operation`, such as::
endured, parallel, marshalled, node_props
    The remaining arguments (e.g. `needs`) come from :class:`graphtik.operation`.
"""
kws.update(
{
k: v
for k, v in locals().items()
if v is not _unset and k not in "kws fn name domain".split()
}
)
def decorator(fn):
if hasattr(fn, "_autograph"):
autographs = fn._autograph
if domain in autographs:
autographs[domain][name] = kws
else:
autographs[domain] = {name: kws}
else:
decors = {domain: {name: kws}}
try:
fn._autograph = decors
except TypeError as ex:
# Built-in?
log.debug(
"Wrapped as partial %s to attach `autographed` attribute.", fn
)
fn = fnt.wraps(fn)(fnt.partial(fn))
fn._autograph = decors
return fn
if fn is _unset:
return decorator
return decorator(fn)
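# A hedged usage sketch (the function and names below are illustrative, not
# from this codebase): `autographed` only records overrides on the function;
# wrapping into an operation happens later in `Autograph.yield_wrapped_ops()`.
#
#     @autographed(provides="sum_ab")
#     def calc_sum_ab(a, b=0):
#         return a + b
#
#     # decors are stored per-domain / per-name on the `_autograph` attribute:
#     # calc_sum_ab._autograph == {None: {None: {"provides": "sum_ab"}}}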
def get_autograph_decors(
fn, default=None, domain: Union[str, int, Collection] = None
) -> dict:
"""
Get the 1st match in `domain` of the `fn` :func:`autographed` special attribute.
:param default:
        return this if `fn` is not autographed, or no domain matches
:param domain:
list-ified if a single str
:return:
the decors that will override :class:`Autograph` attributes, as found
from the given `fn`, and for the 1st matching domain in `domain`::
<fn>():
_autograph (function-attribute)
<domain> (dict)
<name> (dict)
<decors> (dict)
"""
for dmn in astuple(domain, "domain"):
if hasattr(fn, "_autograph"):
if dmn in fn._autograph:
return fn._autograph[dmn]
return default
class Autograph(Prefkey):
"""
Make a graphtik operation by inspecting a function
The params below (except `full_path_names`) are merged in this order
    (1st takes precedence):
1. dict from overrides keyed by `name`
2. decorated with :func:`autographed`
3. inspected from the callable
**Example:**
>>> def calc_sum_ab(a, b=0):
... return a + b
>>> aug = Autograph(out_patterns=['calc_', 'upd_'], renames={"a": "A"})
>>> aug.wrap_funcs([calc_sum_ab])
[FnOp(name='calc_sum_ab',
needs=['A', 'b'(?)],
provides=['sum_ab'],
fn='calc_sum_ab')]
"""
def __init__(
self,
out_patterns: _FnKey = None,
overrides: Mapping[_FnKey, Mapping] = None,
renames: Mapping = None,
full_path_names: bool = False,
domain: Union[str, int, Collection] = None,
sep=None,
):
super().__init__(sep)
#: Autodeduce `provides` by parsing function-names against a collection
        #: of these items, and decide `provides` by the 1st one matching
#: (unless `provides` are specified in the `overrides`):
#:
#: - regex: may contain 1 or 2 groups:
#:
#: - 1 group: the name of a single `provides`
#: - 2 groups: 2nd is the name of a single :term:`sideffected` dependency,
#: the 1st is the sideffect acting upon the former;
#:
#: - str: matched as a prefix of the function-name, which is trimmed
        #:   by the first one matching to derive a single `provides`;
#:
#: Note that any `out_sideffects` in overrides, alone, do not block the rule above.
self.out_patterns = out_patterns and aslist(out_patterns, "out_patterns")
#: a mapping of ``fn-keys --> dicts`` with keys::
#:
#: name, needs, provides, renames, inp_sideffects, out_sideffects
#:
#: An `fn-key` may be a string-tuple of names like::
#:
        #:     [module, [class, ...]] callable
self.overrides = overrides and asdict(overrides, "overrides")
#: global ``from --> to`` renamings applied both onto `needs` & `provides`.
#: They are applied after merging has been completed, so they can rename
#: even "inspected" names.
self.renames = renames and asdict(renames, "renames")
#: Whether operation-nodes would be named after the fully qualified name
#: (separated with `.` by default)
self.full_path_names = full_path_names
#: the :func:`.autographed` domains to search when wrapping functions, in-order;
#: if undefined, only the default domain (``None``) is included,
        #: otherwise, the default, ``None``, must be appended explicitly
        #: (usually at the end).
        #: List-ified if a single str; the :func:`autographed` decors of the
        #: 1st matching domain are used.
self.domain: Collection = (None,) if domain is None else domain
def _from_overrides(self, key):
return self.overrides and self._prefkey(self.overrides, key) or {}
def _match_fn_name_pattern(
self, fn_name, pattern
) -> Union[str, Tuple[str, str], None]:
"""return matched group or groups, callable results or after matched prefix string"""
if isinstance(pattern, RegexPattern):
m = pattern.search(fn_name)
groups = m and m.groups()
if groups:
if len(groups) == 1:
return groups[0]
if len(groups) > 2:
raise ValueError(
f"The `out_pattern` {pattern} matched on '{fn_name}' >2 groups: {groups}"
)
return sfxed(*reversed(groups))
elif callable(pattern):
return pattern(fn_name)
elif fn_name.startswith(pattern):
return fn_name[len(pattern) :]
def _deduce_provides_from_fn_name(self, fn_name):
## Trim prefix from function-name to derive a singular "provides".
provides = first(
self._match_fn_name_pattern(fn_name, p) for p in self.out_patterns
)
return provides
def _apply_renames(
self,
rename_maps: Iterable[Union[Mapping, Literal[_unset]]],
word_lists: Iterable,
):
"""
Rename words in all `word_lists` matching keys in `rename_maps`.
"""
rename_maps = [d for d in rename_maps if d and d is not _unset]
renames = ChainMap(*rename_maps)
if renames:
word_lists = tuple([renames.get(w, w) for w in wl] for wl in word_lists)
return word_lists
def _collect_rest_op_args(self, decors: dict):
"""Collect the rest operation arguments from `autographed` decoration."""
# NOTE: append more arguments as graphtik lib evolves.
rest_op_args = (
"returns_dict aliases endured parallel marshalled node_props".split()
)
return {k: v for k, v in decors.items() if k in rest_op_args}
def yield_wrapped_ops(
self,
fn: Union[
Callable,
Tuple[Union[str, Collection[str]], Union[Callable, Collection[Callable]]],
],
exclude=(),
domain: Union[str, int, Collection] = None,
) -> Iterable[FnOp]:
"""
        Convert a (possibly **@autographed**) function into graphtik **FnOp** operations,
respecting any configured overrides
:param fn:
either a callable, or a 2-tuple(`name-path`, `fn-path`) for::
[module[, class, ...]] callable
- If `fn` is an operation, yielded as is (found also in 2-tuple).
- Both tuple elements may be singulars, and are auto-tuple-zed.
- The `name-path` may (or may not) correspond to the given `fn-path`,
          and is used to derive the operation-name; if not given, the function
name is inspected.
- The last elements of the `name-path` are overridden by names in decorations;
if the decor-name is the "default" (`None`), the `name-path` becomes
the op-name.
- The `name-path` is not used when matching overrides.
:param exclude:
a list of decor-names to exclude, as stored in decors.
Ignored if `fn` already an operation.
:param domain:
if given, overrides :attr:`domain` for :func:`.autographed` decorators
to search.
            List-ified if a single str; the :func:`autographed` decors of the
            1st matching domain are used.
:return:
one or more :class:`FnOp` instances (if more than one name is defined
when the given function was :func:`autographed`).
        Overrides order: my-args, self.overrides, autograph-decorator, inspection
See also: David Brubeck Quartet, "40 days"
"""
if isinstance(fn, tuple):
name_path, fn_path = fn
else:
name_path, fn_path = (), fn
fun_path = cast(Tuple[Callable, ...], astuple(fn_path, None))
fun = fun_path[-1]
if isinstance(fun, Operation):
## pass-through operations
yield fun
return
def param_to_modifier(name: str, param: inspect.Parameter) -> str:
return (
optional(name)
# is optional?
if param.default is not inspect._empty # type: ignore
else keyword(name)
if param.kind == Parameter.KEYWORD_ONLY
else name
)
given_name_path = astuple(name_path, None)
decors_by_name = get_autograph_decors(fun, {}, domain or self.domain)
for decor_name, decors in decors_by_name.items() or ((None, {}),):
if given_name_path and not decor_name:
name_path = decor_path = given_name_path
else: # Name in decors was "default"(None).
name_path = decor_path = astuple(
(decor_name if decor_name else func_name(fun, fqdn=1)).split("."),
None,
)
assert decor_path, locals()
            if given_name_path:
                # Overlay `decor_path` over `name_path`, right-aligned.
                name_path = (*name_path[: -len(decor_path)], *decor_path)
fn_name = str(name_path[-1])
if fn_name in exclude:
continue
overrides = self._from_overrides(decor_path)
op_data = (
ChainMap(overrides, decors)
if (overrides and decors)
else overrides
if overrides
else decors
)
if op_data:
log.debug("Autograph overrides for %r: %s", name_path, op_data)
            op_props = "needs provides renames inp_sideffects out_sideffects".split()
needs, provides, override_renames, inp_sideffects, out_sideffects = (
op_data.get(a, _unset) for a in op_props
)
sig = None
if needs is _unset:
sig = inspect.signature(fun)
needs = [
param_to_modifier(name, param)
for name, param in sig.parameters.items()
if name != "self" and param.kind is not Parameter.VAR_KEYWORD
]
## Insert object as 1st need for object-methods.
#
if len(fun_path) > 1:
clazz = fun_path[-2]
# TODO: respect autograph decorator for object-names.
class_name = name_path[-2] if len(name_path) > 1 else clazz.__name__
if is_regular_class(class_name, clazz):
log.debug("Object-method %s.%s", class_name, fn_name)
needs.insert(0, camel_2_snake_case(class_name))
needs = aslist(needs, "needs")
if ... in needs:
if sig is None:
sig = inspect.signature(fun)
needs = [
arg_name if n is ... else n
for n, arg_name in zip(needs, sig.parameters)
]
if provides is _unset:
if is_regular_class(fn_name, fun):
## Convert class-name into object variable.
provides = camel_2_snake_case(fn_name)
elif self.out_patterns:
provides = self._deduce_provides_from_fn_name(fn_name) or _unset
if provides is _unset:
provides = ()
provides = aslist(provides, "provides")
needs, provides = self._apply_renames(
(override_renames, self.renames), (needs, provides)
)
if inp_sideffects is not _unset:
needs.extend(
(i if is_sfx(i) else sfxed(*i) if isinstance(i, tuple) else sfx(i))
for i in aslist(inp_sideffects, "inp_sideffects")
)
if out_sideffects is not _unset:
provides.extend(
(i if is_sfx(i) else sfxed(*i) if isinstance(i, tuple) else sfx(i))
for i in aslist(out_sideffects, "out_sideffects")
)
if self.full_path_names:
fn_name = self._join_path_names(*name_path)
op_kws = self._collect_rest_op_args(decors)
yield FnOp(fn=fun, name=fn_name, needs=needs, provides=provides, **op_kws)
def wrap_funcs(
self,
funcs: Collection[
Union[
Callable,
Tuple[
Union[str, Collection[str]], Union[Callable, Collection[Callable]]
],
]
],
exclude=(),
domain: Union[str, int, Collection] = None,
) -> List[FnOp]:
"""
        Convert (possibly **@autographed**) functions into one (or more) :term:`operation`\\s.
        :param funcs:
            a list of callables (or 2-tuples ``(name-path, fn-path)``)
.. seealso:: :meth:`yield_wrapped_ops()` for the rest arguments.
"""
return [
op
for fn_or_paths in funcs
for op in self.yield_wrapped_ops(
fn_or_paths, exclude=exclude, domain=domain
)
]
"""
Example code hidden from Sphinx:
>>> from graphtik import compose
>>> aug = Autograph(['calc_', 'upd_'], {
... 'calc_p_available':{'provides': 'p_avail'},
... 'calc_p_resist': {'provides': 'p_resist'},
... 'calc_inertial_power': {'provides': 'p_inert'},
... })
    >>> ops = aug.wrap_funcs(funcs.items())
>>> netop = compose('wltp', *(op for op in ops if op.provides))
"""
| 34.182186 | 98 | 0.569189 | 19,334 | 0.763315 | 6,625 | 0.261558 | 0 | 0 | 0 | 0 | 10,859 | 0.428718 |
d0f233170bb0ccf428c1b03e76710d47e94cea40 | 2,637 | py | Python | gen_embeddings.py | dominiccarrano/backdoor-nn-geometry | d1fa0754f1d57a9b303e2eb71edf0787a86529c8 | ["MIT"] | 1 | 2021-05-28T14:57:57.000Z | 2021-05-28T14:57:57.000Z | gen_embeddings.py | dominiccarrano/backdoor-nn-geometry | d1fa0754f1d57a9b303e2eb71edf0787a86529c8 | ["MIT"] | null | null | null | gen_embeddings.py | dominiccarrano/backdoor-nn-geometry | d1fa0754f1d57a9b303e2eb71edf0787a86529c8 | ["MIT"] | null | null | null |
import pandas as pd
import os
import torch
import numpy as np
import argparse
from trojai_utils import *
def batch_embeddings(reviews, N, batch_size, tokenizer, embedding, cls_first, embedding_dim=768):
embeddings = torch.zeros((N, 1, embedding_dim))
for i in range(N // batch_size):
review_batch = reviews[i*batch_size:(i+1)*batch_size]
embedding_batch = get_embeddings(tokenizer, embedding, review_batch, cls_token_is_first=cls_first)
embeddings[i*batch_size:(i+1)*batch_size, :, :] = embedding_batch
return embeddings
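# Note: the loop above assumes N is an exact multiple of batch_size (true for
# the defaults --n 1000 / --batch-size 50); any remainder rows would stay zero.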
# Get args
parser = argparse.ArgumentParser(description="Generate embeddings")
parser.add_argument('--embedding-type', type=str,
help='Model architecture (one of "BERT", "DistilBERT", "GPT-2")')
parser.add_argument('--n', type=int, default=1000,
help='Number of embeddings of each sentiment to generate')
parser.add_argument('--batch-size', type=int, default=50,
help='Size of batches to feed into the language model for embedding generation')
args = parser.parse_args()
# Load in the data
base_huggingface_path = "your path with the huggingface transformer files"
base_data_path = "your file path with the reviews datasets"
sentiment_data = pd.read_csv(os.path.join(base_data_path, "train_datasets.csv"))
# Split by sentiment
pos_data = sentiment_data[sentiment_data.sentiment==True].sample(args.n)
neg_data = sentiment_data[sentiment_data.sentiment==False].sample(args.n)
# Get random samples
pos_reviews = list(np.asarray(pos_data.reviewText, dtype=str))
pos_labels = torch.ones(args.n)
neg_reviews = list(np.asarray(neg_data.reviewText, dtype=str))
neg_labels = torch.zeros(args.n)
# Make embeddings
cls_first = (args.embedding_type == "DistilBERT") or (args.embedding_type == "BERT")
tokenizer, embedding = get_LM(args.embedding_type, base_huggingface_path)
pos_embeddings = batch_embeddings(pos_reviews, args.n, args.batch_size, tokenizer, embedding, cls_first)
neg_embeddings = batch_embeddings(neg_reviews, args.n, args.batch_size, tokenizer, embedding, cls_first)
# Save results
base_embedding_path = "your path to save embeddings to"
torch.save(pos_embeddings, os.path.join(base_embedding_path, args.embedding_type, "pos_embeddings{}.pt".format(args.n)))
torch.save(neg_embeddings, os.path.join(base_embedding_path, args.embedding_type, "neg_embeddings{}.pt".format(args.n)))
torch.save(pos_labels, os.path.join(base_embedding_path, args.embedding_type, "pos_labels{}.pt".format(args.n)))
torch.save(neg_labels, os.path.join(base_embedding_path, args.embedding_type, "neg_labels{}.pt".format(args.n)))
| 49.754717 | 120 | 0.759954 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 582 | 0.220705 |
d0f527a740c29092c88c485c40c531a07e3a243b | 1,584 | py | Python | example/image-classification/test_score.py | Abusnina/mxnet | 7f8d94a24bf64fe0f24712a7952a09725c2df9bd | ["Apache-2.0"] | 399 | 2017-05-30T05:12:48.000Z | 2022-01-29T05:53:08.000Z | smd_hpi/examples/binary-imagenet1k/test_score.py | yanghaojin/BMXNet | 102f8d0ed59529bbd162c37bf07ae58ad6c4caa1 | ["Apache-2.0"] | 58 | 2017-05-30T23:25:32.000Z | 2019-11-18T09:30:54.000Z | smd_hpi/examples/binary-imagenet1k/test_score.py | yanghaojin/BMXNet | 102f8d0ed59529bbd162c37bf07ae58ad6c4caa1 | ["Apache-2.0"] | 107 | 2017-05-30T05:53:22.000Z | 2021-06-24T02:43:31.000Z |
"""
test pretrained models
"""
from __future__ import print_function
import mxnet as mx
from common import find_mxnet, modelzoo
from common.util import download_file, get_gpus
from score import score
def download_data():
download_file('http://data.mxnet.io/data/val-5k-256.rec', 'data/val-5k-256.rec')
def test_imagenet1k_resnet(**kwargs):
models = ['imagenet1k-resnet-34',
'imagenet1k-resnet-50',
'imagenet1k-resnet-101',
'imagenet1k-resnet-152']
accs = [.72, .75, .765, .76]
for (m, g) in zip(models, accs):
acc = mx.metric.create('acc')
(speed,) = score(model=m, data_val='data/val-5k-256.rec',
rgb_mean='0,0,0', metrics=acc, **kwargs)
r = acc.get()[1]
print('testing %s, acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
def test_imagenet1k_inception_bn(**kwargs):
acc = mx.metric.create('acc')
m = 'imagenet1k-inception-bn'
g = 0.72
(speed,) = score(model=m,
data_val='data/val-5k-256.rec',
rgb_mean='123.68,116.779,103.939', metrics=acc, **kwargs)
r = acc.get()[1]
print('Tested %s acc = %f, speed = %f img/sec' % (m, r, speed))
assert r > g and r < g + .1
if __name__ == '__main__':
gpus = get_gpus()
assert len(gpus) > 0
batch_size = 16 * len(gpus)
gpus = ','.join([str(i) for i in gpus])
download_data()
test_imagenet1k_resnet(gpus=gpus, batch_size=batch_size)
test_imagenet1k_inception_bn(gpus=gpus, batch_size=batch_size)
| 33.702128 | 84 | 0.599747 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 386 | 0.243687 |
d0f7159e2aa65ab951c742e07f51245c54e91b4b | 293 | py | Python | rest_framework_siren/renderers.py | clarkperkins/django-rest-framework-siren | a0bc3d9990d88cfda7204fd0cf78ef08c38084b5 | ["Apache-2.0"] | 2 | 2017-01-29T03:08:12.000Z | 2019-03-28T20:12:01.000Z | rest_framework_siren/renderers.py | clarkperkins/django-rest-framework-siren | a0bc3d9990d88cfda7204fd0cf78ef08c38084b5 | ["Apache-2.0"] | null | null | null | rest_framework_siren/renderers.py | clarkperkins/django-rest-framework-siren | a0bc3d9990d88cfda7204fd0cf78ef08c38084b5 | ["Apache-2.0"] | null | null | null |
"""
Provides Siren rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class SirenRenderer(JSONRenderer):
"""
    Renderer which serializes to Siren JSON.
"""
media_type = 'application/vnd.siren+json'
format = 'siren'
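# A minimal usage sketch (the settings module belongs to the consuming project,
# not this package): register the renderer with Django REST framework so views
# can emit `application/vnd.siren+json` responses.
#
# REST_FRAMEWORK = {
#     'DEFAULT_RENDERER_CLASSES': [
#         'rest_framework_siren.renderers.SirenRenderer',
#         'rest_framework.renderers.JSONRenderer',
#     ],
# }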
| 18.3125 | 49 | 0.726962 | 157 | 0.535836 | 0 | 0 | 0 | 0 | 0 | 0 | 126 | 0.430034 |
d0f7870601183663c27b2407e2c8458ad2ea5542 | 4,050 | py | Python | src/ekpmeasure/experiments/ferroelectric/_tester/_wfs.py | cjfinnell/ekpmeasure | e6611c053cad28e06f4f8a94764ebe3805cddb15 | ["MIT"] | null | null | null | src/ekpmeasure/experiments/ferroelectric/_tester/_wfs.py | cjfinnell/ekpmeasure | e6611c053cad28e06f4f8a94764ebe3805cddb15 | ["MIT"] | null | null | null | src/ekpmeasure/experiments/ferroelectric/_tester/_wfs.py | cjfinnell/ekpmeasure | e6611c053cad28e06f4f8a94764ebe3805cddb15 | ["MIT"] | null | null | null |
import numpy as np
__all__ = (
"standard_bipolar_sine",
"double_bipolar_sine",
"standard_bipolar",
"double_bipolar",
"semicircle",
"double_semicircle",
"gaussian",
"double_gaussian",
)
def semicircle(a, T):
"""Return semicircle bipolar wave with amplitude a (units of V) and period T (units of ms)
args:
a (float): Amplitude in Volts
T (float): Period in ms
"""
if T < 0.01:
raise ValueError("limit of Ferroelectric Tester")
count = int(T * 1000)
int_amp = int(2047 * a / 10)
wf = []
for i in range(count):
if i <= count / 2:
wf.append(np.sqrt(1 - ((i - count / 4) / (count / 4)) ** 2))
else:
wf.append(
-1 * np.sqrt(1 - ((i - count / 2 - count / 4) / (count / 4)) ** 2)
)
wf = np.array([int_amp * i + 2047 for i in wf])
return wf
def double_semicircle(a, T):
"""Return double semicircle bipolar wave with amplitude a (units of V) and period T (units of ms)
args:
a (float): Amplitude in Volts
T (float): Period in ms
"""
wf = np.concatenate((semicircle(a, T / 2), semicircle(a, T / 2)))
return wf
def gaussian(a, T, sigma="default"):
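    """Return Gaussian bipolar wave with amplitude a (units of V) and period T (units of ms)
    args:
        a (float): Amplitude in Volts
        T (float): Period in ms
        sigma (float or 'default'): Width of each Gaussian lobe, in samples
    """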
if T < 0.01:
raise ValueError("limit of Ferroelectric Tester")
count = int(T * 1000)
int_amp = int(2047 * a / 10)
if sigma == "default":
sigma = count / 20
# sigma = (1/(a*np.sqrt(2*np.pi)))
mu = count / 4
wf = []
for i in range(count):
        if i <= count / 2:
            # peak normalized to 1, like the other waveform builders, so the
            # 12-bit DAC scaling below stays within range
            wf.append(np.exp(-((i - mu) ** 2) / (2 * sigma ** 2)))
        else:
            wf.append(-np.exp(-((i - count / 2 - mu) ** 2) / (2 * sigma ** 2)))
wf = np.array([int_amp * i + 2047 for i in wf])
return wf
def double_gaussian(a, T, sigma="default"):
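    """Return double Gaussian bipolar wave with amplitude a (units of V) and period T (units of ms)
    args:
        a (float): Amplitude in Volts
        T (float): Period in ms
        sigma (float or 'default'): Width of each Gaussian lobe, in samples
    """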
wf = np.concatenate((gaussian(a, T / 2, sigma), gaussian(a, T / 2, sigma)))
return wf
def standard_bipolar(a, T):
"""Return standard bipolar triangle wave with amplitude a (units of V) and period T (units of ms)
args:
a (float): Amplitude in Volts
T (float): Period in ms
"""
if T < 0.01:
raise ValueError("limit of Ferroelectric Tester")
count = int(T * 1000)
int_amp = int(2047 * a / 10)
step = 4 / (count)
wf = []
for i in range(count):
if i <= count / 4:
wf.append(i * step)
elif i > count / 4 and i <= 3 * count / 4:
wf.append(wf[i - 1] - step)
else:
wf.append(wf[i - 1] + step)
wf = np.array([int_amp * i + 2047 for i in wf])
return wf
def double_bipolar(a, T):
"""Return double bipolar triangle wave with amplitude a (units of V) and period T (units of ms)
args:
a (float): Amplitude in Volts
T (float): Period in ms
"""
wf = np.concatenate((standard_bipolar(a, T / 2), standard_bipolar(a, T / 2)))
return wf
def standard_bipolar_sine(a, T):
"""Return standard bipolar sine wave with amplitude a (units of V) and period T (units of ms)
args:
a (float): Amplitude in Volts
T (float): Period in ms
"""
if T < 0.01:
raise ValueError("limit of Ferroelectric Tester")
count = int(T * 1000)
int_amp = int(2047 * a / 10)
wf = np.array(
[int_amp * np.sin(2 * np.pi * i / (count)) + 2047 for i in range(count)]
)
return wf
def double_bipolar_sine(a, T):
"""Return standard bipolar sine wave with amplitude a (units of V) and period T (units of ms)
args:
a (float): Amplitude in Volts
T (float): Period in ms
"""
if T < 0.01:
raise ValueError("limit of Ferroelectric Tester")
count = int(T * 1000)
int_amp = int(2047 * a / 10)
wf = np.array(
[int_amp * np.sin(4 * np.pi * i / (count)) + 2047 for i in range(count)]
)
return wf
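# A hedged usage sketch: every builder returns DAC codes centred on 2047 (the
# mid-scale of a 12-bit converter), one sample per microsecond.
#
#     >>> wf = standard_bipolar_sine(a=5, T=1)   # 5 V amplitude, 1 ms period
#     >>> len(wf)
#     1000
#     >>> int(round(wf[0]))
#     2047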
| 23.546512 | 101 | 0.536049 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,576 | 0.389136 |
d0f98a3a2c3fd048d824ad74c18868bd24ec85c0 | 1,686 | py | Python | inference.py | KirtoXX/Object_track | 92b7d3308ab12d9211b04d18f825bf9a488c46a2 | ["Apache-2.0"] | null | null | null | inference.py | KirtoXX/Object_track | 92b7d3308ab12d9211b04d18f825bf9a488c46a2 | ["Apache-2.0"] | null | null | null | inference.py | KirtoXX/Object_track | 92b7d3308ab12d9211b04d18f825bf9a488c46a2 | ["Apache-2.0"] | null | null | null |
from keras import layers
import tensorflow as tf
from Resnet import ResNet50
import keras
from keras.models import Input,Model
def inference(image_pre,image_now,location_tensor,shape):
    # extract high-level features with a shared backbone
input = Input(shape=shape)
vision_model = keras.applications.MobileNet(include_top=False,
weights='imagenet',
input_tensor=input,
input_shape=[224,224,3])
vision_model.trainable = False
feature1 = vision_model(image_pre)
feature2 = vision_model(image_now)
#reshape tensor to vector
flatten = layers.Flatten()
feature1 = flatten(feature1)
feature2 = flatten(feature2)
#get high level feature
fc_unit = 512
fc1 = layers.Dense(units=fc_unit,name='fc1',activation='relu')
reshape = layers.Reshape((1,fc_unit))
bn1 = layers.BatchNormalization(name='bn1')
#fc1 block
feature1 = fc1(feature1)
feature2 = fc1(feature2)
feature1 = bn1(feature1)
feature2 = bn1(feature2)
feature1 = layers.Activation('relu')(feature1)
feature2 = layers.Activation('relu')(feature2)
feature1 = reshape(feature1)
feature2 = reshape(feature2)
    # stack features to (sample, time_step, input_dim)
out = layers.concatenate([feature1,feature2],axis=1)
out = layers.GRU(units=128,name='GRU')(out)
out = layers.concatenate([out,location_tensor],axis=1)
out = layers.Dense(units=64, name='fc2')(out)
out = layers.Activation('relu')(out)
out = layers.Dense(units=4,name='fc3')(out)
out = layers.Activation('sigmoid')(out)
return out
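# A hedged usage sketch (input shapes are illustrative): wire two frames plus
# the previous bounding-box tensor into a trainable Model.
#
#     image_pre = Input(shape=(224, 224, 3))
#     image_now = Input(shape=(224, 224, 3))
#     location = Input(shape=(4,))
#     out = inference(image_pre, image_now, location, shape=(224, 224, 3))
#     model = Model(inputs=[image_pre, image_now, location], outputs=out)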
| 31.811321 | 72 | 0.645907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 196 | 0.116251 |
d0f9bc7af6fe73617d028c362d0710385e92b83d | 1,807 | py | Python | src/visualizations/Visualize.py | chpatola/election_nlp | 6463edb2eacca09ff828029c69d11be7985ceeb0 | ["MIT"] | 1 | 2020-04-11T12:00:09.000Z | 2020-04-11T12:00:09.000Z | src/visualizations/Visualize.py | chpatola/election_nlp | 6463edb2eacca09ff828029c69d11be7985ceeb0 | ["MIT"] | null | null | null | src/visualizations/Visualize.py | chpatola/election_nlp | 6463edb2eacca09ff828029c69d11be7985ceeb0 | ["MIT"] | null | null | null |
"""Visualizations for NLP analysis"""
import pandas as pd
import numpy as np
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sns
def _plot_classification_report(val_y,predictions):
report = classification_report(val_y, predictions, output_dict=True)
report_df = pd.DataFrame(report).transpose().round(4)
fig, ax = plt.subplots()
ax.axis('off')
ax.axis('tight')
ax.table(cellText=report_df.values,
colLabels=report_df.columns,
rowLabels=report_df.index,
loc='center',
bbox=[0.2, 0.2, 0.8, 0.8])
fig.tight_layout()
return fig
def cm_analysis(y_true, y_pred, filename, labels, ymap=None, figsize=(15,50)):
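    """Plot the confusion matrix as a percentage-annotated seaborn heatmap.
    Diagonal cells show percent and count/row-total, off-diagonal cells show
    percent and count; `ymap` optionally remaps labels before counting.
    """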
if ymap is not None:
y_pred = [ymap[yi] for yi in y_pred]
y_true = [ymap[yi] for yi in y_true]
labels = [ymap[yi] for yi in labels]
cm = confusion_matrix(y_true, y_pred, labels=labels)
cm_sum = np.sum(cm, axis=1, keepdims=True)
cm_perc = cm / cm_sum.astype(float) * 100
annot = np.empty_like(cm).astype(str)
nrows, ncols = cm.shape
for i in range(nrows):
for j in range(ncols):
c = cm[i, j]
p = cm_perc[i, j]
if i == j:
s = cm_sum[i]
annot[i, j] = '%.1f%%\n%d/%d' % (p, c, s)
elif c == 0:
annot[i, j] = ''
else:
annot[i, j] = '%.1f%%\n%d' % (p, c)
cm = pd.DataFrame(cm, index=labels, columns=labels)
cm.index.name = 'Actual'
cm.columns.name = 'Predicted'
fig, ax = plt.subplots(figsize=figsize)
sns.heatmap(cm, annot=annot, fmt='', ax=ax,vmin=0, vmax=60)
    plt.savefig(filename, bbox_inches='tight')
    plt.show()
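# A hedged usage sketch (label names are illustrative):
#
#     cm_analysis(y_true, y_pred, "confusion.png", labels=["neg", "pos"])
#
# writes the annotated confusion-matrix heatmap to confusion.png.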
d0f9d9924cd74ed348272e7fd7ebf2c3d8c9e835 | 2,844 | py | Python | sqlpuzzle/_queries/selectoptions.py | Dundee/python-sqlpuzzle | 260524922a0645c9bf94a9779195f93ef2c78cba | ["MIT"] | 8 | 2015-03-19T11:25:32.000Z | 2020-09-02T11:30:10.000Z | sqlpuzzle/_queries/selectoptions.py | Dundee/python-sqlpuzzle | 260524922a0645c9bf94a9779195f93ef2c78cba | ["MIT"] | 7 | 2015-03-23T14:34:28.000Z | 2022-02-21T12:36:01.000Z | sqlpuzzle/_queries/selectoptions.py | Dundee/python-sqlpuzzle | 260524922a0645c9bf94a9779195f93ef2c78cba | ["MIT"] | 4 | 2018-11-28T21:59:27.000Z | 2020-01-05T01:50:08.000Z |
from sqlpuzzle._common import Object
from sqlpuzzle._queries.options import Options
__all__ = ()
class SelectOptions(Options):
_definition_of_options = {
'sql_cache': {
'off': '',
'cache': 'SQL_CACHE',
'no_cache': 'SQL_NO_CACHE'
},
'duplicated': {
'off': '',
'all': 'ALL',
'distinct': 'DISTINCT',
'distinctrow': 'DISTINCTROW',
},
'sql_small_result': {
'off': '',
'on': 'SQL_SMALL_RESULT',
},
'sql_big_result': {
'off': '',
'on': 'SQL_BIG_RESULT',
},
'sql_buffer_result': {
'off': '',
'on': 'SQL_BUFFER_RESULT',
},
'sql_calc_found_rows': {
'off': '',
'on': 'SQL_CALC_FOUND_ROWS',
},
'straight_join': {
'off': '',
'on': 'STRAIGHT_JOIN',
},
'high_priority': {
'off': '',
'on': 'HIGH_PRIORITY',
},
}
def sql_cache(self, allow=True):
self._options['sql_cache'] = 'cache' if allow else 'off'
def sql_no_cache(self, allow=True):
self._options['sql_cache'] = 'no_cache' if allow else 'off'
def all(self, allow=True):
self._options['duplicated'] = 'all' if allow else 'off'
def distinct(self, allow=True):
self._options['duplicated'] = 'distinct' if allow else 'off'
def distinctrow(self, allow=True):
self._options['duplicated'] = 'distinctrow' if allow else 'off'
def sql_small_result(self, allow=True):
self._options['sql_small_result'] = 'on' if allow else 'off'
def sql_big_result(self, allow=True):
self._options['sql_big_result'] = 'on' if allow else 'off'
def sql_buffer_result(self, allow=True):
self._options['sql_buffer_result'] = 'on' if allow else 'off'
def sql_calc_found_rows(self, allow=True):
self._options['sql_calc_found_rows'] = 'on' if allow else 'off'
def straight_join(self, allow=True):
self._options['straight_join'] = 'on' if allow else 'off'
def high_priority(self, allow=True):
self._options['high_priority'] = 'on' if allow else 'off'
class SelectForUpdate(Object):
def __init__(self):
super().__init__()
self._for_update = False
def __str__(self):
if self._for_update:
return 'FOR UPDATE'
return ''
def __eq__(self, other):
return (
type(self) == type(other)
and self._for_update == other._for_update
)
@property
def is_set(self):
return self._for_update
def has(self, value):
return hasattr(self, value)
def for_update(self, allow=True):
self._for_update = bool(allow)
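# A minimal usage sketch (rendering of SelectOptions is inherited from the
# Options base class in sqlpuzzle._queries.options):
#
#     fu = SelectForUpdate()
#     fu.for_update()
#     str(fu)      # -> 'FOR UPDATE'
#
#     opts = SelectOptions()
#     opts.distinct()
#     opts.sql_calc_found_rows()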
| 27.085714 | 71 | 0.54782 | 2,740 | 0.963432 | 0 | 0 | 63 | 0.022152 | 0 | 0 | 709 | 0.249297 |
d0fa3582daacb7e7b72dfa9ecf2ba23f58d16510 | 1,289 | py | Python | Code/RowVariable.py | akankshadiwedy/t2wml | 02f60611eec19d10a92fd2cb06f07339cd2cb269 | ["MIT"] | null | null | null | Code/RowVariable.py | akankshadiwedy/t2wml | 02f60611eec19d10a92fd2cb06f07339cd2cb269 | ["MIT"] | null | null | null | Code/RowVariable.py | akankshadiwedy/t2wml | 02f60611eec19d10a92fd2cb06f07339cd2cb269 | ["MIT"] | null | null | null |
from Code.utility_functions import get_excel_row_index
class RowVariable:
def __init__(self) -> None:
self.value = None
def evaluate(self, bindings: dict) -> str:
"""
        This function checks if the row variable exists in the bindings dictionary.
        If found, it returns the bound value; on a missing key it falls back to
        the Excel row index parsed from the variable itself.
        :param bindings:
        :return: row value
"""
try:
value = bindings[self.value]
if value is not None:
return value
except KeyError:
return get_excel_row_index(self.value)
def check_for_top(self) -> bool:
"""
this function checks if $top is present as a column variable at any leaf
:return:
"""
if self.value and self.value == "$top":
return True
else:
return False
def check_for_bottom(self) -> bool:
"""
this function checks if $bottom is present as a column variable at any leaf
:return:
"""
if self.value and self.value == "$bottom":
return True
else:
return False
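# A hedged usage sketch (binding values are illustrative):
#
#     rv = RowVariable()
#     rv.value = "$row"
#     rv.evaluate({"$row": 5})   # -> 5 (bound value wins)
#     rv.check_for_top()         # -> False, since value != "$top"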
| 29.243902 | 98 | 0.574646 | 1,141 | 0.951626 | 0 | 0 | 0 | 0 | 0 | 0 | 508 | 0.423686 |
d0fc29ac4209ca758dbc0af3c328c5e20828a2e9 | 9,189 | py | Python | cvprac_abstraction/cvpChangeControl.py | titom73/arista-cvp-scripts | 64f7ffa28d2483b3dd357e9b6c671725a51661b4 | ["BSD-3-Clause"] | 2 | 2019-08-20T07:35:08.000Z | 2019-10-01T00:52:14.000Z | cvprac_abstraction/cvpChangeControl.py | inetsix/arista-cvp-scripts | 64f7ffa28d2483b3dd357e9b6c671725a51661b4 | ["BSD-3-Clause"] | 2 | 2019-05-07T14:36:38.000Z | 2019-07-26T05:56:51.000Z | cvprac_abstraction/cvpChangeControl.py | titom73/arista-cvp-scripts | 64f7ffa28d2483b3dd357e9b6c671725a51661b4 | ["BSD-3-Clause"] | 1 | 2021-05-08T20:15:36.000Z | 2021-05-08T20:15:36.000Z |
import logging
from datetime import datetime
from datetime import timedelta
from cvprac.cvp_client_errors import CvpApiError
class CvpChangeControl(object):
"""Change-control class to provide generic method for CVP CC mechanism.
Change Control structure is based on:
- A name to identify change
- A list of tasks already created on CVP and on pending state
    - An optional scheduling. If no schedule is defined,
      the tasks will be run 3 minutes after creation of the CC
    **List of publicly available methods:**
Methods
-------
add_task()
Append a task to self._list_changes
get_tasks()
        Return list of available tasks for this CC
get_list_changes()
Return list of tasks attached to this CC
create()
Create change-control on CVP server
Example
-------
>>> from cvprac_abstraction import CVP
>>> from cvprac_abstraction import connect_to_cvp
>>> from cvprac_abstraction.cvpConfiglet import CvpChangeControl
>>>
>>> parameters['cvp'] = '127.0.0.1'
>>> parameters['username'] = 'arista'
>>> parameters['password'] = 'arista'
>>>
>>> client = connect_to_cvp(parameters)
>>>
    >>> change_control = CvpChangeControl(cvp_server=client, name='MyChangeControl')
    >>> result = change_control.create(tz=timezone,
    ...                                country='FR',
    ...                                schedule=True,
    ...                                schedule_at='2019-03-01-12h00',
    ...                                snap_template="snapshotTemplate_9_4694793526491",
    ...                                change_type='Custom', stop_on_error="true")
>>>
Warnings
--------
    - Change Control execution does not run snapshots before and after with cvprac 1.0.1
"""
def __init__(self, cvp_server, name='Automated_Change_Control'):
"""Class Constructor.
        Build class content with the following activities:
- save cvp_server information
- save name for CC
        - instantiate list for tasks
- Collect tasks available from CVP
Parameters
----------
cvp_server : CvpClient
CVP Server information
name : str
Optional - Name of the Change Control.
Default is ``Automated_Change_Control``
"""
logging.debug('create instance of CvpChangeControl')
self._cvp_server = cvp_server
self._name = name
# List of available tasks from server
self._available = list()
# List to save tasks to run with their order
# Ex: [{'taskId': '100', 'taskOrder': 1},
# {'taskId': '101', 'taskOrder': 1},
# {'taskId': '102', 'taskOrder': 2}]
self._list_changes = list()
self._retrieve_tasks()
def _retrieve_tasks(self):
"""Extract tasks from CVP Server.
Connect to CVP server and collect tasks in pending state
These tasks are saved in self._available structure dedicated
to pending tasks.
"""
logging.debug('getting list of available task for change control')
self._available = self._cvp_server.api.change_control_available_tasks()
def add_task(self, task):
"""Add a tasks to available list.
This task attach this new tasks to the pending tasks list.
Parameters
----------
task : str
TaskID from CVP server
"""
self._available.append(task)
def get_tasks(self, refresh=False):
"""Provide list of all available tasks.
        Return the list of all tasks fetched from CVP and/or attached
        with the add_task method.
Parameters
----------
refresh : bool
Optional - Make a call to CVP to get latest list of tasks
Returns
-------
list
List of available tasks found in this CC
"""
        logging.debug('extracting list of available tasks out of our instance')
if refresh:
logging.debug('refreshing list of tasks available for change control') # noqa E501
self._retrieve_tasks()
return self._available
    def _build_change_dictionary(self, order_mode='linear'):
"""Build ordered list to schedule changes.
CVP Change Control expect a list with an order to run tasks.
By default, all tasks are executed at the same time.
But using order_mode set to incremental every task will
be scheduled sequentially in this change-control
Parameters
----------
order_mode : str
Optional - Method to build task list.
Shall be ``linear`` or ``incremental``.
Note
----
Only linear has been tested.
"""
logging.info('Building a dictionary of changes')
change_position = 1
for task in self._available:
change = dict()
change['taskId'] = task['workOrderId']
            change['taskOrder'] = change_position
logging.debug(' > Adding task %s to position %s',
change['taskId'],
change['taskOrder'])
self._list_changes.append(change)
if order_mode == 'incremental':
change_position += 1
def get_list_changes(self, mode='linear'):
"""Return list of tasks and their execution order.
Parameters
----------
mode : str
Information about tasks scheduling.
Shall be ``linear`` or ``incremental``.
Note
----
Only linear has been tested.
Returns
-------
list
List of changes and their order
"""
if len(self._list_changes) == 0:
            self._build_change_dictionary(order_mode=mode)
return self._list_changes
# TODO: manage way to retrieve Template ID
def create(self, mode='linear',
country='France',
tz='Europe/Paris',
schedule=False,
schedule_at='',
snap_template='1708dd89-ff4b-4d1e-b09e-ee490b3e27f0',
change_type='Custom',
stop_on_error="true"):
"""Create a change-control.
Parameters
----------
mode : str
Optional - method to order tasks (default : linear)
country : str
Optional - Country requested by CVP API (default:France)
tz : str
Optional - Timezone required by CVP (default: Europe/Paris)
schedule : bool
Optional - Enable CC scheduling (default: False)
schedule_at : str
Optional - Time to execute CC if scheduled
snap_template : str
Optional - Snapshot template ID to run before / after tasks
change_type : str
Optional - CVP definition for CC Might be Custom or Rollback.
(default: Custom)
stop_on_error : str
Optional - boolean string to stop CVP on errors
Returns
-------
dict
CVP creation result (None if error occurs)
"""
        # If scheduling is not enabled, then we create the change control
        # to be run now + 3 minutes by default
if schedule is False:
schedule_at = (datetime.now() + timedelta(seconds=180)).strftime("%Y-%m-%d %H:%M") # noqa E501
logging.debug('configure execution time in +3 minutes (%s)',
schedule_at)
        # If the list of changes to apply has not been built already,
# then we do it before creating change request
if len(self._list_changes) == 0:
            self._build_change_dictionary(order_mode=mode)
logging.debug('Tasks to attach to current change-control:')
for entry in self._list_changes:
logging.debug(' * Found task %s w/ position %s',
entry['taskId'],
entry['taskOrder'])
# FIXME: change-control does not set snapshot ID correctly and this one is not run before and after change
# Fix implemented in develop version :
# https://github.com/aristanetworks/cvprac/blob/develop/cvprac/cvp_api.py#L1633
        # pip install git+https://github.com/aristanetworks/cvprac.git@develop
# Should solve problem
try:
creation_request = self._cvp_server.api.create_change_control(name=self._name, # noqa E501
change_control_tasks=self._list_changes,
timezone=tz,
country_id=country,
date_time=schedule_at,
snapshot_template_key=snap_template,
change_control_type=change_type,
stop_on_error=stop_on_error)
return creation_request
except CvpApiError as err:
logging.error('Cannot create change-control - error message is %s',
format(err))
return None
| 34.939163 | 114 | 0.575144 | 9,061 | 0.98607 | 0 | 0 | 0 | 0 | 0 | 0 | 6,336 | 0.68952 |
d0fd59da3a0fd9fe3acc37b2d63f3055243a7e1f | 1,289 | py | Python | app/controllers/stores/update.py | Brunoro811/api_dangels | 21c064eaa4f5009412dddc9676044d6cc08a5b65 | ["MIT"] | null | null | null | app/controllers/stores/update.py | Brunoro811/api_dangels | 21c064eaa4f5009412dddc9676044d6cc08a5b65 | ["MIT"] | null | null | null | app/controllers/stores/update.py | Brunoro811/api_dangels | 21c064eaa4f5009412dddc9676044d6cc08a5b65 | ["MIT"] | null | null | null |
from flask import current_app, request
from http import HTTPStatus
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import Session
from app.models.stores.store_model import StoreModel
from app.decorators import verify_payload, validator
@validator(zip_code="zip_code")
@verify_payload(
fields_and_types={
"name_store": str,
"street": str,
"number": int,
"zip_code": str,
"other_information": str,
},
optional=[
"name_store",
"street",
"number",
"zip_code",
"other_information",
],
)
def update_store(data: dict, id: int):
session: Session = current_app.db.session
try:
        store = StoreModel.query.get(id)
        if not store:
            raise NoResultFound
        data = request.get_json()
        for key, value in data.items():
            if key == "name_store":
                value = value.title()
            else:
                value = value.capitalize()
            setattr(store, key, value)
        session.add(store)
        session.commit()
        return "", HTTPStatus.NO_CONTENT
    except NoResultFound:
        return {"error": "Not found store."}, HTTPStatus.BAD_REQUEST
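# A hedged request sketch (the HTTP verb and URL come from the app's route
# registration, which lives elsewhere; both are illustrative here):
#
#     PATCH /stores/<id>
#     {"name_store": "my shop", "zip_code": "31000-000"}
#
# responds 204 No Content on success, or 400 with
# {"error": "Not found store."} when the id does not match a store.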
| 25.78 | 68 | 0.59969 | 0 | 0 | 0 | 0 | 1,031 | 0.799845 | 0 | 0 | 163 | 0.126455 |
190383e67ecd4d49f6a52f77fa42e3e6a18c204f | 4,197 | py | Python | misc/openstack-dev.py | tnoff/OpenDerp | 44f1e5c2027a2949b785941044a8503a34423228 | ["BSD-2-Clause-FreeBSD"] | null | null | null | misc/openstack-dev.py | tnoff/OpenDerp | 44f1e5c2027a2949b785941044a8503a34423228 | ["BSD-2-Clause-FreeBSD"] | null | null | null | misc/openstack-dev.py | tnoff/OpenDerp | 44f1e5c2027a2949b785941044a8503a34423228 | ["BSD-2-Clause-FreeBSD"] | null | null | null |
#!/usr/bin/env python
import argparse
import boto
from boto.s3 import connection as s3_connection
from cinderclient.v1 import client as cinder_v1
import code
from novaclient.v1_1 import client as nova_v1
from novaclient.shell import OpenStackComputeShell as open_shell
from glanceclient import Client as glance_client
from keystoneclient.v2_0 import client as key_v2
from neutronclient.v2_0 import client as neutron_v2
import os
import swiftclient
import sys
from urlparse import urlparse
def parse_args():
a = argparse.ArgumentParser(description='Give me the api clients')
a.add_argument('--username', help='Auth username')
a.add_argument('--password', help='Auth password')
a.add_argument('--tenant-name', help='Auth tenant name')
a.add_argument('--auth-url', help='Auth url')
a.add_argument('--ca-cert', help='Ca cert file')
return a.parse_args()
def get_env(args):
if not args['username']:
args['username'] = os.getenv('OS_USERNAME', None)
if not args['password']:
args['password'] = os.getenv('OS_PASSWORD', None)
if not args['tenant_name']:
args['tenant_name'] = os.getenv('OS_TENANT_NAME', None)
if not args['auth_url']:
args['auth_url'] = os.getenv('OS_AUTH_URL', None)
if not args['ca_cert']:
args['ca_cert'] = os.getenv('OS_CACERT')
# Check for args
must_have = ['username', 'password', 'tenant_name', 'auth_url']
for item in must_have:
        if args[item] is None:
sys.exit("Don't have:%s, exiting" % item)
return args
def main():
args = vars(parse_args())
args = get_env(args)
extensions = open_shell()._discover_extensions("1.1")
nova = nova_v1.Client(args['username'],
args['password'],
args['tenant_name'],
args['auth_url'],
extensions=extensions,
cacert=args['ca_cert'])
keystone = key_v2.Client(username=args['username'],
password=args['password'],
tenant_name=args['tenant_name'],
auth_url=args['auth_url'],
cacert=args['ca_cert'],)
neutron = neutron_v2.Client(username=args['username'],
password=args['password'],
tenant_name=args['tenant_name'],
auth_url=args['auth_url'],
cacert=args['ca_cert'],)
cinder = cinder_v1.Client(args['username'],
args['password'],
args['tenant_name'],
args['auth_url'],
cacert=args['ca_cert'],)
swift = swiftclient.client.Connection(auth_version='2',
user=args['username'],
key=args['password'],
tenant_name=args['tenant_name'],
authurl=args['auth_url'])
token = keystone.auth_token
service_catalog = keystone.service_catalog
catalog = service_catalog.catalog['serviceCatalog']
glance_ip = None
for endpoint in catalog:
if 'image' == endpoint['type']:
glance_ip = endpoint['endpoints'][0]['publicURL']
glance = glance_client('1', endpoint=glance_ip, token=token)
creds = keystone.ec2.list(keystone.user_id)
if len(creds) == 0:
keystone.ec2.create(keystone.user_id, keystone.tenant_id)
creds = keystone.ec2.list(keystone.user_id)
cred = creds[-1]
s3_url = urlparse(keystone.service_catalog.url_for(service_type='object-store'))
host, port = s3_url.netloc.split(':')
s3 = boto.connect_s3(aws_access_key_id=cred.access,
aws_secret_access_key=cred.secret,
host=host,
port=int(port),
is_secure=False,
calling_format=s3_connection.OrdinaryCallingFormat())
code.interact(local=locals())
if __name__ == '__main__':
main()
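# Example invocation (credentials and endpoint are placeholders):
#
#     ./openstack-dev.py --username admin --password secret \
#         --tenant-name demo --auth-url http://keystone.example.com:5000/v2.0
#
# or rely on the OS_USERNAME/OS_PASSWORD/OS_TENANT_NAME/OS_AUTH_URL variables
# sourced from an openrc file.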
| 41.97 | 84 | 0.571122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 773 | 0.184179 |
190468c6a6a1d847c7290e48226df8d86d0b3049 | 330 | py | Python | src/area51/nowd/decorator.py | thatcr/knowed | 025b66e4b660ed4339d3156ebca19065bb65b630 | ["MIT"] | null | null | null | src/area51/nowd/decorator.py | thatcr/knowed | 025b66e4b660ed4339d3156ebca19065bb65b630 | ["MIT"] | null | null | null | src/area51/nowd/decorator.py | thatcr/knowed | 025b66e4b660ed4339d3156ebca19065bb65b630 | ["MIT"] | null | null | null |
import inspect
from .descriptors import Descriptor
from .arguments import ArgsDescriptor
def node(fget, *args, **kwargs):
signature = inspect.signature(fget)
base = ArgsDescriptor if len(signature.parameters) > 1 else Descriptor
cls = type(fget.__code__.co_name, (base, ), {})
    return cls(fget, *args, **kwargs)
| 27.5 | 49 | 0.718182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0